本文使用 UE 5.4.2 官方自带的 OpenCV 4.5.5、官方 ncnn-20240410-windows-vs2022-shared 库,以及 ncnn-assets-master 模型库,实现实时 pose 捕捉,并将关键点显示在 UI 中。
需要创建 UE C++ 项目,新建空白插件 NCNNPlugin,将 bin、include、lib 复制到插件目录下的 Source\NCNNPlugin 中,并在 NCNNPlugin.Build.cs 中加载 include、lib、dll。随后将 dll 复制到插件目录下的 Plugins\NCNNPlugin\Binaries\Win64 中(Plugins 即插件目录),再复制一份到项目目录下的 XXX\Binaries\Win64 中(XXX 即项目目录),其他都不用管。最后在项目的 Build.cs 中启用 NCNNPlugin 插件,到这里就完成了插件的创建和 NCNN 库的加载。
NCNNPlugin.Build.cs
cs
// Copyright Epic Games, Inc. All Rights Reserved.
using System;
using UnrealBuildTool;
using System.IO;

/// <summary>
/// Build rules for the NCNNPlugin module: exposes the bundled ncnn headers,
/// links the import library, and stages/delay-loads ncnn.dll on Win64.
/// Expected layout under the module directory: include/, lib/ncnn.lib, bin/ncnn.dll.
/// </summary>
public class NCNNPlugin : ModuleRules
{
	public NCNNPlugin(ReadOnlyTargetRules Target) : base(Target)
	{
		PCHUsage = ModuleRules.PCHUsageMode.UseExplicitOrSharedPCHs;
		Console.WriteLine("ModuleDirectory::::::" + ModuleDirectory);

		// ncnn public headers (e.g. #include "ncnn/net.h").
		PublicIncludePaths.AddRange(
			new string[] {
				Path.Combine(ModuleDirectory, "include")
			}
		);

		// Import library for the shared ncnn build.
		PublicAdditionalLibraries.AddRange(new string[]
		{
			Path.Combine(ModuleDirectory, "lib", "ncnn.lib"),
		});

		if (Target.Platform == UnrealTargetPlatform.Win64)
		{
			string DLLPath = Path.Combine(ModuleDirectory, "bin", "ncnn.dll");
			Console.WriteLine("DLLPath::::::" + DLLPath);

			// Delay-load so the module can resolve ncnn.dll after it is staged
			// next to the executable.
			PublicDelayLoadDLLs.Add("ncnn.dll");

			// Fix: the original built the "project binaries dir" from
			// Target.RelativeEnginePath (the ENGINE path, not the project's)
			// and merely registered that non-existent path. Use the
			// target/source RuntimeDependencies overload instead, which copies
			// the DLL from the plugin into the binary output directory at
			// build/stage time.
			RuntimeDependencies.Add("$(BinaryOutputDir)/ncnn.dll", DLLPath, StagedFileType.NonUFS);

			// NOTE(review): defining compiler feature macros to 0 looks like a
			// workaround for ncnn's platform.h feature detection on MSVC —
			// confirm against the bundled ncnn headers before removing.
			PublicDefinitions.Add("__ARM_NEON=0");
			PublicDefinitions.Add("__SSE2__=0");
			PublicDefinitions.Add("__ANDROID_API__=0");
			PublicDefinitions.Add("__mips_msa=0");
			PublicDefinitions.Add("__loongarch_sx=0");
			PublicDefinitions.Add("__riscv_vector=0");
		}

		PrivateIncludePaths.AddRange(
			new string[] {
				// ... add other private include paths required here ...
			}
		);

		PublicDependencyModuleNames.AddRange(
			new string[]
			{
				"Core",
				// ... add other public dependencies that you statically link with here ...
			}
		);

		PrivateDependencyModuleNames.AddRange(
			new string[]
			{
				"CoreUObject",
				"Engine",
				"Slate",
				"SlateCore",
				// ... add private dependencies that you statically link with here ...
			}
		);

		DynamicallyLoadedModuleNames.AddRange(
			new string[]
			{
				// ... add any modules that your module loads dynamically here ...
			}
		);
	}
}
项目的build.cs
cs
// Copyright Epic Games, Inc. All Rights Reserved.
using UnrealBuildTool;

/// <summary>
/// Build rules for the game module. Pulls in the engine's OpenCV plugin
/// modules, UMG/Slate for the UI, and the NCNNPlugin wrapper module.
/// </summary>
public class NCNN_Thiredparty : ModuleRules
{
	public NCNN_Thiredparty(ReadOnlyTargetRules Target) : base(Target)
	{
		PCHUsage = ModuleRules.PCHUsageMode.UseExplicitOrSharedPCHs;

		// Public dependencies: core engine, input, OpenCV (engine plugin),
		// widget/UI modules, image handling, and the ncnn wrapper plugin.
		PublicDependencyModuleNames.AddRange(new string[]
		{
			"Core",
			"CoreUObject",
			"Engine",
			"InputCore",
			"EnhancedInput",
			"OpenCV",
			"OpenCVHelper",
			"UMG",
			"Slate",
			"SlateCore",
			"ImageWrapper",
			"NCNNPlugin"
		});

		PrivateDependencyModuleNames.AddRange(new string[] { });

		// Uncomment if you are using Slate UI
		// PrivateDependencyModuleNames.AddRange(new string[] { "Slate", "SlateCore" });

		// Uncomment if you are using online features
		// PrivateDependencyModuleNames.Add("OnlineSubsystem");

		// To include OnlineSubsystemSteam, add it to the plugins section in your uproject file with the Enabled attribute set to true
	}
}
NCNN库的使用
创建 Object 子类 NcnnDataClass、ActorComponent 子类 NCNNTrackComponent、UserWidget 子类 MyCPPWidget(需在编辑器内创建),以及 BP_UserWidget 蓝图(在其中添加名为 CVImage 的 Image 小部件)。
NcnnDataClass.h(NcnnDataClass.cpp中不用修改)
cpp
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "UObject/Object.h"
#if WITH_OPENCV
#include "PreOpenCVHeaders.h"
#include "opencv2/opencv.hpp"
#include "PostOpenCVHeaders.h"
#endif
#include "NcnnDataClass.generated.h"
// A single detected pose joint produced by detect_posenet().
// NOTE(review): this lives in the global namespace and shares a name with
// cv::KeyPoint — fine as ::KeyPoint today, but consider renaming/namespacing.
struct KeyPoint {
cv::Point2f p; // position in input-image pixel coordinates
float prob;    // heatmap peak value; draw_pose skips points below 0.5
};
// Empty UObject placeholder — this header mainly exists so the KeyPoint
// struct above is visible to the rest of the module.
UCLASS()
class NCNN_THIREDPARTY_API UNcnnDataClass : public UObject
{
GENERATED_BODY()
};
MyCPPWidget.h(MyCPPWidget.cpp中不用修改)
cpp
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "Blueprint/UserWidget.h"
#include "Components/Image.h"
#include "MyCPPWidget.generated.h"
/**
*
*/
// C++ base class for the BP_UserWidget Blueprint: exposes the Image widget
// that NCNNTrackComponent draws the annotated video frames into.
UCLASS()
class NCNN_THIREDPARTY_API UMyCPPWidget : public UUserWidget
{
GENERATED_BODY()
public:
virtual void NativeConstruct() override;
// Bound to the widget named exactly "CVImage" inside BP_UserWidget
// (meta=(BindWidget) requires the Blueprint widget name to match).
UPROPERTY(BlueprintReadWrite,meta = (BindWidget))
UImage* CVImage;
};
NCNNTrackComponent.h
cpp
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "Components/ActorComponent.h"
#include "ncnn/net.h"
#if WITH_OPENCV
#include "PreOpenCVHeaders.h"
#include "opencv2/opencv.hpp"
#include "PostOpenCVHeaders.h"
#endif
#include "NcnnDataClass.h"
#include "NCNNTrackComponent.generated.h"
UCLASS(ClassGroup=(Custom), meta=(BlueprintSpawnableComponent))
class NCNN_THIREDPARTY_API UNCNNTrackComponent : public UActorComponent
{
	GENERATED_BODY()
public:
	// Sets default values for this component's properties
	UNCNNTrackComponent();
protected:
	// Called when the game starts
	virtual void BeginPlay() override;
	virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;
public:
	// Called every frame
	virtual void TickComponent(float DeltaTime, ELevelTick TickType,
		FActorComponentTickFunction* ThisTickFunction) override;

	// --- NCNN ---
	ncnn::Net posenet;
	// Runs the pose network on a BGR frame and fills `keypoints` (one entry
	// per heatmap channel). Returns 0.
	static int detect_posenet(const cv::Mat& bgr, std::vector<KeyPoint>& keypoints, ncnn::Net& posenet);
	// Draws each keypoint with prob >= 0.5 as a filled circle.
	static void draw_pose(cv::Mat& image, const std::vector<KeyPoint>& keypoints, int radius = 3, const cv::Scalar& color = cv::Scalar(0, 255, 0));

	// --- UI ---
	// Assign the BP_UserWidget class in the editor on the actor carrying this
	// component; BeginPlay instantiates it and calls AddToViewport().
	UPROPERTY(EditAnywhere,BlueprintReadWrite)
	TSubclassOf<UUserWidget> BP_widget;
	// Transient texture the video frames are uploaded into each tick.
	UPROPERTY(BlueprintReadOnly)
	UTexture2D* CVLoadImage = nullptr;

	// --- OpenCV VideoCapture state ---
	cv::VideoCapture VideoCapture;
	// Fix: FrameTime is not a UPROPERTY and was left uninitialized; give all
	// plain members explicit defaults so Tick never reads garbage.
	float FrameTime = 0.0f;       // seconds per video frame (1 / FPS)
	bool bIsVideoPlaying = false;
	float AccumulatedTime = 0.0f; // time since the last presented frame
	cv::Mat Frame;                // raw BGR frame from the capture
	cv::Mat FrameBGRA;            // BGRA conversion buffer reused across ticks
};
NCNNTrackComponent.cpp
cpp
#include "NCNNTrackComponent.h"
#include "MyCPPWidget.h"
// 取消冲突宏定义
#undef UpdateResource
// Sets default values for this component's properties.
// NOTE(review): constructors also run for the class default object (CDO), so
// the model files are loaded more often than strictly needed — consider moving
// the loading into BeginPlay.
UNCNNTrackComponent::UNCNNTrackComponent()
{
	PrimaryComponentTick.bCanEverTick = true;
	posenet.opt.use_vulkan_compute = true;
	// Fix: ncnn's load_param/load_model return 0 on success; the original
	// discarded the results, so a missing or renamed model file failed
	// silently and only surfaced as garbage output during inference.
	if (posenet.load_param("D:/ue_temp/NCNN_Thiredparty/pose.param") != 0)
	{
		UE_LOG(LogTemp, Error, TEXT("NCNNTrackComponent: failed to load pose.param"));
	}
	if (posenet.load_model("D:/ue_temp/NCNN_Thiredparty/pose.bin") != 0)
	{
		UE_LOG(LogTemp, Error, TEXT("NCNNTrackComponent: failed to load pose.bin"));
	}
}
// Called when the game starts: opens the video, creates the target texture,
// and instantiates/attaches the UI widget that displays it.
void UNCNNTrackComponent::BeginPlay()
{
	Super::BeginPlay();
	// Load the video file.
	FString filepath = TEXT("d:/dance.mp4");
	UE_LOG(LogTemp, Warning, TEXT("尝试加载视频文件: %s"), *filepath);
	// Read the video with OpenCV.
	VideoCapture.open(TCHAR_TO_UTF8(*filepath));
	if (!VideoCapture.isOpened())
	{
		UE_LOG(LogTemp, Warning, TEXT("无法加载视频文件: %s"), *filepath);
		return;
	}
	UE_LOG(LogTemp, Warning, TEXT("成功加载视频文件: %s"), *filepath);
	// Query the frame rate from the video file.
	double FPS = VideoCapture.get(cv::CAP_PROP_FPS);
	if (FPS <= 0)
	{
		UE_LOG(LogTemp, Warning, TEXT("无法获取帧速率,默认设置为30 FPS"));
		FPS = 30.0; // Fall back to 30 FPS if the container reports none.
	}
	FrameTime = 1.0f / FPS;
	bIsVideoPlaying = true;
	UE_LOG(LogTemp, Warning, TEXT("视频帧速率: %f FPS"), FPS);
	// Fix: TickComponent resizes every frame to 256x512 before uploading, but
	// the original created the texture at the video's NATIVE size — the pitch
	// and row counts then disagreed with the upload loop, corrupting the image
	// (or overrunning the mip buffer for videos shorter than 512 px). Create
	// the texture at the model/upload size instead.
	UTexture2D* Texture2D = UTexture2D::CreateTransient(256, 512, PF_B8G8R8A8);
	if (!Texture2D)
	{
		UE_LOG(LogTemp, Warning, TEXT("纹理创建失败"));
		return;
	}
	CVLoadImage = Texture2D;
	// Fix: guard against an unassigned widget class — CreateWidget with a null
	// class (and the unchecked AddToViewport below) crashed the game.
	if (!BP_widget)
	{
		UE_LOG(LogTemp, Warning, TEXT("NCNNTrackComponent: BP_widget is not assigned"));
		return;
	}
	// Create and wire up the UI widget.
	UUserWidget* bpwidget = CreateWidget<UUserWidget>(GetWorld(), BP_widget);
	if (!bpwidget)
	{
		UE_LOG(LogTemp, Warning, TEXT("NCNNTrackComponent: failed to create widget"));
		return;
	}
	UMyCPPWidget* CPPWidget = Cast<UMyCPPWidget>(bpwidget);
	if (CPPWidget && CPPWidget->CVImage)
	{
		CPPWidget->CVImage->SetBrushFromTexture(CVLoadImage);
		CPPWidget->CVImage->Brush.ImageSize = FVector2D(CVLoadImage->GetSizeX(), CVLoadImage->GetSizeY());
	}
	bpwidget->AddToViewport();
}
// Tears down video playback when the component leaves play.
void UNCNNTrackComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
	Super::EndPlay(EndPlayReason);
	// Stop the tick-driven playback path, then release the capture handle.
	bIsVideoPlaying = false;
	const bool bCaptureOpen = VideoCapture.isOpened();
	if (bCaptureOpen)
	{
		VideoCapture.release();
		UE_LOG(LogTemp, Warning, TEXT("VideoCapture released successfully"));
	}
}
void UNCNNTrackComponent::TickComponent(float DeltaTime, ELevelTick TickType,
FActorComponentTickFunction* ThisTickFunction)
{
Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
if (!VideoCapture.isOpened() || !bIsVideoPlaying)
{
return;
}
// 累积时间
AccumulatedTime += DeltaTime;
if (AccumulatedTime >= FrameTime)
{
if (VideoCapture.read(Frame))
{
// 调整帧大小至 256x512,这里需要注意视频尺寸需要与模型要求尺寸保持一致关键点才能在正确位置
cv::resize(Frame, Frame, cv::Size(256, 512));
std::vector<KeyPoint> keypoints;
detect_posenet(Frame, keypoints, posenet);
// 如果 FrameBGRA 为空或尺寸不匹配,则初始化
if (FrameBGRA.empty() || FrameBGRA.size() != Frame.size())
{
FrameBGRA = cv::Mat(Frame.size(), CV_8UC4);
}
// 转换 BGR 图像为 BGRA 格式,并在 FrameBGRA 中保存
cv::cvtColor(Frame, FrameBGRA, cv::COLOR_BGR2BGRA);
draw_pose(FrameBGRA, keypoints);
// 更新纹理
FTexture2DMipMap& MipMap = CVLoadImage->GetPlatformData()->Mips[0];
void* TextureData = MipMap.BulkData.Lock(LOCK_READ_WRITE);
int32 SrcPitch = FrameBGRA.cols * FrameBGRA.elemSize();
int32 DstPitch = MipMap.SizeX * 4;
for (int32 Row = 0; Row < FrameBGRA.rows; ++Row)
{
FMemory::Memcpy(static_cast<uint8*>(TextureData) + Row * DstPitch, FrameBGRA.ptr(Row), SrcPitch);
}
MipMap.BulkData.Unlock();
CVLoadImage->UpdateResource();
}
// 重置累积时间
AccumulatedTime -= FrameTime;
}
}
// Runs the pose network on a BGR frame. Each output heatmap channel yields
// one keypoint: the location of its maximum, mapped back to input-image
// coordinates, with the peak value as its confidence. Always returns 0.
int UNCNNTrackComponent::detect_posenet(const cv::Mat& bgr, std::vector<KeyPoint>& keypoints, ncnn::Net& posenet)
{
	const int img_w = bgr.cols;
	const int img_h = bgr.rows;

	// Build the input tensor at the image's own size, converting BGR -> RGB.
	ncnn::Mat in = ncnn::Mat::from_pixels(bgr.data, ncnn::Mat::PIXEL_BGR2RGB, img_w, img_h);

	// ImageNet-style preprocessing: subtract the mean and scale per channel.
	const float mean_vals[3] = { 0.485f * 255.f, 0.456f * 255.f, 0.406f * 255.f };
	const float norm_vals[3] = { 1 / 0.229f / 255.f, 1 / 0.224f / 255.f, 1 / 0.225f / 255.f };
	in.substract_mean_normalize(mean_vals, norm_vals);

	ncnn::Extractor ex = posenet.create_extractor();
	ex.input("data", in);
	ncnn::Mat heatmaps;
	ex.extract("conv3_fwd", heatmaps);

	// Argmax over each heatmap channel to locate the keypoints.
	keypoints.clear();
	for (int c = 0; c < heatmaps.c; c++) {
		const ncnn::Mat channel = heatmaps.channel(c);
		float best_prob = 0.f;
		int best_x = 0;
		int best_y = 0;
		for (int y = 0; y < channel.h; y++) {
			const float* row = channel.row(y);
			for (int x = 0; x < channel.w; x++) {
				if (row[x] > best_prob) {
					best_prob = row[x];
					best_x = x;
					best_y = y;
				}
			}
		}
		// Scale the heatmap-space peak back up to input-image coordinates.
		KeyPoint kp;
		kp.p = cv::Point2f(best_x * img_w / (float)channel.w, best_y * img_h / (float)channel.h);
		kp.prob = best_prob;
		keypoints.push_back(kp);
	}
	return 0;
}
// Draws every keypoint with prob >= 0.5 as a filled circle of the given
// radius and color onto `image` (in place).
void UNCNNTrackComponent::draw_pose(cv::Mat& image, const std::vector<KeyPoint>& keypoints, int radius,
	const cv::Scalar& color)
{
	for (const KeyPoint& keypoint : keypoints)
	{
		// Fix: the original fprintf'd every keypoint to stderr every frame —
		// debug leftover that UE does not surface and that spams the console.
		// Route it through the verbose log channel instead.
		UE_LOG(LogTemp, Verbose, TEXT("keypoint %.2f %.2f = %.5f"), keypoint.p.x, keypoint.p.y, keypoint.prob);
		if (keypoint.prob < 0.5f)
		{
			continue;
		}
		// Filled circle (thickness -1) at the keypoint position.
		cv::circle(image, keypoint.p, radius, color, -1);
	}
}