Linux C++ ONNX Runtime YOLOv8 video detection demo

Contents

Project layout

Results

CMakeLists.txt

Code

Download


Project layout
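The original post shows the layout as a screenshot. The tree below is reconstructed from the paths referenced in CMakeLists.txt and main.cpp, so the exact contents of include/ and src/ are assumptions:

yolov8_demo/
├── CMakeLists.txt
├── include/
│   ├── YoloV8.hpp
│   └── onnxruntime/              # ONNX Runtime C/C++ headers
├── lib/
│   └── onnxruntime/              # libonnxruntime.so (third-party shared library)
├── src/                          # *.cpp sources collected by GLOB_RECURSE
├── models/
│   ├── yolov8n.onnx
│   └── lable.txt
└── test/
    └── test_car_person_1080P.mp4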

Results

./yolov8_demo --help

./yolov8_demo -c=2 -p=true

./yolov8_demo -c=1 -s=true
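The flags map to the CommandLineParser keys defined in main.cpp: -c sets the number of detection threads, -s shows the detection window, -g enables GPU inference, and -p prints per-frame timing statistics.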

CMakeLists.txt

# cmake needs this line
cmake_minimum_required(VERSION 3.0)

# Define project name
project(yolov8_demo)

# Release build flags
# SET(CMAKE_BUILD_TYPE "Release")
# set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -s")
# set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -std=c++17 -pthread -Wall")

# Debug build flags
SET(CMAKE_BUILD_TYPE "Debug")
set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG}")
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -std=c++17 -pthread")

set(OpenCV_LIBS opencv_videoio opencv_imgcodecs opencv_imgproc opencv_core opencv_dnn opencv_highgui)

include_directories(
    /usr/local/include/opencv4
    ${PROJECT_SOURCE_DIR}/include
    ${PROJECT_SOURCE_DIR}/include/onnxruntime
)

link_directories(
    ${PROJECT_SOURCE_DIR}/lib/onnxruntime   # third-party shared libraries
    /usr/local/lib/
)

# Recursively collect the source files
file(GLOB_RECURSE SRCS ${PROJECT_SOURCE_DIR}/src/*.cpp)

# Declare the executable target built from your sources
add_executable(yolov8_demo ${SRCS})

# Link your application with ONNX Runtime and the OpenCV libraries
target_link_libraries(yolov8_demo
    onnxruntime
    ${OpenCV_LIBS}
)
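The post does not spell out the build steps; a typical out-of-source build would look like the sketch below. The binary resolves ./models and ./test relative to the working directory, and the bundled ONNX Runtime shared library must be on the loader path, so the exact paths are assumptions:

# build (assumes OpenCV and the bundled ONNX Runtime are already in place)
mkdir -p build && cd build
cmake ..
make -j$(nproc)

# run from the project root so ./models and ./test resolve
cd ..
export LD_LIBRARY_PATH=$PWD/lib/onnxruntime:$LD_LIBRARY_PATH
./build/yolov8_demo --help

The commands shown in the Results section assume the yolov8_demo binary has been copied next to the models/ and test/ directories.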

Code

main.cpp

#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
#include <YoloV8.hpp>
#include <unistd.h>
#include <sys/syscall.h>
#include <thread>

using namespace cv;  // Mat, imshow, waitKey, getTickCount are used unqualified below
using namespace std; // string, vector

int VideoDet(int index, bool showDet, bool useGPU, bool printPerStepInfo)
{
    size_t threadId = static_cast<size_t>(syscall(SYS_gettid));
    // std::cout << "index:" << index << " thread id: " << threadId << std::endl;

    cv::VideoCapture capture("./test/test_car_person_1080P.mp4");

    // Check that the video opened successfully
    if (!capture.isOpened())
    {
        std::cout << "Failed to open the video file" << std::endl;
        return -1;
    }

    int frameCount = capture.get(cv::CAP_PROP_FRAME_COUNT); // total number of frames
    double fps = capture.get(cv::CAP_PROP_FPS);             // frame rate
    int delay = int(1000 / fps);                            // inter-frame delay in ms derived from the frame rate
    // delay=1;

    std::string model_path = "./models/yolov8n.onnx";
    std::string lable_path = "./models/lable.txt";

    int GPUCount = 2;
    int device_id = 0;
    if (index >= GPUCount)
    {
        device_id = index % GPUCount;
    }
    else
    {
        device_id = index;
    }

    // device_id=0;

    YoloV8 yoloV8(model_path, lable_path, useGPU, device_id);

    yoloV8.index = index;
    yoloV8.threadId = threadId;
    yoloV8.videoFps = fps;
    yoloV8.frameCount = frameCount;

    // std::cout << "device_id:" << yoloV8.device_id << std::endl;

    // vector<DetectionResult> detectionResult;
    // Mat frame=cv::imread("../test/dog.jpg");
    // yoloV8.Detect(frame, detectionResult);
    // std::cout << "detectionResult size:" << detectionResult.size() << std::endl;

    string winname = "detectionResult-" + std::to_string(index);

    while (true)
    {

        double start = (double)cv::getTickCount();
        delay = int(1000 / fps);

        Mat frame;
        bool success = capture.read(frame); // read one frame

        // Check whether a frame was actually read
        if (!success)
        {
            std::cout << "index:" << index << ", finished reading the video" << std::endl;
            yoloV8.PrintAvgCostTime();
            break;
        }

        vector<DetectionResult> detectionResult;

        yoloV8.Detect(frame, detectionResult);

        // std::cout <<"index:"<<index<< " thread id: " << threadId << " detectionResult size: " << detectionResult.size() << std::endl;
        yoloV8.detectionResultSize = detectionResult.size();

        if (printPerStepInfo)
        {
            yoloV8.PrintCostTime();
            yoloV8.PrintAvgCostTime();
        }

        if (showDet)
        {
            yoloV8.Draw(frame, detectionResult);

            imshow(winname, frame);

            double costTime = ((double)getTickCount() - start) / getTickFrequency();
            delay = delay - int(costTime * 1000); // costTime is in seconds; convert to ms before subtracting
            if (delay <= 0)
            {
                delay = 1;
            }

            if (waitKey(delay) == 27) // press ESC to exit the loop
            {
                break;
            }
        }
    }

    capture.release(); // release the video capture

    if (showDet)
    {
        cv::destroyWindow(winname);
    }

    return 0;
}

int main(int argc, char *const argv[])
{
    int threadCount = 1;
    bool showDet = false;
    bool useGPU = false;
    bool printPerStepInfo = true;

    const char *keys ="{h help                || print this message}"
        "{c threadCount         | 1     | run thread number}"
        "{s showDet             | false | show detection result}"
        "{g useGPU              | false | use GPU}"
        "{p printPerStepInfo    | false | print per Step Info}";

    cv::CommandLineParser parser(argc, argv, keys);

    if(parser.has("help"))
    {
        parser.about("YoloV8 demo v1.0.0");
        parser.printMessage();
        return 0;
    }

    threadCount=parser.get<int>("threadCount");
    showDet=parser.get<bool>("showDet");
    useGPU=parser.get<bool>("useGPU");
    printPerStepInfo=parser.get<bool>("printPerStepInfo");

    std::cout << std::boolalpha;
    std::cout << "threadCount:" << threadCount << ",showDet:" << showDet<< ",useGPU:" << useGPU << ",printPerStepInfo:" << printPerStepInfo << std::endl;

    for (int i = 0; i < threadCount; i++)
    {
        std::thread thread(VideoDet, i, showDet, useGPU, printPerStepInfo);
        thread.detach();
    }

    // Keep the main thread alive while the detached detection threads run
    while (true)
    {
        sleep(100);
    }

    return 0;
}
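YoloV8.hpp and its implementation ship with the source download and are not reproduced in the post. The declaration below is only a sketch inferred from how main.cpp uses the class (the constructor signature, the bookkeeping fields it writes, and the Detect/Draw/Print* calls); the DetectionResult field types and the constructor body are assumptions, shown mainly to illustrate how the per-thread device_id would typically reach ONNX Runtime's CUDA execution provider.

// Sketch only -- inferred from main.cpp, not the header shipped with the download.
#pragma once
#include <onnxruntime_cxx_api.h>
#include <opencv2/core.hpp>
#include <memory>
#include <string>
#include <vector>

struct DetectionResult // assumed layout
{
    cv::Rect box;
    int classId;
    float confidence;
};

class YoloV8
{
public:
    YoloV8(const std::string &modelPath, const std::string &labelPath,
           bool useGPU, int deviceId)
        : device_id(deviceId)
    {
        Ort::SessionOptions options;
        if (useGPU)
        {
            // The device_id chosen per thread in VideoDet would normally be
            // handed to the CUDA execution provider like this.
            OrtCUDAProviderOptions cuda{};
            cuda.device_id = deviceId;
            options.AppendExecutionProvider_CUDA(cuda);
        }
        session = std::make_unique<Ort::Session>(env, modelPath.c_str(), options);
        // ... load class names from labelPath, cache input/output tensor names, etc.
    }

    void Detect(cv::Mat &frame, std::vector<DetectionResult> &results);
    void Draw(cv::Mat &frame, const std::vector<DetectionResult> &results);
    void PrintCostTime();
    void PrintAvgCostTime();

    // Bookkeeping fields written from main.cpp
    int index = 0;
    size_t threadId = 0;
    double videoFps = 0.0;
    int frameCount = 0;
    size_t detectionResultSize = 0;
    int device_id = 0;

private:
    Ort::Env env{ORT_LOGGING_LEVEL_WARNING, "yolov8"};
    std::unique_ptr<Ort::Session> session;
};

Since every detection thread constructs its own YoloV8 instance, each thread ends up with its own Ort::Session bound to the GPU selected by index % GPUCount.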

Download

Source code download
