Installing x264, FFmpeg, and nv-codec-headers on Ubuntu 18.04 with GPU hardware acceleration

1. x264
shell
./configure --prefix=/usr/local --enable-shared --enable-static --enable-debug --disable-asm
make
make install
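
After installing, it can help to refresh the dynamic-linker cache and confirm the library is visible; a quick check, assuming the default --prefix=/usr/local used above:

shell
sudo ldconfig                    ## refresh the shared-library cache so the new libx264.so is found
pkg-config --modversion x264     ## should print the installed x264 version
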
2. Install nv-codec-headers

Link: Releases · FFmpeg/nv-codec-headers

shell
## Clone
git clone https://git.videolan.org/git/ffmpeg/nv-codec-headers.git


## Use git tag to list the available versions; n11.0.10.2 is chosen here. The README in the repo states it supports Linux driver 455.28 or newer.
git tag

## The CUDA 11.7 installation on this PC uses driver 515.65.01, which satisfies the requirement
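## Optional sanity check: confirm the installed driver version (a sketch using nvidia-smi)
nvidia-smi --query-gpu=driver_version --format=csv,noheader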

git checkout n11.0.10.2 -b nv-codec-headers

cd nv-codec-headers/
make

make install


pkg-config --modversion ffnvcodec  ## verify ffnvcodec with pkg-config; if it is not found, add the directory containing ffnvcodec.pc to PKG_CONFIG_PATH
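
With the default PREFIX, make install places ffnvcodec.pc under /usr/local/lib/pkgconfig; if pkg-config still cannot find it, exporting that directory is usually enough (a sketch, adjust to your actual install prefix):

shell
export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:$PKG_CONFIG_PATH
pkg-config --modversion ffnvcodec    ## should now print the header version, e.g. 11.0.10.2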

3. ffmpeg-4.2

shell
## Install build dependencies
sudo apt-get install build-essential yasm cmake libtool libc6 libc6-dev unzip wget libnuma1 libnuma-dev
sudo apt-get install -y autoconf automake build-essential git libass-dev libfreetype6-dev libsdl2-dev libtheora-dev libtool libva-dev libvdpau-dev libvorbis-dev libxcb1-dev libxcb-shm0-dev libxcb-xfixes0-dev pkg-config texinfo wget zlib1g-dev
sudo apt install libavformat-dev
sudo apt install libavcodec-dev
sudo apt install libswresample-dev
sudo apt install libswscale-dev
sudo apt install libavutil-dev
sudo apt install libsdl1.2-dev



git clone git://source.ffmpeg.org/ffmpeg.git -b release/4.2 --depth=1    ## fetch the source
cd ffmpeg
./configure --prefix=/usr/local/ffmpeg \
            --enable-shared \
            --disable-static \
            --disable-doc \
            --disable-ffplay \
            --disable-ffmpeg \
            --disable-ffprobe \
            --enable-gpl \
            --enable-libx264 \
            --enable-cuda \
            --enable-cuvid \
            --enable-nvenc \
            --enable-nvdec \
            --enable-nonfree \
            --enable-libnpp \
            --enable-debug \
            --extra-cflags=-I/usr/local/cuda/include \
            --extra-ldflags=-L/usr/local/cuda/lib64

make
make install
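
Because the prefix above is /usr/local/ffmpeg, the shared libraries land outside the default linker search path; one way to make them visible to the loader and to pkg-config (a sketch, assuming that prefix):

shell
echo "/usr/local/ffmpeg/lib" | sudo tee /etc/ld.so.conf.d/ffmpeg.conf
sudo ldconfig
export PKG_CONFIG_PATH=/usr/local/ffmpeg/lib/pkgconfig:$PKG_CONFIG_PATH   ## so pkg-config can find libavcodec, libavformat, ...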

Source code example

cpp
#include <iostream>
#include <iomanip>        // std::fixed, std::setprecision
#include <cstdio>         // printf, fopen, fwrite, sprintf
#include <cstring>        // memcpy, memset
#include <pthread.h>      // pthread_mutex_t
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/time.h>     // gettimeofday
#include <sys/socket.h>   // socket, AF_INET, SOCK_DGRAM
#include <unistd.h>
#include <thread>
#include <atomic>
#include <fstream>
#include <mutex>          // std::mutex, std::unique_lock

#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/videoio.hpp"
#include "opencv2/opencv.hpp"
#include "opencv2/imgproc/types_c.h"
#include <signal.h>
#include "network_comm.h"

extern "C" {
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
// #include "libavdevice/avdevice.h"
#include <libavutil/opt.h>
#include "libswscale/swscale.h"
#include "libpostproc/postprocess.h"
#include "libavutil/time.h"
#include "x264.h"
#include "x264_config.h"
}


pthread_mutex_t g_mutex1 = PTHREAD_MUTEX_INITIALIZER;   // mutex
pthread_mutex_t g_mutex2 = PTHREAD_MUTEX_INITIALIZER;   // mutex
using namespace std;
using namespace cv;
Network_COMM network_COMM;

// Load class names


// Convert a YUYV image to YUV420P and copy it into an FFmpeg AVFrame
void yuyvToYuv420pCopyAVFrame(const cv::Mat& yuyvImage, AVFrame* avFrame) {
    // Make sure the input image really is YUYV (2 bytes per pixel)
    if (yuyvImage.type() != CV_8UC2) {
        std::cerr << "Input image must be in YUYV format (CV_8UC2)" << std::endl;
        return;
    }

    int width = yuyvImage.cols;
    int height = yuyvImage.rows;

    // Set up a SwsContext for the colour-space conversion
    SwsContext* swsCtx = sws_getContext(
        width, height, AV_PIX_FMT_YUYV422, // input format
        width, height, AV_PIX_FMT_YUV420P, // output format
        SWS_BILINEAR,                      // interpolation method
        nullptr, nullptr, nullptr);
    if (!swsCtx) {
        std::cerr << "sws_getContext failed" << std::endl;
        return;
    }

    // Source and destination plane pointers / strides
    const uint8_t* src[] = { yuyvImage.data };
    int srcStride[] = { static_cast<int>(yuyvImage.step[0]) };
    uint8_t* dst[] = { avFrame->data[0], avFrame->data[1], avFrame->data[2] };
    int dstStride[] = { avFrame->linesize[0], avFrame->linesize[1], avFrame->linesize[2] };

    // Perform the conversion
    sws_scale(swsCtx, src, srcStride, 0, height, dst, dstStride);

    // Clean up
    sws_freeContext(swsCtx);
}

// Convert an OpenCV BGR image to YUV420P and copy it into an FFmpeg AVFrame
void rgbToYuv420pCopyAVFrame(const cv::Mat& img, AVFrame* av_frame) {
    // Make sure the input image is BGR
    if (img.type() != CV_8UC3) {
        std::cerr << "Error: Input image must be of type CV_8UC3 (BGR)." << std::endl;
        return;
    }

    // Convert to planar YUV420 (I420) using OpenCV
    cv::Mat yuv_img;
    cv::cvtColor(img, yuv_img, cv::COLOR_BGR2YUV_I420);

    // Note: the memcpy calls below assume av_frame->linesize[i] equals the plane
    // width (true for typical well-aligned resolutions such as 1920x1080).

    // Copy the Y plane
    memcpy(av_frame->data[0], yuv_img.data, av_frame->width * av_frame->height);

    // The U and V planes are half the width and half the height in YUV420P
    int uv_width = av_frame->width / 2;
    int uv_height = av_frame->height / 2;

    // Copy the U plane
    memcpy(av_frame->data[1], yuv_img.data + av_frame->width * av_frame->height, uv_width * uv_height);

    // Copy the V plane
    memcpy(av_frame->data[2], yuv_img.data + av_frame->width * av_frame->height + (uv_width * uv_height), uv_width * uv_height);
}




FILE *out = fopen("out.h264", "wb+");
FILE *fp = fopen("out.txt", "wb+");


void MAIN_HandleSig(int signo)
{
    if (SIGINT == signo || SIGTERM == signo)
    {
        fclose(fp);
        fclose(out);
        printf("\n\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!! exiting main !!!!!!!!!!!!!!!!!!!!!!!!\n\n\n");
        std::exit(0);
    }
}



int main() {

    struct sigaction newact, oldact;

    /* Install the signal handler */
    newact.sa_handler = MAIN_HandleSig;
    sigemptyset(&newact.sa_mask);
    sigaddset(&newact.sa_mask, SIGQUIT);
    newact.sa_flags = 0;

    sigaction(SIGINT, &newact, &oldact);  // the previous action is saved into oldact
    sigaction(SIGTERM, &newact, &oldact); // the previous action is saved into oldact

    float           seconds;
    char            result[100] = {0};
    int             _add;


    cv::VideoCapture capture;
    capture.open("../a.mp4");
    if (!capture.isOpened()) {
        printf("../a.mp4 open error\r\n");
        return -1;
    }

    cv::Size const frame_size(capture.get(cv::CAP_PROP_FRAME_WIDTH), capture.get(cv::CAP_PROP_FRAME_HEIGHT));
    std::cout << "Camera resolution: " << frame_size.width << "x" << frame_size.height << std::endl;

    cv::Mat rgbImage(frame_size.height, frame_size.width, CV_8UC3);

    // Create a UDP socket
    int udp_sockfd = socket(AF_INET, SOCK_DGRAM, 0);
    if (udp_sockfd < 0) {
        std::cerr << "Failed to create socket." << std::endl;
        return -1;
    }

    int frameCount = 0;

/*------------------------------------------------------- FFmpeg encoding: start -----------------------------------------------------------------*/
    // Initialize the FFmpeg library (deprecated since FFmpeg 4.0 but still harmless here)
    av_register_all();

    // Alternative: look the encoder up by codec ID instead of by name
    // AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    // if (!codec) {
    //     std::cerr << "AVCodec not found\n";
    //     return -1;
    // }

    // Find the NVENC H.264 encoder by name
    AVCodec* codec = avcodec_find_encoder_by_name("h264_nvenc");
    if (!codec) {
        cout << "InitVideoCodec: failed to find the h264_nvenc encoder" << endl;
        return -1;
    }

    // Sanity check: make sure this FFmpeg build knows about the CUDA device type.
    // (h264_nvenc accepts plain YUV420P frames from system memory, so this example
    // never creates an explicit hardware device context.)
    enum AVHWDeviceType type = av_hwdevice_find_type_by_name("cuda");
    if (type == AV_HWDEVICE_TYPE_NONE) {
        printf("Device type %s is not supported.\n", "cuda");
        printf("Available device types:");
        while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
            printf(" %s", av_hwdevice_get_type_name(type));
        printf("\n");
        return -1;
    }



    // Allocate the encoder context
    AVCodecContext* codec_ctx = avcodec_alloc_context3(codec);
    if (!codec_ctx) {
        std::cerr << "Could not allocate video codec context\n";
        return -1;
    }

    // Set the encoder parameters
    codec_ctx->codec_id = AV_CODEC_ID_H264;
    codec_ctx->bit_rate = 5000000;                // target bitrate
    codec_ctx->width = frame_size.width;          // video width
    codec_ctx->height = frame_size.height;        // video height
    codec_ctx->time_base = (AVRational){1, 30};   // time base, 30 fps
    codec_ctx->framerate = (AVRational){30, 1};   // frame rate
    codec_ctx->gop_size = 60;                     // keyframe interval
    codec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;      // pixel format

    av_opt_set(codec_ctx->priv_data, "tune", "zerolatency", 0); // low-latency tuning
    av_opt_set(codec_ctx->priv_data, "preset", "medium", 0);    // "medium" preset
    av_opt_set(codec_ctx->priv_data, "crf", "23", 0);           // CRF 23 is the usual good-quality default

    // Open the encoder
    if (avcodec_open2(codec_ctx, codec, NULL) < 0) {
        std::cerr << "Could not open codec\n";
        return -1;
    }

    // Allocate the frame and the packet
    AVFrame *frame = av_frame_alloc();
    if (!frame) {
        std::cerr << "Could not allocate AVFrame\n";
        return -1;
    }
    // Set the AVFrame parameters
    frame->format = codec_ctx->pix_fmt;
    frame->width = codec_ctx->width;
    frame->height = codec_ctx->height;

    // Allocate the frame buffers
    if (av_frame_get_buffer(frame, 0) < 0) {
        std::cerr << "Failed to allocate memory for AVFrame" << std::endl;
        return -1;
    }

    // Allocate a new AVPacket
    AVPacket* pkt = av_packet_alloc();
    if (!pkt) {
        std::cerr << "Could not allocate the packet\n";
        return -1;
    }

/*------------------------------------------------------- FFmpeg encoding: setup end -----------------------------------------------------------------*/
    cv::Mat readImage;
    struct timeval tv, tv1, tv2;
    double time1, time2 = 0;   // milliseconds

    while (true)
    {
        if (!capture.read(readImage))
        {
            printf("Video capture returned false !!!\r\n");
            break;
        }



        // Convert the BGR frame to YUV420P and copy it into the AVFrame
        rgbToYuv420pCopyAVFrame(readImage, frame);
        // Note: frame->pts is never set in this example; NVENC still encodes, but a
        // real application should assign monotonically increasing timestamps.

        // Start timing
        gettimeofday(&tv, 0);

        // Encode the frame
        int ret = avcodec_send_frame(codec_ctx, frame);
        if (ret < 0) {
            std::cerr << "SDI Error sending a frame for encoding\n";
            return -1;
        }

        while (ret >= 0)
        {
            ret = avcodec_receive_packet(codec_ctx, pkt);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            {
                // The encoder needs more input (or is fully drained)
                break;
            }
            else if (ret < 0) {
                std::cerr << "SDI Error during encoding\n";
                return -1;
            }

            // Write the encoded packet to the output file
            fwrite(pkt->data, 1, pkt->size, out);

            // Stop timing and compute the per-frame encoding latency
            gettimeofday(&tv2, 0);
            tv2.tv_sec -= tv.tv_sec;
            tv2.tv_usec -= tv.tv_usec;
            time2 = 1000000LL * tv2.tv_sec + tv2.tv_usec;
            time2 /= 1000;
            cout << "Frame SDI:" << ++frameCount << fixed << setprecision(2) << "   time2 ======== " << time2 << " ms" << endl;

            // Log "<frame number>, <latency>" to out.txt
            memset(result, 0, sizeof(result));
            _add = sprintf(result, "%d ,", frameCount);
            _add += sprintf(result + _add, "%.2f\n ", time2);
            fwrite(result, 1, _add, fp);

            // // Split the packet into UDP-sized chunks and send them (disabled here)
            // std::vector<std::vector<char>> packets = network_COMM.splitData((char *)pkt->data, pkt->size, 14000);
            //
            // // Print the split result and send each chunk
            // for (int i = 0; i < packets.size(); i++) {
            //     std::cout << "SDI Packet " << i + 1 << " size: " << packets[i].size() << " bytes" << std::endl;
            //     network_COMM.UDP_send(udp_sockfd, "192.168.1.136", 9001,  packets[i].data(), packets[i].size());
            // }

            // Release the packet buffer before the next receive call
            av_packet_unref(pkt);
        }


    }
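
    // Drain the encoder so frames still buffered inside NVENC are written out
    // (a minimal flush sketch):
    avcodec_send_frame(codec_ctx, nullptr);          // signal end of stream
    while (avcodec_receive_packet(codec_ctx, pkt) == 0) {
        fwrite(pkt->data, 1, pkt->size, out);        // write out the remaining packets
        av_packet_unref(pkt);
    }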

    av_frame_free(&frame);             // free the AVFrame
    av_packet_free(&pkt);              // free the AVPacket (unref alone would leak the packet struct)
    avcodec_free_context(&codec_ctx);  // free the encoder context
    fclose(out);
    fclose(fp);
    close(udp_sockfd);

    return 0;
}

CMakeLists.txt example

cmake
set(x264_libs_DIR /usr/local/lib)                        ## x264
set(x264_headers_DIR /usr/local/include) 
add_library(x264 SHARED IMPORTED )
set_target_properties(x264 PROPERTIES IMPORTED_LOCATION ${x264_libs_DIR}/libx264.so )


set(ffmpeg_libs_DIR /usr/local/lib)                         ## ffmpeg (adjust to /usr/local/ffmpeg/lib if you kept the --prefix used above)
set(ffmpeg_headers_DIR /usr/local/include)                  ## (and /usr/local/ffmpeg/include accordingly)

# External dependencies that find_package cannot locate can be added with add_library
# SHARED means a shared library is being added
# IMPORTED means an existing, pre-built library is imported
add_library( avcodec SHARED IMPORTED )

# Specify the import location of each added dependency

set_target_properties( avcodec PROPERTIES IMPORTED_LOCATION ${ffmpeg_libs_DIR}/libavcodec.so )

add_library( avfilter SHARED IMPORTED )
set_target_properties( avfilter PROPERTIES IMPORTED_LOCATION ${ffmpeg_libs_DIR}/libavfilter.so )

add_library( swresample SHARED IMPORTED )
set_target_properties( swresample PROPERTIES IMPORTED_LOCATION ${ffmpeg_libs_DIR}/libswresample.so )

add_library( swscale SHARED IMPORTED )
set_target_properties( swscale PROPERTIES IMPORTED_LOCATION ${ffmpeg_libs_DIR}/libswscale.so )

add_library( avformat SHARED IMPORTED )
set_target_properties( avformat PROPERTIES IMPORTED_LOCATION ${ffmpeg_libs_DIR}/libavformat.so )

add_library( avutil SHARED IMPORTED )
set_target_properties( avutil PROPERTIES IMPORTED_LOCATION ${ffmpeg_libs_DIR}/libavutil.so ) 
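
# The *_headers_DIR variables above are never referenced; a minimal sketch of
# adding them to the include path:
include_directories(${ffmpeg_headers_DIR} ${x264_headers_DIR})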


target_link_libraries(my_project 
        avformat avcodec x264  avfilter  avutil avdevice swscale swresample)
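
A typical out-of-source build with the CMakeLists above (assuming the target name my_project matches your project):

shell
mkdir -p build && cd build
cmake ..
make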