A simple video player example based on FFmpeg 4.x and QOpenGLWidget, implementing video decoding and rendering into a Qt window.
1) FFmpeg decoding interface; video decoding supports both software and hardware paths.
With hardware decoding, the decoded data must be copied from GPU to CPU memory once decoding completes. av_hwframe_map is tried first; only if it fails is av_hwframe_transfer_data used:
av_hwframe_map(frameHW, frame, AV_HWFRAME_MAP_READ); // map the hardware frame
/*
av_hwframe_map maps a hardware frame. The third parameter accepts three flags:
AV_HWFRAME_MAP_READ: the mapped frame is readable.
AV_HWFRAME_MAP_WRITE: the mapped frame is writable.
AV_HWFRAME_MAP_DIRECT: avoid any data copy (depends on hardware support).
Prefer AV_HWFRAME_MAP_DIRECT to reduce memory-copy overhead.
With AV_HWFRAME_MAP_DIRECT, make sure the application logic never modifies the mapped software frame, to avoid unwanted side effects.
With AV_HWFRAME_MAP_READ you get data consistency, at a possible performance cost.
*/
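A minimal sketch of this map-first / transfer-fallback pattern (the full version is VideoDecode::dataCopy below; frame is assumed to be the GPU-side AVFrame and frameHW an allocated CPU-side destination frame):
// Sketch: try a low-copy map first, then fall back to an explicit GPU->CPU transfer.
static bool copyHWFrameToCPU(AVFrame* frame, AVFrame* frameHW)
{
if (av_hwframe_map(frameHW, frame, AV_HWFRAME_MAP_READ) >= 0)
{
frameHW->width = frame->width; // the map call does not fill in the dimensions
frameHW->height = frame->height;
return true;
}
// Mapping is not supported for this device/format: do the slower explicit copy.
if (av_hwframe_transfer_data(frameHW, frame, 0) < 0)
return false;
av_frame_copy_props(frameHW, frame); // copy only the "metadata" fields
return true;
}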
2) Frame data is displayed with a QOpenGLWidget.
class FrameOpenGLWidget : public QOpenGLWidget, protected QOpenGLFunctions {
};
The class can display YUV420P, NV12 and RGB frame data; to switch between these formats, call its setFrameDataFormat method.
3) After decoding, the OpenGL textures are re-initialized according to the stream's pixel format, so that YUV420P, NV12 or RGB data can be displayed.
When the stream format changes, a signal is emitted and the UI slot calls setFrameDataFormat to re-initialize the textures. (Texture initialization is not thread-safe; it has to run on the UI thread, i.e. the thread that owns the FrameOpenGLWidget.)
4) Decode/playback timing synchronization.
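The approach used in the decode thread below: convert each frame's PTS to microseconds, compare it with the wall-clock time elapsed since playback started, sleep when the frame is early, and drop it when it is more than 100 ms late. A condensed sketch (names mirror the variables in decodeProccessThread):
#include <chrono>
#include <thread>
// Returns true if the frame should be rendered, false if it is too late and should be dropped.
static bool syncToPlayTs(long long llPlayTs, long long llStartPlayTs,
std::chrono::high_resolution_clock::time_point tpStart)
{
long long llPlayTsDiff = llPlayTs - llStartPlayTs; // play time relative to the first frame (us)
auto elapsed = std::chrono::duration_cast<std::chrono::microseconds>(
std::chrono::high_resolution_clock::now() - tpStart);
long long delay = llPlayTsDiff - elapsed.count(); // >0: too early, <0: running late
if (delay > 0)
{
std::this_thread::sleep_for(std::chrono::microseconds(delay)); // wait until display time
return true;
}
return delay >= -100000; // more than 100 ms late: drop the frame to catch up
}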
1. Qt project configuration (.pro file)
QT += core gui opengl
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11
INCLUDEPATH += $$PWD/ffmpeg-4.2.2-win32/include
LIBS += -L$$PWD/ffmpeg-4.2.2-win32/lib -lavcodec -lavformat -lavutil -lswscale
# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
SOURCES += \
frameopenglwidget.cpp \
main.cpp \
mainwindow.cpp \
videodecode.cpp
HEADERS += \
YUV420PWidget.h \
frameopenglwidget.h \
mainwindow.h \
videodecode.h
FORMS += \
mainwindow.ui
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
DISTFILES +=
RESOURCES += \
player.qrc
2. Video decoding class
videodecode.h
#ifndef VIDEODECODE_H
#define VIDEODECODE_H
// Video decoding class
#include <QString>
#include <QImage>
#include <thread>
#include <vector>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}
/*
struct AVFormatContext;
struct AVCodecContext;
struct AVRational;
struct AVPacket;
struct AVFrame;
struct AVCodec;
struct SwsContext;
struct AVBufferRef;
*/
// Stream type
enum StreamType
{
StreamType_Video = 0,
StreamType_Audio = 1,
StreamType_Text = 2,
};
// Format type
enum FormatType
{
FormatType_RGB24 = 0,
FormatType_RGB32 = 1,
FormatType_YUV420P = 2,
FormatType_NV12 = 3,
};
// File status
enum FileStatus
{
FileStatus_OverFileTail = 0, // reached the end of the file
FileStatus_OverFileHead = 1, // reached the start of the file
FileStatus_TrigeException = 2, // an exception occurred
};
// Stream decode callback
typedef void (*StreamDecodeCallback)(int nStreamType, int nFormatType, long long llDecodeTs, long long llPlayTs, int width, int height, unsigned char ** pStreamData, int * linesize, void * pUserData);
// File status callback
typedef void (*FileStatusCallback)(int FileStatus, int nErrorCode, void * pUserData);
class VideoDecode
{
public:
VideoDecode();
~VideoDecode();
public:
void globalInit();// initialize the FFmpeg library (needs to run only once per process)
void globalUnInit();// uninitialize the FFmpeg library (needs to run only once per process)
public:
void setStreamDecodeCallback(StreamDecodeCallback funStreamDecodeCallback, void * userData);
void setFileStatusCallback(FileStatusCallback funFileStatusCallback, void * userData);
void setHWDecoder(bool flag); // whether to use a hardware decoder
bool isHWDecoder();
bool open(const QString& url); // open a media file, or a network stream (rtmp, rtsp, http)
void close(); // close
bool isClose();
public:
void decodeProccessThread(); // decode thread
static QImage ConvertRGB24FrameToQImage(unsigned char *data, int width, int height);
protected:
void initHWDecoder(const AVCodec *codec);
bool dataCopy(); // after hardware decoding, copy the frame data from GPU to CPU
void freeDecode();
qreal rationalToDouble(AVRational* rational);
private:
// FFmpeg objects
AVFormatContext *formatCtx = nullptr;
AVCodecContext *codecCtx = nullptr;
AVFrame *frame = nullptr, *rgbFrame = nullptr;
AVFrame *frameHW = nullptr;
SwsContext *swsCtx = nullptr;
uchar* buffer = nullptr; // the YUV image is converted to RGB24; this buffer holds the converted image data
AVPacket* packet = nullptr;
int videoStreamIndex = -1; // video stream index
qint64 totalTime = 0; // total video duration
qint64 totalFrames = 0; // total number of frames
qint64 obtainFrames = 0; // number of frames obtained so far
qint64 pts = 0; // display timestamp of the current frame
qreal frameRate = 0; // video frame rate
int width = 0; // video width
int height = 0; // video height
std::vector<int> vecHWDeviceTypes; // hardware decoder types supported in the current environment
AVBufferRef* hw_device_ctx = nullptr; // reference to the hardware device context
bool hwDecoderFlag = false; // whether hardware decoding is in use
std::thread threadDecode;
bool stopWorkFlag = true;
StreamDecodeCallback funCallbackByStreamDecode = nullptr;
void * userDataByStreamDecode = nullptr;
FileStatusCallback funCallbackByFileStatus = nullptr;
void * userDataByFileStatus = nullptr;
};
#endif // VIDEODECODE_H
videodecode.cpp
#include "videodecode.h"
#include <QTime>
#include <QDebug>
#include <QStringList>
#include <chrono>
/*********************************** Callback for FFmpeg to select the GPU hardware decode frame format *****************************************/
static enum AVPixelFormat g_pixelFormat;
/**
* @brief Callback that selects the pixel format of GPU hardware-decoded frames
* @param s
* @param fmt
* @return
*/
AVPixelFormat get_hw_format(AVCodecContext* s, const enum AVPixelFormat* fmt)
{
Q_UNUSED(s)
const enum AVPixelFormat* p;
for (p = fmt; *p != AV_PIX_FMT_NONE; p++)
{
if(*p == g_pixelFormat)
{
return *p;
}
}
qDebug() << "无法获取硬件表面格式."; // 当同时打开太多路视频时,如果超过了GPU的能力,可能会返回找不到解码帧格式
return AV_PIX_FMT_NONE;
}
/************************************************ END ******************************************************/
VideoDecode::VideoDecode()
{
}
VideoDecode::~VideoDecode()
{
}
void VideoDecode::globalInit()
{
// av_register_all(); // removed from the FFmpeg sources
/**
* Initialize the network stack, used for opening network streams. This function only exists to
* work around thread-safety issues in old GnuTLS/OpenSSL libraries; once support for those old
* libraries is dropped, it will be deprecated and serve no purpose.
*/
avformat_network_init();
}
void VideoDecode::globalUnInit()
{
avformat_network_deinit();
}
qreal VideoDecode::rationalToDouble(AVRational* rational)
{
qreal frameRate = (rational->den == 0) ? 0 : (qreal(rational->num) / rational->den);
return frameRate;
}
void VideoDecode::setStreamDecodeCallback(StreamDecodeCallback funStreamDecodeCallback, void * userData)
{
funCallbackByStreamDecode = funStreamDecodeCallback;
userDataByStreamDecode = userData;
}
void VideoDecode::setFileStatusCallback(FileStatusCallback funFileStatusCallback, void * userData)
{
funCallbackByFileStatus = funFileStatusCallback;
userDataByFileStatus = userData;
}
// Initialize the hardware decoder
void VideoDecode::initHWDecoder(const AVCodec *codec)
{
if(!codec) return;
for(int i = 0; ; i++)
{
const AVCodecHWConfig* config = avcodec_get_hw_config(codec, i); // retrieve a hardware configuration supported by the codec
if(!config)
{
qDebug() << "Failed to open a hardware decoder!";
return; // no supported hardware configuration was found
}
if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) // does this config use a hardware device context?
{
for(auto type : vecHWDeviceTypes)
{
if(config->device_type == AVHWDeviceType(type)) // is this device type one of the supported hardware decoders?
{
g_pixelFormat = config->pix_fmt;
// open a device of the given type and create an AVHWDeviceContext for it
int ret = av_hwdevice_ctx_create(&hw_device_ctx, config->device_type, nullptr, nullptr, 0);
if(ret < 0)
{
freeDecode();
return ;
}
qDebug() << "Opened hardware decoder:" << av_hwdevice_get_type_name(config->device_type);
codecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx); // create a new reference to the AVBuffer
codecCtx->get_format = get_hw_format; // called by some decoders to select the pixel format used for output frames
return;
}
}
}
}
}
// After hardware decoding, the data must be copied from the GPU to the CPU
bool VideoDecode::dataCopy()
{
if(frame->format != g_pixelFormat)
{
av_frame_unref(frame);
return false;
}
// av_hwframe_map is faster than av_hwframe_transfer_data (av_hwframe_map only exists since FFmpeg 3.3)
int ret = av_hwframe_map(frameHW, frame, AV_HWFRAME_MAP_READ); // map the hardware frame
/*
av_hwframe_map maps a hardware frame. The third parameter accepts three flags:
AV_HWFRAME_MAP_READ: the mapped frame is readable.
AV_HWFRAME_MAP_WRITE: the mapped frame is writable.
AV_HWFRAME_MAP_DIRECT: avoid any data copy (depends on hardware support).
Prefer AV_HWFRAME_MAP_DIRECT to reduce memory-copy overhead.
With AV_HWFRAME_MAP_DIRECT, make sure the application logic never modifies the mapped software frame, to avoid unwanted side effects.
With AV_HWFRAME_MAP_READ you get data consistency, at a possible performance cost.
*/
if(ret >= 0)
{
// mapping the hardware frame succeeded
frameHW->width = frame->width;
frameHW->height = frame->height;
}
else
{
// mapping the hardware frame failed
ret = av_hwframe_transfer_data(frameHW, frame, 0); // copy the decoded data from the GPU to the CPU (frameHW); fairly slow, but hardware decoding is still much faster than software decoding
if(ret < 0)
{
av_frame_unref(frame);
return false;
}
av_frame_copy_props(frameHW, frame); // copy only the "metadata" fields from src to dst
}
return true;
}
void VideoDecode::setHWDecoder(bool flag)
{
hwDecoderFlag = flag;
}
bool VideoDecode::isHWDecoder()
{
return hwDecoderFlag;
}
bool VideoDecode::open(const QString& url)
{
if(url.isNull()) return false;
AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE; // hardware decoder types supported by FFmpeg
QStringList strTypes;
while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE) // iterate over the supported device types
{
vecHWDeviceTypes.push_back(type);
const char* ctype = av_hwdevice_get_type_name(type); // get the string name of the AVHWDeviceType
if(ctype)
{
strTypes.append(QString(ctype));
}
}
qDebug() << "Supported hardware decoders:";
qDebug() << strTypes;
AVDictionary* dict = nullptr;
av_dict_set(&dict, "rtsp_transport", "tcp", 0); // open RTSP streams over TCP; if opening fails with [Error number -135 occurred], try another transport (udp, tcp, udp_multicast, http); streams pushed by VLC, for example, may need udp
av_dict_set(&dict, "max_delay", "3", 0); // maximum mux/demux delay in microseconds; when receiving over UDP the demuxer tries to reorder packets (they may arrive out of order, or get lost entirely); this can be disabled by setting max_delay to zero (via the AVFormatContext max_delay field)
av_dict_set(&dict, "timeout", "1000000", 0); // socket TCP I/O timeout in microseconds; if the wait is too short, the call may return before the connection is established
// open the input stream and return the demuxing context
int ret = avformat_open_input(&formatCtx, // returns the demuxing context
url.toStdString().data(), // URL of the media to open
nullptr, // if non-null, forces a specific input format; null auto-selects the demuxer (file format)
&dict); // options
// free the options dictionary
if(dict)
{
av_dict_free(&dict);
}
// failed to open the media
if(ret < 0)
{
qDebug() << "Failed to avformat_open_input";
return false;
}
// read packets from the media file to obtain stream information
ret = avformat_find_stream_info(formatCtx, nullptr);
if(ret < 0)
{
qDebug() << "Failed to avformat_find_stream_info";
freeDecode();
return false;
}
totalTime = formatCtx->duration / (AV_TIME_BASE / 1000); // total video duration in milliseconds
qDebug() << QString("Total duration: %1 ms [%2]").arg(totalTime).arg(QTime::fromMSecsSinceStartOfDay(int(totalTime)).toString("HH:mm:ss zzz"));
// find the video stream index via the AVMediaType enum (it could also be found by iterating over the streams); the last parameter is unused
videoStreamIndex = av_find_best_stream(formatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
if(videoStreamIndex < 0)
{
qDebug() << "Failed to av_find_best_stream";
freeDecode();
return false;
}
AVStream* videoStream = formatCtx->streams[videoStreamIndex]; // get the video stream by the index found above
// get the video resolution (the AVCodecContext embedded in AVStream is deprecated in newer versions; use AVCodecParameters instead)
width = videoStream->codecpar->width;
height = videoStream->codecpar->height;
frameRate = rationalToDouble(&videoStream->avg_frame_rate); // video frame rate
// find the video decoder by codec ID (newer versions require the return value to be const)
const AVCodec* codec = avcodec_find_decoder(videoStream->codecpar->codec_id);
totalFrames = videoStream->nb_frames;
qDebug() << QString("分辨率:[w:%1,h:%2] 帧率:%3 总帧数:%4 解码器:%5")
.arg(width).arg(height).arg(frameRate).arg(totalFrames).arg(codec->name);
// allocate an AVCodecContext and set its fields to default values
codecCtx = avcodec_alloc_context3(codec);
if(!codecCtx)
{
qDebug() << "Failed to avcodec_alloc_context3";
freeDecode();
return false;
}
// fill the decoder context from the video stream's codecpar
ret = avcodec_parameters_to_context(codecCtx, videoStream->codecpar);
if(ret < 0)
{
qDebug() << "Failed to avcodec_parameters_to_context";
freeDecode();
return false;
}
codecCtx->flags2 |= AV_CODEC_FLAG2_FAST; // allow non-spec-compliant speedup tricks
codecCtx->thread_count = 8; // decode with 8 threads
if(isHWDecoder())
{
initHWDecoder(codec); // initialize the hardware decoder (must be called before avcodec_open2)
}
// open the decoder context; since the codec was already passed to avcodec_alloc_context3, NULL is fine here
ret = avcodec_open2(codecCtx, nullptr, nullptr);
if(ret < 0)
{
qDebug() << "Failed to avcodec_open2";
freeDecode();
return false;
}
// allocate an AVPacket and set its fields to default values
packet = av_packet_alloc();
if(!packet)
{
qDebug() << "Failed to av_packet_alloc";
freeDecode();
return false;
}
// allocate the frames and the RGB conversion buffer
frame = av_frame_alloc();
rgbFrame = av_frame_alloc();
frameHW = av_frame_alloc();
int size = av_image_get_buffer_size(AV_PIX_FMT_RGB24, codecCtx->width, codecCtx->height, 1);
buffer = (uint8_t *)av_malloc(size + 1000);
av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer, AV_PIX_FMT_RGB24,
codecCtx->width, codecCtx->height, 1);
/*
// Initialize the SWS context (YUV -> RGB conversion) up front; this player instead creates it lazily in the decode thread
swsCtx = sws_getContext(codecCtx->width, codecCtx->height, codecCtx->pix_fmt,
codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB24,
SWS_BILINEAR, nullptr, nullptr, nullptr);
*/
stopWorkFlag = false;
std::thread t(std::bind(&VideoDecode::decodeProccessThread,this));
threadDecode = std::move(t);
return true;
}
void VideoDecode::close()
{
stopWorkFlag = true;
// avformat_flush does not flush the AVIOContext (s->pb); if necessary, call avio_flush(s->pb) before calling it
if(formatCtx && formatCtx->pb)
{
avio_flush(formatCtx->pb);
}
if(formatCtx)
{
avformat_flush(formatCtx); // flush the read buffers
}
if(threadDecode.joinable())
{
threadDecode.join();
}
freeDecode();
}
bool VideoDecode::isClose()
{
return stopWorkFlag;
}
QImage VideoDecode::ConvertRGB24FrameToQImage(unsigned char *data, int width, int height)
{
// wrap the raw data in a QImage (note: this constructor does not copy the data, so the buffer must outlive the image)
QImage img(data, width, height, QImage::Format_RGB888);
return img;
}
void VideoDecode::decodeProccessThread()
{
std::chrono::high_resolution_clock::time_point tpStart = std::chrono::high_resolution_clock::now();
int nWaitTimes = 40;
if(frameRate != 0)
{
nWaitTimes = 1000.0/frameRate;
}
long long llDecodeTs = 0;
long long llPlayTs = 0;
long long llStartPlayTs = 0;
bool bStartPlayTsSetValueFlag = false;
bool bProccessFileTail = false;
while (true)
{
if(stopWorkFlag)
{
break;
}
// read the next packet
int readRet = av_read_frame(formatCtx, packet);
if(readRet < 0)
{
if (readRet == AVERROR_EOF)
{
int ret = avcodec_send_packet(codecCtx, packet); // at EOF, send the packet into the decoder to flush out the last few frames; otherwise they cannot be read
if(ret < 0)
{
av_packet_unref(packet);
bProccessFileTail = true;
break;
}
}
else
{
break;
}
}
else
{
if(stopWorkFlag)
{
break;
}
if(packet->stream_index == videoStreamIndex) // decode only video packets
{
av_packet_rescale_ts(packet, formatCtx->streams[videoStreamIndex]->time_base, codecCtx->time_base); // rescale timestamps to the decoder time base
// feed the raw packet into the decoder
int ret = avcodec_send_packet(codecCtx, packet);
if(ret < 0)
{
qDebug() << "Error sending packet";
av_packet_unref(packet);
continue;
}
}
else
{
// other streams (e.g. audio)
av_packet_unref(packet);
continue;
}
}
// receive a decoded frame (only one frame is decoded at a time here)
int ret = avcodec_receive_frame(codecCtx, frame);
if (ret == AVERROR(EAGAIN))
{
av_packet_unref(packet);
continue;
}
else if (ret == AVERROR_EOF)
{
av_packet_unref(packet);
// reading is complete once no AVPacket can be read and the decoder holds no more data
bProccessFileTail = true;
break;
}
else if (ret < 0)
{
qDebug() << "Error during decoding";
av_packet_unref(packet);
continue;
}
else
{
// written this way to handle both software decoding and the case where the hardware decoder failed to open
AVFrame* frameTemp = frame;
if(!frame->data[0]) // taken when the frame was decoded in hardware
{
// copy the decoded data from the GPU to the CPU
if(!dataCopy())
{
av_frame_unref(frameHW);
continue;
}
frameTemp = frameHW;
}
// core timestamp handling
int64_t raw_pts = frameTemp->pts;
int64_t raw_dts = frameTemp->pkt_dts;
// handle undefined timestamps
if (raw_pts == AV_NOPTS_VALUE)
{
// fall back to the DTS, or estimate a PTS (this would need the frame rate and other parameters)
if(raw_dts != AV_NOPTS_VALUE)
{
raw_pts = raw_dts;
}
else
{
raw_pts = 0;
raw_dts = 0;
}
}
// convert to a display time in seconds
double display_time = raw_pts * av_q2d(codecCtx->time_base);
// convert to the global time base (e.g. for audio/video synchronization)
AVRational timeBaseTemp{1, AV_TIME_BASE};//AV_TIME_BASE_Q
llPlayTs = av_rescale_q(raw_pts, codecCtx->time_base, timeBaseTemp);
llDecodeTs = av_rescale_q(raw_dts, codecCtx->time_base, timeBaseTemp);
if(!bStartPlayTsSetValueFlag)
{
llStartPlayTs = llPlayTs;
bStartPlayTsSetValueFlag = true;
}
qDebug("Frame:%4d PTS:%lld display_time:%.2f DTS:%lld llPlayTs:%lld llDecodeTs:%lld packet dts:%lld pts:%lld",
codecCtx->frame_number, raw_pts, display_time, raw_dts, llPlayTs, llDecodeTs, packet->dts, packet->pts);
av_packet_unref(packet); // unref the packet; the refcount drops by one and the buffer is freed when it reaches zero
if(!swsCtx || (frameTemp->width != width || frameTemp->height != height))
{
// (re)allocate
width = frameTemp->width;
height = frameTemp->height;
if(swsCtx)
{
sws_freeContext(swsCtx);
swsCtx = nullptr;
}
if(buffer)
{
av_free(buffer);
buffer = nullptr;
}
int size = av_image_get_buffer_size(AV_PIX_FMT_RGB24, frameTemp->width, frameTemp->height, 1);
buffer = (uint8_t *)av_malloc(size + 1000);
av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer, AV_PIX_FMT_RGB24,
frameTemp->width, frameTemp->height, 1);
swsCtx = sws_getCachedContext(swsCtx,
frameTemp->width, // source width
frameTemp->height, // source height
(AVPixelFormat)frameTemp->format, // source pixel format
frameTemp->width, // destination width
frameTemp->height, // destination height
AV_PIX_FMT_RGB24, // destination pixel format
SWS_BILINEAR, // scaling algorithm (only relevant when source and destination sizes differ); SWS_FAST_BILINEAR is a common choice
nullptr, // source filter; pass NULL if unused
nullptr, // destination filter; pass NULL if unused
nullptr);
}
// sleep to pace playback
long long llPlayTsDiff = llPlayTs - llStartPlayTs;
auto duration = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::high_resolution_clock::now() - tpStart);
// compute how long to wait (in microseconds)
int64_t delay = llPlayTsDiff - duration.count();
// synchronization control
if (delay > 0)
{
std::this_thread::sleep_for(std::chrono::microseconds(delay)); // wait until the target display time
}
else if (delay < -100000)
{
// allow a 100 ms error threshold
// drop the lagging frame to catch up
av_frame_unref(frame);
av_frame_unref(frameHW);
continue;
}
if(frameTemp->format == AV_PIX_FMT_YUV420P)
{
// hand the stream data to the callback (for rendering)
if(funCallbackByStreamDecode && !stopWorkFlag)
{
funCallbackByStreamDecode(StreamType_Video,FormatType_YUV420P,llDecodeTs,llPlayTs,frameTemp->width,frameTemp->height,frameTemp->data, frameTemp->linesize, userDataByStreamDecode);
}
}
else if(frameTemp->format == AV_PIX_FMT_NV12)
{
// hand the stream data to the callback (for rendering)
if(funCallbackByStreamDecode && !stopWorkFlag)
{
funCallbackByStreamDecode(StreamType_Video,FormatType_NV12,llDecodeTs,llPlayTs,frameTemp->width,frameTemp->height,frameTemp->data, frameTemp->linesize, userDataByStreamDecode);
}
}
else
{
// any other format: convert to RGB
// convert the color space to RGB24
sws_scale(swsCtx, frameTemp->data, frameTemp->linesize, 0, frameTemp->height, rgbFrame->data, rgbFrame->linesize);
// hand the stream data to the callback (for rendering)
if(funCallbackByStreamDecode && !stopWorkFlag)
{
funCallbackByStreamDecode(StreamType_Video,FormatType_RGB24,llDecodeTs,llPlayTs,frameTemp->width,frameTemp->height,rgbFrame->data, rgbFrame->linesize, userDataByStreamDecode);
}
}
av_frame_unref(frame);
av_frame_unref(frameHW);
}
}
if(bProccessFileTail && !stopWorkFlag)
{
if(funCallbackByFileStatus != nullptr)
{
funCallbackByFileStatus(FileStatus_OverFileTail, 0, userDataByFileStatus);
}
}
qDebug()<<"thread is eixt";
}
void VideoDecode::freeDecode()
{
// release resources
if (swsCtx)
{
sws_freeContext(swsCtx);
swsCtx = nullptr;
}
if (rgbFrame)
{
av_frame_free(&rgbFrame);
rgbFrame = nullptr;
}
if (frame)
{
av_frame_free(&frame);
frame = nullptr;
}
if(frameHW)
{
av_frame_free(&frameHW);
frameHW = nullptr;
}
if (codecCtx)
{
avcodec_free_context(&codecCtx);
codecCtx = nullptr;
}
if (formatCtx)
{
avformat_close_input(&formatCtx);
formatCtx = nullptr;
}
if(buffer != nullptr)
{
av_free(buffer);
buffer = nullptr;
}
}
3. Frame display widget class
frameopenglwidget.h
#ifndef FRAMEOPENGLWIDGET_H
#define FRAMEOPENGLWIDGET_H
#include <QOpenGLWidget>
#include <QOpenGLShaderProgram>
#include <QOpenGLFunctions>
// Frame data format
enum FrameDataFormat
{
FrameDataFormat_None = -1,
FrameDataFormat_RGB24 = 0,
FrameDataFormat_RGB32 = 1,
FrameDataFormat_YUV420P = 2,
FrameDataFormat_NV12 = 3,
};
class FrameOpenGLWidget : public QOpenGLWidget, protected QOpenGLFunctions
{
public:
FrameOpenGLWidget(QWidget *parent = nullptr);
virtual ~FrameOpenGLWidget();
void setFrameDataFormat(FrameDataFormat eFrameDataFormat);
FrameDataFormat getFrameDataFormat();
// update NV12 frame data
void updateNV12Frame(const uint8_t* yData, const uint8_t* uvData, int y_linesize, int uv_linesize, int width, int height);
// update YUV420P frame data
void updateYUV420PFrame(const uint8_t* yData, const uint8_t* uData, const uint8_t* vData, int y_linesize, int u_linesize, int v_linesize, int width, int height);
// update RGB24 frame data
void updateRGB24Frame(const uint8_t* rgbData, int width, int height, int linesize);
// update RGB32 frame data
void updateRGB32Frame(const uint8_t* rgbData, int width, int height, int linesize);
// update frame data (dispatches on the format)
void updateFrame(int eFrameDataFormat, uint8_t **data, int *linesize, int width, int height);
protected:
void initializeGL() override; // initialize GL
void resizeGL(int w, int h) override; // window size changed
void paintGL() override; // repaint
protected:
void initializeGLByNV12Frame();
void initTextureByNV12Frame();
void releaseTextureByNV12Frame();
void paintGLByNV12Frame();
void initializeGLByYUV420PFrame();
void initTextureByYUV420PFrame();
void releaseTextureByYUV420PFrame();
void paintGLByYUV420PFrame();
void initializeGLByRGBFrame();
void initTextureByRGBFrame();
void releaseTextureByRGBFrame();
void paintGLByRGBFrame();
void initializeGLByRGB32Frame();
void initTextureByRGB32Frame();
void releaseTextureByRGB32Frame();
void paintGLByRGB32Frame();
private:
QOpenGLShaderProgram m_program;
GLuint m_yTexture = 0;
GLuint m_uvTexture = 0;
GLuint m_uTexture = 0;
GLuint m_vTexture = 0;
GLuint m_rgbTexture = 0;
GLuint m_rgb32Texture = 0;
uint8_t* m_yData = nullptr;
uint8_t* m_uvData = nullptr;
uint8_t* m_uData = nullptr;
uint8_t* m_vData = nullptr;
uint8_t* m_nv12Data = nullptr;
uint8_t* m_rgbData = nullptr;
uint8_t* m_rgb32Data = nullptr;
int m_width = 0;
int m_height = 0;
QSize m_size;
QSizeF m_zoomSize;
QPointF m_pos;
bool m_nFirstUpdateFrame = true;
int m_nInitFrameDataFormat = FrameDataFormat_RGB24;
};
#endif // FRAMEOPENGLWIDGET_H
frameopenglwidget.cpp
#include "frameopenglwidget.h"
FrameOpenGLWidget::FrameOpenGLWidget(QWidget *parent) : QOpenGLWidget(parent)
{
m_size.setWidth(1);
m_size.setHeight(1);
m_zoomSize.setWidth(1);
m_zoomSize.setHeight(1);
m_pos.setX(0);
m_pos.setY(0);
m_nInitFrameDataFormat = FrameDataFormat_NV12;
}
FrameOpenGLWidget::~FrameOpenGLWidget()
{
if(m_yData != nullptr)
{
delete []m_yData;
m_yData = nullptr;
}
if(m_uData != nullptr)
{
delete []m_uData;
m_uData = nullptr;
}
if(m_vData != nullptr)
{
delete []m_vData;
m_vData = nullptr;
}
if(m_uvData != nullptr)
{
delete []m_uvData;
m_uvData = nullptr;
}
if(m_rgbData != nullptr)
{
delete []m_rgbData;
m_rgbData = nullptr;
}
if(m_rgb32Data != nullptr)
{
delete []m_rgb32Data;
m_rgb32Data = nullptr;
}
makeCurrent();
switch (m_nInitFrameDataFormat) {
case FrameDataFormat_RGB24:
releaseTextureByRGBFrame();
break;
case FrameDataFormat_RGB32:
releaseTextureByRGB32Frame();
break;
case FrameDataFormat_YUV420P:
releaseTextureByYUV420PFrame();
break;
case FrameDataFormat_NV12:
releaseTextureByNV12Frame();
break;
}
doneCurrent();
}
void FrameOpenGLWidget::setFrameDataFormat(FrameDataFormat eFrameDataFormat)
{
if(m_nInitFrameDataFormat != eFrameDataFormat)
{
makeCurrent();
m_nInitFrameDataFormat = eFrameDataFormat;
switch (m_nInitFrameDataFormat) {
case FrameDataFormat_RGB24:
releaseTextureByRGBFrame();
// re-create the textures
initTextureByRGBFrame();
initializeGLByRGBFrame();
break;
case FrameDataFormat_RGB32:
releaseTextureByRGB32Frame();
// re-create the textures
initTextureByRGB32Frame();
initializeGLByRGB32Frame();
break;
case FrameDataFormat_YUV420P:
releaseTextureByYUV420PFrame();
// re-create the textures
initTextureByYUV420PFrame();
initializeGLByYUV420PFrame();
break;
case FrameDataFormat_NV12:
releaseTextureByNV12Frame();
// re-create the textures
initTextureByNV12Frame();
initializeGLByNV12Frame();
break;
}
doneCurrent();
}
}
FrameDataFormat FrameOpenGLWidget::getFrameDataFormat()
{
return (FrameDataFormat)m_nInitFrameDataFormat;
}
// update NV12 frame data
void FrameOpenGLWidget::updateNV12Frame(const uint8_t* yData, const uint8_t* uvData, int y_linesize, int uv_linesize, int width, int height)
{
if(m_yData == nullptr || m_uvData == nullptr)
{
if(m_yData != nullptr)
{
delete []m_yData;
}
if(m_uvData != nullptr)
{
delete []m_uvData;
}
m_yData = new uint8_t[y_linesize*height + 1];
m_uvData = new uint8_t[uv_linesize*height/2 + 1];
}
else
{
if(width != m_width || height != m_height)
{
if(m_yData != nullptr && m_uvData != nullptr)
{
delete []m_yData;
delete []m_uvData;
m_yData = new uint8_t[y_linesize*height + 1];
m_uvData = new uint8_t[uv_linesize*height/2 + 1];
}
}
}
memcpy(m_yData, yData, y_linesize*height);
memcpy(m_uvData, uvData, uv_linesize*height/2);
m_width = width;
m_height = height;
m_size.setWidth(width);
m_size.setHeight(height);
if(m_nFirstUpdateFrame)
{
resizeGL(size().width(), size().height());
m_nFirstUpdateFrame = false;
}
update();
}
// update YUV420P frame data
void FrameOpenGLWidget::updateYUV420PFrame(const uint8_t* yData, const uint8_t* uData, const uint8_t* vData, int y_linesize, int u_linesize, int v_linesize, int width, int height)
{
if(m_yData == nullptr || m_uData == nullptr || m_vData == nullptr)
{
if(m_yData != nullptr)
{
delete []m_yData;
}
if(m_uData != nullptr)
{
delete []m_uData;
}
if(m_vData != nullptr)
{
delete []m_vData;
}
m_yData = new uint8_t[y_linesize*height + 1];
m_uData = new uint8_t[u_linesize*height/2 + 1];
m_vData = new uint8_t[v_linesize*height/2 + 1];
}
else
{
if(width != m_width || height != m_height)
{
if(m_yData != nullptr && m_uData != nullptr && m_vData != nullptr)
{
delete []m_yData;
delete []m_uData;
delete []m_vData;
m_yData = new uint8_t[y_linesize*height + 1];
m_uData = new uint8_t[u_linesize*height/2 + 1];
m_vData = new uint8_t[v_linesize*height/2 + 1];
}
}
}
memcpy(m_yData, yData, y_linesize*height);
memcpy(m_uData, uData, u_linesize*height/2);
memcpy(m_vData, vData, v_linesize*height/2);
m_width = width;
m_height = height;
m_size.setWidth(width);
m_size.setHeight(height);
if(m_nFirstUpdateFrame)
{
resizeGL(size().width(), size().height());
m_nFirstUpdateFrame = false;
}
update();
}
// update RGB24 frame data
void FrameOpenGLWidget::updateRGB24Frame(const uint8_t* rgbData, int width, int height, int linesize)
{
if(m_rgbData == nullptr)
{
m_rgbData = new uint8_t[linesize*height + 1000];
}
else
{
if(width != m_width || height != m_height)
{
if(m_rgbData != nullptr)
{
delete []m_rgbData;
m_rgbData = new uint8_t[linesize*height + 1000];
}
}
}
memcpy(m_rgbData, rgbData, linesize*height);
m_width = width;
m_height = height;
m_size.setWidth(width);
m_size.setHeight(height);
if(m_nFirstUpdateFrame)
{
resizeGL(size().width(), size().height());
m_nFirstUpdateFrame = false;
}
update();
}
// update RGB32 frame data
void FrameOpenGLWidget::updateRGB32Frame(const uint8_t* rgbData, int width, int height, int linesize)
{
if(m_rgb32Data == nullptr)
{
m_rgb32Data = new uint8_t[linesize*height + 1000];
}
else
{
if(width != m_width || height != m_height)
{
if(m_rgb32Data != nullptr)
{
delete []m_rgb32Data;
m_rgb32Data = new uint8_t[linesize*height + 1000];
}
}
}
memcpy(m_rgb32Data, rgbData, linesize*height);
m_width = width;
m_height = height;
m_size.setWidth(width);
m_size.setHeight(height);
if(m_nFirstUpdateFrame)
{
resizeGL(size().width(), size().height());
m_nFirstUpdateFrame = false;
}
update();
}
// update frame data
void FrameOpenGLWidget::updateFrame(int eFrameDataFormat, uint8_t **data, int *linesize, int width, int height)
{
switch (eFrameDataFormat) {
case FrameDataFormat_RGB24:
updateRGB24Frame(data[0], width, height, linesize[0]);
break;
case FrameDataFormat_RGB32:
updateRGB32Frame(data[0], width, height, linesize[0]);
break;
case FrameDataFormat_YUV420P:
updateYUV420PFrame(data[0],data[1],data[2], linesize[0], linesize[1],linesize[2], width,height);
break;
case FrameDataFormat_NV12:
updateNV12Frame(data[0],data[1],linesize[0], linesize[1],width,height);
break;
}
}
void FrameOpenGLWidget::initializeGLByNV12Frame()
{
m_program.removeAllShaders();
// compile the shaders
m_program.addShaderFromSourceCode(QOpenGLShader::Vertex,
"#version 330 core\n"
"layout(location = 0) in vec4 vertexIn;\n"
"layout(location = 1) in vec2 textureIn;\n"
"out vec2 textureCoord;\n"
"void main(void)\n"
"{\n"
" gl_Position = vertexIn;\n"
" textureCoord = textureIn;\n"
"}\n");
m_program.addShaderFromSourceCode(QOpenGLShader::Fragment,
"#version 330 core\n"
"in vec2 textureCoord;\n"
"out vec4 fragColor;\n"
"uniform sampler2D tex_y;\n"
"uniform sampler2D tex_uv;\n"
"void main()\n"
"{\n"
" float y = texture(tex_y, textureCoord).r;\n"
" vec2 uv = texture(tex_uv, textureCoord).rg;\n"
" y = 1.1643 * (y - 0.0625);\n"
" float u = uv.x - 0.5;\n"
" float v = uv.y - 0.5;\n"
" fragColor = vec4(\n"
" y + 1.5958 * v,\n"
" y - 0.39173 * u - 0.81290 * v,\n"
" y + 2.017 * u,\n"
" 1.0\n"
" );\n"
"}\n");
m_program.link();
}
void FrameOpenGLWidget::initTextureByNV12Frame()
{
// create the textures
glGenTextures(1, &m_yTexture);
glGenTextures(1, &m_uvTexture);
}
void FrameOpenGLWidget::releaseTextureByNV12Frame()
{
glDeleteTextures(1, &m_yTexture);
glDeleteTextures(1, &m_uvTexture);
}
void FrameOpenGLWidget::paintGLByNV12Frame()
{
if (!m_yData || !m_uvData) return;
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
#if 1
glViewport(m_pos.x(), m_pos.y(), m_zoomSize.width(), m_zoomSize.height()); // size the viewport so the image scales to fit the widget
#endif
m_program.bind();
// upload the Y texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_yTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_width, m_height, 0, GL_RED, GL_UNSIGNED_BYTE, m_yData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// upload the UV texture
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, m_uvTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RG, m_width/2, m_height/2, 0, GL_RG, GL_UNSIGNED_BYTE, m_uvData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// assign the texture units
m_program.setUniformValue("tex_y", 0);
m_program.setUniformValue("tex_uv", 1);
// draw the quad
static const GLfloat vertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat texCoords[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, vertices);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
m_program.release();
}
void FrameOpenGLWidget::initializeGLByYUV420PFrame()
{
m_program.removeAllShaders();
// compile the shaders
m_program.addShaderFromSourceCode(QOpenGLShader::Vertex,
"#version 330 core\n"
"layout(location = 0) in vec4 vertexIn;\n"
"layout(location = 1) in vec2 textureIn;\n"
"out vec2 textureCoord;\n"
"void main() {\n"
" gl_Position = vertexIn;\n"
" textureCoord = textureIn;\n"
"}");
m_program.addShaderFromSourceCode(QOpenGLShader::Fragment,
"#version 330 core\n"
"in vec2 textureCoord;\n"
"out vec4 fragColor;\n"
"uniform sampler2D tex_y;\n"
"uniform sampler2D tex_u;\n"
"uniform sampler2D tex_v;\n"
"const mat3 yuv2rgb = mat3(\n"
" 1.164383, 1.164383, 1.164383,\n" // Y系数
" 0.0, -0.391762, 2.017232,\n" // U系数
" -0.812968, 0.0, 0.0);\n" // V系数
"void main() {\n"
" float y = texture(tex_y, textureCoord).r;\n"
" float u = texture(tex_u, textureCoord).r;\n"
" float v = texture(tex_v, textureCoord).r;\n"
" y = 1.1643 * (y - 0.0625);\n"
" u = u - 0.5;\n"
" v = v - 0.5;\n"
" fragColor = vec4(\n"
" y + 2.017232 * v,\n"
" y - 0.391762 * u - 0.812968 * v,\n"
" y + 1.164383 * u,\n"
" 1.0\n"
" );\n"
"}");
m_program.link();
}
void FrameOpenGLWidget::initTextureByYUV420PFrame()
{
// create the three textures
glGenTextures(1, &m_yTexture);
glGenTextures(1, &m_uTexture);
glGenTextures(1, &m_vTexture);
}
void FrameOpenGLWidget::releaseTextureByYUV420PFrame()
{
glDeleteTextures(1, &m_yTexture);
glDeleteTextures(1, &m_uTexture);
glDeleteTextures(1, &m_vTexture);
}
void FrameOpenGLWidget::paintGLByYUV420PFrame()
{
if (!m_yData || !m_uData || !m_vData) return;
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
#if 1
glViewport(m_pos.x(), m_pos.y(), m_zoomSize.width(), m_zoomSize.height()); // size the viewport so the image scales to fit the widget
#endif
m_program.bind();
// upload the Y texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_yTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_width, m_height, 0,
GL_RED, GL_UNSIGNED_BYTE, m_yData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// upload the U texture
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, m_uTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_width/2, m_height/2, 0,
GL_RED, GL_UNSIGNED_BYTE, m_uData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// upload the V texture
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, m_vTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_width/2, m_height/2, 0,
GL_RED, GL_UNSIGNED_BYTE, m_vData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// assign the texture units
m_program.setUniformValue("tex_y", 0);
m_program.setUniformValue("tex_u", 1);
m_program.setUniformValue("tex_v", 2);
// draw the quad
static const GLfloat vertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat texCoords[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, vertices);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
m_program.release();
}
void FrameOpenGLWidget::initializeGLByRGBFrame()
{
m_program.removeAllShaders();
// compile the shaders
m_program.addShaderFromSourceCode(QOpenGLShader::Vertex,
"#version 330 core\n"
"layout(location = 0) in vec4 vertexIn;\n"
"layout(location = 1) in vec2 textureIn;\n"
"out vec2 textureCoord;\n"
"void main() {\n"
" vec4 adjustedPos = vertexIn;\n"
" gl_Position = adjustedPos;\n"
" textureCoord = textureIn;\n"
"}");
m_program.addShaderFromSourceCode(QOpenGLShader::Fragment,
"#version 330 core\n"
"in vec2 textureCoord;\n"
"out vec4 fragColor;\n"
"uniform sampler2D tex_rgb;\n"
"void main() {\n"
" fragColor = texture(tex_rgb, textureCoord);\n"
"}");
m_program.link();
}
void FrameOpenGLWidget::initTextureByRGBFrame()
{
// create the texture
glGenTextures(1, &m_rgbTexture);
}
void FrameOpenGLWidget::releaseTextureByRGBFrame()
{
glDeleteTextures(1, &m_rgbTexture);
}
void FrameOpenGLWidget::paintGLByRGBFrame()
{
if (!m_rgbData || m_width <= 0 || m_height <= 0) return;
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
#if 1
glViewport(m_pos.x(), m_pos.y(), m_zoomSize.width(), m_zoomSize.height()); // size the viewport so the image scales to fit the widget
#endif
m_program.bind();
// set the pixel unpack alignment (needed for RGB24, whose rows are not 4-byte aligned)
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// upload the RGB texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_rgbTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_width, m_height, 0,
GL_RGB, GL_UNSIGNED_BYTE, m_rgbData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
m_program.setUniformValue("tex_rgb", 0);
// vertex and texture coordinates
static const GLfloat vertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat texCoords[] = {
0.0f, 1.0f, // bottom-left
1.0f, 1.0f, // bottom-right
0.0f, 0.0f, // top-left
1.0f, 0.0f // top-right
};
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, vertices);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
m_program.release();
}
void FrameOpenGLWidget::initializeGLByRGB32Frame()
{
m_program.removeAllShaders();
// compile the shaders
m_program.addShaderFromSourceCode(QOpenGLShader::Vertex,
"#version 330 core\n"
"layout(location = 0) in vec4 vertexIn;\n"
"layout(location = 1) in vec2 textureIn;\n"
"out vec2 textureCoord;\n"
"void main() {\n"
" vec4 pos = vertexIn;\n"
" gl_Position = pos;\n"
" textureCoord = textureIn;\n"
"}");
m_program.addShaderFromSourceCode(QOpenGLShader::Fragment,
"#version 330 core\n"
"in vec2 textureCoord;\n"
"out vec4 fragColor;\n"
"uniform sampler2D tex_rgb32;\n"
"void main() {\n"
" fragColor = texture(tex, textureCoord);\n"
"}");
m_program.link();
}
void FrameOpenGLWidget::initTextureByRGB32Frame()
{
// create the texture
glGenTextures(1, &m_rgb32Texture);
}
void FrameOpenGLWidget::releaseTextureByRGB32Frame()
{
glDeleteTextures(1, &m_rgb32Texture);
}
void FrameOpenGLWidget::paintGLByRGB32Frame()
{
if (!m_rgb32Data || m_width <= 0 || m_height <= 0) return;
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
#if 1
glViewport(m_pos.x(), m_pos.y(), m_zoomSize.width(), m_zoomSize.height()); // size the viewport so the image scales to fit the widget
#endif
m_program.bind();
// bind the texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_rgb32Texture);
// upload the texture data (choose formats to match the source)
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, // internal format
m_width, m_height, 0,
GL_BGRA, // data format (adjust to match the actual data)
GL_UNSIGNED_BYTE, m_rgb32Data);
// set the texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
m_program.setUniformValue("tex_rgb32", 0);
// vertex and texture coordinates
static const GLfloat vertices[] = {
-1.0f, -1.0f, // bottom-left
1.0f, -1.0f, // bottom-right
-1.0f, 1.0f, // top-left
1.0f, 1.0f // top-right
};
static const GLfloat texCoords[] = {
0.0f, 1.0f, // bottom-left
1.0f, 1.0f, // bottom-right
0.0f, 0.0f, // top-left
1.0f, 0.0f // top-right
};
// set the vertex attributes
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, vertices);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
// draw
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
m_program.release();
}
void FrameOpenGLWidget::initializeGL()
{
initializeOpenGLFunctions();
switch (m_nInitFrameDataFormat) {
case FrameDataFormat_RGB24:
initializeGLByRGBFrame();
// create the textures
initTextureByRGBFrame();
break;
case FrameDataFormat_RGB32:
initializeGLByRGB32Frame();
// create the textures
initTextureByRGB32Frame();
break;
case FrameDataFormat_YUV420P:
initializeGLByYUV420PFrame();
// create the textures
initTextureByYUV420PFrame();
break;
case FrameDataFormat_NV12:
initializeGLByNV12Frame();
// create the textures
initTextureByNV12Frame();
break;
}
// clear value for the color buffer (background color)
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
}
void FrameOpenGLWidget::resizeGL(int w, int h)
{
#if 1
// compute the size at which to display the image, to keep the aspect ratio while fitting the widget
if(m_size.width() <= 0 || m_size.height() <= 0) return;
if((double(w) / h) < (double(m_size.width()) / m_size.height()))
{
m_zoomSize.setWidth(w);
m_zoomSize.setHeight(((double(w) / m_size.width()) * m_size.height()));
}
else
{
m_zoomSize.setHeight(h);
m_zoomSize.setWidth((double(h) / m_size.height()) * m_size.width());
}
m_pos.setX(double(w - m_zoomSize.width()) / 2);
m_pos.setY(double(h - m_zoomSize.height()) / 2);
this->update(QRect(0, 0, w, h));
#else
glViewport(0, 0, w, h);
#endif
}
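/*
Worked example (added note) of the letterboxing math above: a 1920x1080 frame (m_size) shown in an
800x600 widget gives 800/600 = 1.33 < 1920/1080 = 1.78, so the image is width-bound:
m_zoomSize = 800 x ((800.0/1920) * 1080) = 800 x 450, and m_pos = ((800-800)/2, (600-450)/2) = (0, 75).
*/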
void FrameOpenGLWidget::paintGL()
{
switch (m_nInitFrameDataFormat) {
case FrameDataFormat_RGB24:
paintGLByRGBFrame();
break;
case FrameDataFormat_RGB32:
paintGLByRGB32Frame();
break;
case FrameDataFormat_YUV420P:
paintGLByYUV420PFrame();
break;
case FrameDataFormat_NV12:
paintGLByNV12Frame();
break;
}
}
4. Main window usage code
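The project's mainwindow.h is not shown in the original listing; the sketch below is a hypothetical minimal version reconstructed from the calls in mainwindow.cpp (the signal, slot and member names are taken from that file; only the layout of the header is assumed):
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include "videodecode.h"
#include "frameopenglwidget.h"
namespace Ui { class MainWindow; }
class MainWindow : public QMainWindow
{
Q_OBJECT
public:
MainWindow(QWidget *parent = nullptr);
~MainWindow();
VideoDecode m_vdVideoDecode; // decoder; public so the static decode callback can reach it
FrameOpenGLWidget *m_playImage = nullptr; // render widget; public for the same reason
signals:
void sgnUpdateFrameFormat(int nFormatType); // emitted from the decode thread when the pixel format changes
private slots:
void sltUpdateFrameFormat(int nFormatType);
void on_pushButtonOpenFile_clicked(bool checked);
void on_pushButtonCloseFile_clicked();
private:
Ui::MainWindow *ui;
};
#endif // MAINWINDOW_H
mainwindow.cpp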
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QFileDialog>
#include <QDebug>
MainWindow::MainWindow(QWidget *parent)
: QMainWindow(parent)
, ui(new Ui::MainWindow)
{
ui->setupUi(this);
m_vdVideoDecode.globalInit();
m_playImage = this->findChild<FrameOpenGLWidget *>("widget");
connect(this, SIGNAL(sgnUpdateFrameFormat(int)), this, SLOT(sltUpdateFrameFormat(int)), Qt::BlockingQueuedConnection);
}
MainWindow::~MainWindow()
{
delete ui;
m_vdVideoDecode.globalUnInit();
}
void MainWindow::sltUpdateFrameFormat(int nFormatType)
{
if(m_vdVideoDecode.isClose())
return;
m_playImage->setFrameDataFormat((FrameDataFormat)nFormatType);
}
void MainWindow::on_pushButtonOpenFile_clicked(bool checked)
{
bool bIsSupportHardware = (ui->checkBoxHW->checkState() == Qt::Checked);
QString filename = QFileDialog::getOpenFileName(nullptr, "Open Video File");
if (!filename.isEmpty())
{
m_vdVideoDecode.setHWDecoder(bIsSupportHardware);
m_vdVideoDecode.setStreamDecodeCallback([](int nStreamType, int nFormatType, long long llDecodeTs, long long llPlayTs, int width, int height, unsigned char ** pStreamData, int * linesize, void * pUserData){
MainWindow *pMainWindow = (MainWindow *)pUserData;
if(nFormatType != pMainWindow->m_playImage->getFrameDataFormat())
{
// emit a signal to re-initialize the OpenGL textures (texture initialization is not thread-safe; it must run on the UI thread, the thread that owns the FrameOpenGLWidget)
emit pMainWindow->sgnUpdateFrameFormat(nFormatType);
}
pMainWindow->m_playImage->updateFrame(nFormatType, pStreamData, linesize, width, height);
},this);
m_vdVideoDecode.setFileStatusCallback([](int FileStatus, int nErrorCode, void * pUserData){
qDebug()<<"file is end";
},this);
m_vdVideoDecode.open(filename);
}
}
void MainWindow::on_pushButtonCloseFile_clicked()
{
m_vdVideoDecode.close();
}
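main.cpp is listed in the .pro file but was not included above; a minimal version consistent with the rest of the project would be the stock Qt entry point:
#include "mainwindow.h"
#include <QApplication>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
MainWindow w;
w.show();
return a.exec();
}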
