一 概述
本文介绍如何基于 FFmpeg 对 IPC 相机输出的 H.264/H.265 裸码流进行解码,并通过窗口句柄和回调函数两种方式将画面渲染到播放界面。
二 描述
接口描述
1.主要接口
typedef std::function<void(const char* pReBuf, int lBufSize, int lUser, int lReserved)> DecCallback; //定义的回调函数
bool OpenStream(WId winId);//初始化接口
bool SetDecCallback(long lUser,DecCallback fun);//回调函数接口
void CloseStream();//关闭流接口
void InputStream(const char* data, int len);//接收裸码流数据接口
2.配置接口
//设置数据包最大缓存大小
void SetMaxInputQueueSize(int size) { maxInputQueueSize = size; }
//设置解码后视频帧最大缓存大小
void SetMaxOutputQueueSize(int size) { maxOutputQueueSize = size; }
//设置视频帧率
void SetFrameRate(double fps) { targetFrameRate = fps; }
//设置解码类型(264/265)
void SetCodecType(AVCodecID codecId) { targetCodecId = codecId; }
//设置是否使能硬解
void SetHardwareAccel(bool enable) { useHardwareAccel = enable; }
//设置解码重试次数
void SetMaxDecodeRetry(int retry) { maxDecodeRetry = retry; }
3.状态查询接口
bool IsRunning() const { return isRunning; }
int GetInputQueueSize() const { return inputQueue.size(); }
int GetOutputQueueSize() const { return outputQueue.size(); }
AVCodecID GetCurrentCodec() const { return currentCodecId; }
std::string GetLastError() const { return lastError; }
依赖描述
解码依赖的库:
FFmpeg 4.4
SDL 2.32.8
流程描述
1.设置参数并打开媒体流;
2.接收IPC裸码流数据;
3.将数据包放入输入队列;
4.解码线程从输入队列取数据包进行解码,将解码后数据帧放入输出队列;
5.显示线程从输出队列取数据帧;
6.如果设置回调函数,则将数据交给上层显示;
7.如果没设置回调,则将数据交给SDL库处理,可以直接显示画面
三 关键代码
ffmpegstreamdecoder.h
cpp
#ifndef FFMPEG_STREAM_DECODER_H
#define FFMPEG_STREAM_DECODER_H
#include <iostream>
#include <thread>
#include <mutex>
#include <queue>
#include <condition_variable>
#include <atomic>
#include <vector>
#include <memory>
#include <chrono>
#include <stdexcept>
#include <string>
#include <QtWidgets/QWidget>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
#include <SDL2/SDL.h>
}
// Buffer sizing defaults.
#define INPUT_BUFFER_SIZE 4096  // NOTE(review): defined but never referenced in this file — confirm before removing
#define MAX_QUEUE_SIZE 100      // default cap for both the input and output queues
// 解码前的数据结构
// One chunk of raw elementary-stream bytes waiting to be decoded, together
// with the presentation/decoding timestamps stamped on arrival.
struct InputData {
    std::vector<uint8_t> data;
    int64_t pts;
    int64_t dts;
    // Copies [buf, buf+len) into the owned byte vector.
    InputData(const char* buf, int len, int64_t pts_ = 0, int64_t dts_ = 0)
        : data(reinterpret_cast<const uint8_t*>(buf),
               reinterpret_cast<const uint8_t*>(buf) + len),
          pts(pts_),
          dts(dts_) {}
};
// 解码后的数据结构
struct DecodedFrame {
AVFrame* frame;
int64_t pts;
int64_t dts;
DecodedFrame(AVFrame* f, int64_t pts_, int64_t dts_) : frame(f), pts(pts_), dts(dts_) {}
~DecodedFrame() {
if (frame) {
av_frame_free(&frame);
}
}
};
// Signature of the user callback that receives each decoded frame:
// pReBuf/lBufSize = contiguous pixel data, lUser/lReserved = integer tags
// (currently passed as 0 by DisplayThread).
typedef std::function<void(const char* pReBuf, int lBufSize, int lUser, int lReserved)> DecCallback;
// Decodes a raw H.264/H.265 elementary stream with FFmpeg and shows it either
// in an SDL window (default) or via the registered callback. Work is split
// across three worker threads: decode, display, and SDL event pumping.
class FFmpegStreamDecoder {
public:
FFmpegStreamDecoder();
~FFmpegStreamDecoder();
// Primary interface
bool OpenStream(WId winId);                    // init SDL + decoder, start worker threads
bool SetDecCallback(long lUser,DecCallback fun); // register per-frame callback
void CloseStream();                            // stop threads, release everything
void InputStream(const char* data, int len);   // feed raw stream bytes (may block when queue is full)
// Configuration — call before OpenStream
void SetMaxInputQueueSize(int size) { maxInputQueueSize = size; }
void SetMaxOutputQueueSize(int size) { maxOutputQueueSize = size; }
// Display pacing, frames per second
void SetFrameRate(double fps) { targetFrameRate = fps; }
// Force the codec (H.264/H.265); otherwise it is auto-detected from the stream
void SetCodecType(AVCodecID codecId) { targetCodecId = codecId; }
void SetHardwareAccel(bool enable) { useHardwareAccel = enable; }
void SetMaxDecodeRetry(int retry) { maxDecodeRetry = retry; }
// State queries.
// NOTE(review): the queue sizes are read without taking the queue mutexes,
// so the returned values are advisory snapshots only.
bool IsRunning() const { return isRunning; }
int GetInputQueueSize() const { return inputQueue.size(); }
int GetOutputQueueSize() const { return outputQueue.size(); }
AVCodecID GetCurrentCodec() const { return currentCodecId; }
std::string GetLastError() const { return lastError; }
// Aggregate decode/display counters, snapshot via GetStatistics()
struct Statistics {
int64_t totalFramesDecoded;
int64_t totalFramesDisplayed;
int64_t droppedFrames;
int64_t decodeErrors;
int64_t totalInputBytes;
double averageDecodeTime;
};
Statistics GetStatistics() const;
void ResetStatistics();
private:
// Worker thread bodies
void DecodeThread();
void DisplayThread();
void SDLEventThread();
// Internal helpers
bool InitDecoder();
bool InitSDL(WId winId);
void Cleanup();
bool DecodePacket(const AVPacket* packet);
void DisplayFrame(AVFrame* frame);
bool DetectCodecFromData(const uint8_t* data, int len, AVCodecID& codecId);
bool ReinitDecoder(AVCodecID codecId);
AVCodecContext* CreateCodecContext(AVCodecID codecId);
bool ConfigureDecoder(AVCodecContext* ctx);
// Run-state flags (checked by all worker loops)
std::atomic<bool> isRunning;
std::atomic<bool> isDecoding;
std::atomic<bool> isDisplaying;
// Worker threads
std::thread decodeThread;
std::thread displayThread;
std::thread sdlEventThread;
// Queues and their synchronization primitives
std::queue<std::shared_ptr<InputData>> inputQueue;
std::queue<std::shared_ptr<DecodedFrame>> outputQueue;
std::mutex inputMutex;
std::mutex outputMutex;
std::condition_variable inputCond;
std::condition_variable outputCond;
// Configuration parameters (see setters above)
int maxInputQueueSize;
int maxOutputQueueSize;
double targetFrameRate;
AVCodecID targetCodecId;
bool useHardwareAccel;
int maxDecodeRetry;
// FFmpeg objects
AVCodecContext* codecContext;
AVCodec* codec;
AVCodecParserContext* parser;
AVFrame* frame;
AVFrame* swsFrame;
// SDL objects
SDL_Window* window;
SDL_Renderer* renderer;
SDL_Texture* texture;
SwsContext* swsContext;
// Current video geometry
int videoWidth;
int videoHeight;
AVPixelFormat pixelFormat;
AVCodecID currentCodecId;
// Last error message, readable via GetLastError()
std::string lastError;
// Codec auto-detection state
std::atomic<bool> isCodecDetected;
AVCodecID detectedCodecId;
// Decoder reset coordination
std::mutex decoderMutex;
std::atomic<bool> needDecoderReset;
// Statistics counters (updated from worker threads)
int64_t totalFramesDecoded;
int64_t totalFramesDisplayed;
int64_t droppedFrames;
int64_t decodeErrors;
int64_t totalInputBytes;
int64_t totalDecodeTime;
// Hardware acceleration state
AVBufferRef* hwDeviceContext;
enum AVPixelFormat hwPixelFormat;
// NOTE(review): decoderConfigPath is never read or written in this file
std::string decoderConfigPath;
// User-registered decode callback (empty = render through SDL instead)
DecCallback _decCb;
};
#endif // FFMPEG_STREAM_DECODER_H
ffmpegstreamdecoder.cpp
cpp
#include "ffmpegstreamdecoder.h"
#include <sstream>
#include <fstream>
#include <algorithm>
#include <QDebug>
// 2 MB scratch buffer used by DisplayThread to hand pixel data to the user
// callback. NOTE(review): this is a single global shared by all decoder
// instances and is not size-checked against large frames here — confirm
// single-decoder use, or move it into the class.
#define BUFSIZE (1024*1024*2)
uint8_t _decodedBuf[BUFSIZE];
// Constructs an idle decoder: all handles null, all counters zero,
// queue limits at their defaults. Nothing is allocated until OpenStream().
FFmpegStreamDecoder::FFmpegStreamDecoder()
    // run-state flags
    : isRunning(false), isDecoding(false), isDisplaying(false),
      // queue / pacing defaults
      maxInputQueueSize(MAX_QUEUE_SIZE), maxOutputQueueSize(MAX_QUEUE_SIZE),
      targetFrameRate(30.0), targetCodecId(AV_CODEC_ID_NONE),
      useHardwareAccel(false), maxDecodeRetry(3),
      // FFmpeg handles
      codecContext(nullptr), codec(nullptr), parser(nullptr),
      frame(nullptr), swsFrame(nullptr),
      // SDL handles
      window(nullptr), renderer(nullptr), texture(nullptr), swsContext(nullptr),
      // video geometry
      videoWidth(0), videoHeight(0),
      pixelFormat(AV_PIX_FMT_RGB24), currentCodecId(AV_CODEC_ID_NONE),
      // auto-detection / reset state
      isCodecDetected(false), detectedCodecId(AV_CODEC_ID_NONE),
      needDecoderReset(false),
      // statistics counters
      totalFramesDecoded(0), totalFramesDisplayed(0), droppedFrames(0),
      decodeErrors(0), totalInputBytes(0), totalDecodeTime(0),
      // hardware acceleration state
      hwDeviceContext(nullptr), hwPixelFormat(AV_PIX_FMT_NONE) {
    // avcodec_register_all()/av_register_all() are obsolete since FFmpeg 4.0.
    av_log_set_level(AV_LOG_ERROR); // keep FFmpeg quiet except for errors
}
// Destructor: joins the worker threads and frees all resources via CloseStream().
FFmpegStreamDecoder::~FFmpegStreamDecoder()
{
    CloseStream();
}
// Initializes the SDL render target and the decoder, then starts the three
// worker threads. winId: native window handle to render into, or 0 to open a
// standalone SDL window. Returns false (with lastError set) on any failure.
bool FFmpegStreamDecoder::OpenStream(WId winId) {
    try {
        if (isRunning) {
            lastError = "Stream already running";
            return false;
        }
        // Bring up the render target first, then the decoder.
        if (!InitSDL(winId))
            return false;
        if (!InitDecoder()) {
            Cleanup();
            return false;
        }
        // Raise the flags before spawning so the workers see them set.
        isRunning = true;
        isDecoding = true;
        isDisplaying = true;
        decodeThread   = std::thread(&FFmpegStreamDecoder::DecodeThread, this);
        displayThread  = std::thread(&FFmpegStreamDecoder::DisplayThread, this);
        sdlEventThread = std::thread(&FFmpegStreamDecoder::SDLEventThread, this);
        return true;
    } catch (const std::exception& e) {
        lastError = "Exception: " + std::string(e.what());
        Cleanup();
        return false;
    }
}
// Registers the callback that receives each decoded frame as a contiguous
// buffer instead of SDL rendering.
// NOTE(review): lUser is currently ignored — DisplayThread invokes the
// callback with lUser/lReserved hard-coded to 0. Store lUser in a member and
// forward it if callers rely on the tag (TODO confirm).
bool FFmpegStreamDecoder::SetDecCallback(long lUser, DecCallback fun)
{
_decCb = fun;
return true;
}
void FFmpegStreamDecoder::CloseStream() {
if (!isRunning) return;
isRunning = false;
isDecoding = false;
isDisplaying = false;
// 通知所有等待的线程
inputCond.notify_all();
outputCond.notify_all();
// 等待线程结束
if (decodeThread.joinable()) {
decodeThread.join();
}
if (displayThread.joinable()) {
displayThread.join();
}
if(sdlEventThread.joinable()){
sdlEventThread.join();
}
Cleanup();
}
// Queues one chunk of raw H.264/H.265 elementary stream for decoding.
// Blocks the caller while the input queue is full (back-pressure);
// CloseStream() clears isRunning and notifies to unblock a waiting producer.
// Fixes: rejects a null data pointer; casts the queue size to int to avoid a
// signed/unsigned comparison; counts totalInputBytes only for bytes actually
// accepted into the pipeline (previously even rejected data was counted).
void FFmpegStreamDecoder::InputStream(const char* data, int len) {
    if (!isRunning || !data || len <= 0) return;
    std::unique_lock<std::mutex> lock(inputMutex);
    // Wait for queue space or shutdown.
    inputCond.wait(lock, [this]() {
        return (int)inputQueue.size() < maxInputQueueSize || !isRunning;
    });
    if (!isRunning) return;
    totalInputBytes += len;
    // The raw stream carries no container timestamps, so stamp arrival time.
    auto inputData = std::make_shared<InputData>(data, len, av_gettime(), av_gettime());
    inputQueue.push(inputData);
    // Wake the decode thread.
    inputCond.notify_one();
}
// Prepares decoding state. If the caller pinned a codec via SetCodecType()
// the decoder is opened immediately; otherwise opening is deferred until
// DetectCodecFromData() identifies the stream. Also allocates the two
// working frames. Returns false with lastError set on failure.
bool FFmpegStreamDecoder::InitDecoder() {
    try {
        if (targetCodecId == AV_CODEC_ID_NONE) {
            // No codec forced: wait for auto-detection in DecodeThread.
            codecContext = nullptr;
            currentCodecId = AV_CODEC_ID_NONE;
            isCodecDetected = false;
        } else {
            // Caller pinned the codec: open it right away.
            codecContext = CreateCodecContext(targetCodecId);
            if (!codecContext)
                return false;
            if (!ConfigureDecoder(codecContext)) {
                avcodec_free_context(&codecContext);
                return false;
            }
            currentCodecId = targetCodecId;
            isCodecDetected = true;
        }
        // One frame for raw decoder output, one for converted/transferred data.
        frame = av_frame_alloc();
        if (!frame) {
            lastError = "Failed to allocate frame";
            return false;
        }
        swsFrame = av_frame_alloc();
        if (!swsFrame) {
            lastError = "Failed to allocate sws frame";
            av_frame_free(&frame);
            return false;
        }
        return true;
    } catch (const std::exception& e) {
        lastError = "Decoder init exception: " + std::string(e.what());
        return false;
    }
}
// Creates (but does not open) a decoder context for the given codec id.
// On success `codec` points at the matched decoder and the returned context
// has frame threading and, optionally, a hardware device attached.
// Returns nullptr with lastError set on failure.
// Fixes: av_hwdevice_find_type_by_name() can return AV_HWDEVICE_TYPE_NONE for
// backends not compiled into this FFmpeg build — that value is now skipped
// instead of being passed to av_hwdevice_ctx_create(); the device reference is
// kept in hwDeviceContext so Cleanup()'s existing av_buffer_unref pairs with it.
AVCodecContext* FFmpegStreamDecoder::CreateCodecContext(AVCodecID codecId) {
    try {
        codec = avcodec_find_decoder(codecId);
        if (!codec) {
            lastError = "Codec not found: " + std::to_string(codecId);
            return nullptr;
        }
        AVCodecContext* ctx = avcodec_alloc_context3(codec);
        if (!ctx) {
            lastError = "Failed to allocate codec context";
            return nullptr;
        }
        // Frame-level threading with 4 workers.
        ctx->thread_count = 4;
        ctx->thread_type = FF_THREAD_FRAME;
        if (useHardwareAccel) {
            // Probe common hw backends in priority order; first that opens wins.
            const char* hw_accel_names[] = {"cuda", "vaapi", "qsv", "d3d11va", nullptr};
            for (int i = 0; hw_accel_names[i]; i++) {
                enum AVHWDeviceType type = av_hwdevice_find_type_by_name(hw_accel_names[i]);
                if (type == AV_HWDEVICE_TYPE_NONE)
                    continue; // backend not available in this build
                AVBufferRef* hw_device_ctx = nullptr;
                if (av_hwdevice_ctx_create(&hw_device_ctx, type, nullptr, nullptr, 0) == 0) {
                    // Keep our own reference (released in Cleanup()); the
                    // context gets its own ref, released with the context.
                    hwDeviceContext = hw_device_ctx;
                    ctx->hw_device_ctx = av_buffer_ref(hwDeviceContext);
                    break;
                }
            }
        }
        return ctx;
    } catch (const std::exception& e) {
        lastError = "Create codec context exception: " + std::string(e.what());
        return nullptr;
    }
}
// Opens the decoder selected by CreateCodecContext() and attaches a bitstream
// parser for its codec. Returns false with lastError set on failure.
bool FFmpegStreamDecoder::ConfigureDecoder(AVCodecContext* ctx) {
    try {
        const int ret = avcodec_open2(ctx, codec, nullptr);
        if (ret < 0) {
            char errbuf[1024];
            av_strerror(ret, errbuf, sizeof(errbuf));
            lastError = "Failed to open codec: " + std::string(errbuf);
            return false;
        }
        // The parser splits the raw byte stream into whole packets for us.
        parser = av_parser_init(ctx->codec_id);
        if (!parser) {
            lastError = "Failed to create parser";
            avcodec_close(ctx);
            return false;
        }
        // Treat each input chunk as containing complete frames.
        // NOTE(review): if the IPC source can split a frame across chunks,
        // this flag should be reconsidered — confirm against the data source.
        parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
        return true;
    } catch (const std::exception& e) {
        lastError = "Configure decoder exception: " + std::string(e.what());
        return false;
    }
}
// Heuristically classifies an Annex-B byte stream as H.264 or H.265 by
// inspecting the NAL unit type right after the first start code.
// The 4-byte (00 00 00 01) and 3-byte (00 00 01) start codes are mutually
// exclusive at offset 0, so a single offset computation covers both.
// Returns true and sets codecId on a confident match.
bool FFmpegStreamDecoder::DetectCodecFromData(const uint8_t* data, int len, AVCodecID& codecId) {
    try {
        if (len < 16) return false; // too short to classify reliably
        // Length of the leading start code, or -1 if none.
        int offset = -1;
        if (len >= 4 && data[0] == 0x00 && data[1] == 0x00 &&
            data[2] == 0x00 && data[3] == 0x01) {
            offset = 4;
        } else if (len >= 3 && data[0] == 0x00 && data[1] == 0x00 && data[2] == 0x01) {
            offset = 3;
        }
        if (offset < 0 || len <= offset)
            return false;
        // H.264: NAL type in the low 5 bits (SPS=7, PPS=8, IDR=5, non-IDR=1).
        const uint8_t h264Type = data[offset] & 0x1F;
        if (h264Type == 7 || h264Type == 8 || h264Type == 5 || h264Type == 1) {
            codecId = AV_CODEC_ID_H264;
            return true;
        }
        // H.265: NAL type in bits 1-6 of the first header byte
        // (VPS=32, SPS=33, PPS=34, ... up to 39).
        const uint8_t h265Type = (data[offset] >> 1) & 0x3F;
        if (h265Type >= 32 && h265Type <= 39) {
            codecId = AV_CODEC_ID_H265;
            return true;
        }
        return false;
    } catch (const std::exception& e) {
        lastError = "Detect codec exception: " + std::string(e.what());
        return false;
    }
}
// Tears down any existing parser/decoder pair and builds a fresh one for
// codecId. Serialized by decoderMutex against concurrent resets.
// Returns false with lastError set on failure.
bool FFmpegStreamDecoder::ReinitDecoder(AVCodecID codecId) {
    std::lock_guard<std::mutex> lock(decoderMutex);
    try {
        // Release the previous parser and decoder, if present.
        if (parser) {
            av_parser_close(parser);
            parser = nullptr;
        }
        if (codecContext) {
            avcodec_close(codecContext);
            avcodec_free_context(&codecContext);
            codecContext = nullptr;
        }
        // Build and open the replacement decoder.
        codecContext = CreateCodecContext(codecId);
        if (!codecContext)
            return false;
        if (!ConfigureDecoder(codecContext)) {
            avcodec_free_context(&codecContext);
            codecContext = nullptr;
            return false;
        }
        currentCodecId = codecId;
        isCodecDetected = true;
        return true;
    } catch (const std::exception& e) {
        lastError = "Reinit decoder exception: " + std::string(e.what());
        return false;
    }
}
// Initializes SDL and creates the render window + accelerated renderer.
// winId != 0 wraps an existing native window (e.g. a Qt widget handle);
// winId == 0 opens a standalone resizable window.
bool FFmpegStreamDecoder::InitSDL(WId winId) {
    try {
        if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_EVENTS) < 0) {
            lastError = "SDL initialization failed: " + std::string(SDL_GetError());
            return false;
        }
        // Render into the supplied native handle, or open our own window.
        window = winId
            ? SDL_CreateWindowFrom((void*)winId)
            : SDL_CreateWindow("FFmpeg Stream Decoder",
                               SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
                               1280, 720,
                               SDL_WINDOW_SHOWN | SDL_WINDOW_RESIZABLE);
        if (!window) {
            lastError = "Failed to create SDL window: " + std::string(SDL_GetError());
            return false;
        }
        // Allow resizing down to a sensible minimum.
        SDL_SetWindowResizable(window, SDL_TRUE);
        SDL_SetWindowMinimumSize(window, 400, 300);
        renderer = SDL_CreateRenderer(window, -1,
                                      SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
        if (!renderer) {
            lastError = "Failed to create SDL renderer: " + std::string(SDL_GetError());
            SDL_DestroyWindow(window);
            window = nullptr;
            return false;
        }
        return true;
    } catch (const std::exception& e) {
        lastError = "SDL init exception: " + std::string(e.what());
        return false;
    }
}
void FFmpegStreamDecoder::Cleanup() {
// 清理SDL资源
if (texture) {
SDL_DestroyTexture(texture);
texture = nullptr;
}
if (renderer) {
SDL_DestroyRenderer(renderer);
renderer = nullptr;
}
if (window) {
SDL_DestroyWindow(window);
window = nullptr;
}
// 清理FFmpeg资源
if (swsContext) {
sws_freeContext(swsContext);
swsContext = nullptr;
}
if (swsFrame) {
av_frame_free(&swsFrame);
swsFrame = nullptr;
}
if (frame) {
av_frame_free(&frame);
frame = nullptr;
}
if (parser) {
av_parser_close(parser);
parser = nullptr;
}
if (codecContext) {
avcodec_close(codecContext);
avcodec_free_context(&codecContext);
codecContext = nullptr;
}
// 清理硬件加速资源
if (hwDeviceContext) {
av_buffer_unref(&hwDeviceContext);
hwDeviceContext = nullptr;
}
// 清空队列
std::queue<std::shared_ptr<InputData>> emptyInput;
std::queue<std::shared_ptr<DecodedFrame>> emptyOutput;
{
std::lock_guard<std::mutex> lock(inputMutex);
std::swap(inputQueue, emptyInput);
}
{
std::lock_guard<std::mutex> lock(outputMutex);
std::swap(outputQueue, emptyOutput);
}
// 重置状态
videoWidth = 0;
videoHeight = 0;
pixelFormat = AV_PIX_FMT_RGB24;
currentCodecId = AV_CODEC_ID_NONE;
isCodecDetected = false;
detectedCodecId = AV_CODEC_ID_NONE;
needDecoderReset = false;
}
void FFmpegStreamDecoder::DecodeThread() {
AVPacket packet;
av_init_packet(&packet);
uint8_t* parserBuffer = nullptr;
int parserBufferSize = 0;
std::vector<uint8_t> detectionBuffer;
try {
while (isDecoding) {
std::shared_ptr<InputData> inputData;
{
std::unique_lock<std::mutex> lock(inputMutex);
if (inputCond.wait_for(lock, std::chrono::milliseconds(100),
[this]() { return !inputQueue.empty() || !isDecoding; })) {
if (!isDecoding || inputQueue.empty()) break;
inputData = inputQueue.front();
inputQueue.pop();
} else {
continue;
}
}
if (!inputData || inputData->data.empty()) continue;
// 如果还没有检测到编解码器,先进行检测
if (!isCodecDetected) {
detectionBuffer.insert(detectionBuffer.end(),
inputData->data.begin(),
inputData->data.end());
// 尝试检测编解码器
AVCodecID detectedId;
if (DetectCodecFromData(detectionBuffer.data(),
detectionBuffer.size(),
detectedId)) {
if (ReinitDecoder(detectedId)) {
// 检测成功,继续处理当前数据
} else {
lastError = "Failed to initialize detected codec: " + std::to_string(detectedId);
continue;
}
} else if (detectionBuffer.size() > 1024 * 10) { // 超过10KB还没检测出来,放弃
lastError = "Cannot detect codec from data";
continue;
} else {
// 继续积累数据
continue;
}
}
// 检查是否需要重置解码器
if (needDecoderReset) {
if (ReinitDecoder(currentCodecId)) {
needDecoderReset = false;
} else {
lastError = "Failed to reset decoder";
continue;
}
}
// 使用解析器解析数据
const uint8_t* data = inputData->data.data();
int dataSize = inputData->data.size();
int remaining = dataSize;
while (remaining > 0 && isDecoding) {
int ret = av_parser_parse2(parser, codecContext,
&packet.data, &packet.size,
data, remaining,
inputData->pts, inputData->dts,
inputData->pts);
if (ret < 0) {
char errbuf[1024];
av_strerror(ret, errbuf, sizeof(errbuf));
lastError = "Parser error: " + std::string(errbuf);
break;
}
data += ret;
remaining -= ret;
if (packet.size > 0) {
int retry = 0;
bool decoded = false;
while (retry < maxDecodeRetry && !decoded) {
if (DecodePacket(&packet)) {
decoded = true;
totalFramesDecoded++;
} else {
retry++;
if (retry >= maxDecodeRetry) {
decodeErrors++;
}
}
}
}
}
}
} catch (const std::exception& e) {
lastError = "Decode thread exception: " + std::string(e.what());
isDecoding = false;
}
av_packet_unref(&packet);
if (parserBuffer) {
av_free(parserBuffer);
}
}
// Sends one parsed packet to the decoder, and on receiving a frame pushes a
// reference-counted copy into the output queue (dropping the oldest entry
// when the queue is full). Returns true once a frame has been queued.
// NOTE(review): the function returns inside the receive loop after the FIRST
// frame; any further frames buffered in the decoder are only drained by the
// next call — confirm this is intended for this stream type.
bool FFmpegStreamDecoder::DecodePacket(const AVPacket* packet) {
try {
int64_t startTime = av_gettime();
// Feed the compressed packet to the decoder.
int ret = avcodec_send_packet(codecContext, packet);
if (ret < 0) {
// EAGAIN/EOF are expected flow-control results, not reportable errors.
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
return false;
}
char errbuf[1024];
av_strerror(ret, errbuf, sizeof(errbuf));
lastError = "Send packet error: " + std::string(errbuf);
return false;
}
// Drain decoded frames produced from this packet.
while (ret >= 0) {
ret = avcodec_receive_frame(codecContext, frame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break;
}
if (ret < 0) {
char errbuf[1024];
av_strerror(ret, errbuf, sizeof(errbuf));
lastError = "Receive frame error: " + std::string(errbuf);
return false;
}
// Accumulate wall-clock decode time for the statistics.
int64_t decodeTime = av_gettime() - startTime;
totalDecodeTime += decodeTime;
// Hardware-decoded frames live in GPU memory and must be copied out.
// NOTE(review): hwPixelFormat is never assigned after construction
// (stays AV_PIX_FMT_NONE), so this branch appears unreachable — verify
// before relying on hardware decoding.
AVFrame* displayFrame = frame;
if (frame->format == hwPixelFormat && swsFrame) {
ret = av_hwframe_transfer_data(swsFrame, frame, 0);
if (ret < 0) {
char errbuf[1024];
av_strerror(ret, errbuf, sizeof(errbuf));
lastError = "HW frame transfer error: " + std::string(errbuf);
return false;
}
displayFrame = swsFrame;
}
// Rebuild conversion state whenever the video geometry/format changes.
if (videoWidth != codecContext->width ||
videoHeight != codecContext->height ||
pixelFormat != displayFrame->format) {
videoWidth = codecContext->width;
videoHeight = codecContext->height;
pixelFormat = (AVPixelFormat)displayFrame->format;
// NOTE(review): swsContext is (re)created here, but no sws_scale()
// call follows anywhere in this function — the conversion path looks
// unfinished; confirm whether RGB conversion was intended here.
if (swsContext) {
sws_freeContext(swsContext);
}
swsContext = sws_getContext(videoWidth, videoHeight, pixelFormat,
videoWidth, videoHeight, AV_PIX_FMT_RGB24,
SWS_BILINEAR, nullptr, nullptr, nullptr);
// NOTE(review): swsFrame doubles as the hardware-transfer target above
// and is reallocated here as an RGB24 frame; if the hw path were ever
// active these two uses would conflict — verify.
if (swsFrame) {
av_frame_free(&swsFrame);
}
swsFrame = av_frame_alloc();
if (swsFrame) {
swsFrame->format = AV_PIX_FMT_RGB24;
swsFrame->width = videoWidth;
swsFrame->height = videoHeight;
av_frame_get_buffer(swsFrame, 32);
}
}
// Hand a new reference to the queue so `frame` can be reused next call.
AVFrame* frameCopy = av_frame_alloc();
if (!frameCopy) {
return false;
}
ret = av_frame_ref(frameCopy, displayFrame);
if (ret < 0) {
av_frame_free(&frameCopy);
return false;
}
// Enqueue for the display thread; drop the oldest frame when full.
std::unique_lock<std::mutex> lock(outputMutex);
if (outputQueue.size() >= maxOutputQueueSize) {
outputQueue.pop();
droppedFrames++;
}
auto decodedFrame = std::make_shared<DecodedFrame>(frameCopy, frame->pts, frame->pkt_dts);
outputQueue.push(decodedFrame);
outputCond.notify_one();
return true;
}
return false;
} catch (const std::exception& e) {
lastError = "Decode packet exception: " + std::string(e.what());
return false;
}
}
/**
 * Copy an AVFrame's pixel data into a caller-provided contiguous buffer.
 *
 * Fix: the original av_malloc'ed a fresh buffer into a BY-VALUE pointer
 * parameter, so the caller's buffer was never written (the decode callback
 * then emitted uninitialized bytes), the allocation leaked on every frame,
 * and `if (!*data)` dereferenced the pointer instead of checking it. The
 * pixels are now copied directly into `data`, bounded by `capacity`.
 *
 * @param frame    input frame (format/width/height/linesize must be valid)
 * @param data     destination buffer supplied by the caller
 * @param len      out: number of bytes written (0 on failure)
 * @param capacity size of `data` in bytes; defaults to BUFSIZE to match the
 *                 existing call site that passes the global _decodedBuf
 * @return 0 on success, negative error code on failure
 *         (-1 bad args, -2 unsized frame, -3 frame larger than capacity)
 */
int avframe_to_buffer(AVFrame *frame, uint8_t *data, int *len, int capacity = BUFSIZE) {
    if (!frame || !data || !len) {
        return -1;
    }
    *len = av_image_get_buffer_size(
        (AVPixelFormat)frame->format,
        frame->width,
        frame->height,
        1 // byte alignment
    );
    if (*len <= 0) {
        *len = 0;
        return -2;
    }
    // Reject frames that would overflow the destination buffer.
    if (*len > capacity) {
        *len = 0;
        return -3;
    }
    int ret = av_image_copy_to_buffer(
        data,
        *len,
        (const uint8_t *const *)frame->data,
        (const int *)frame->linesize,
        (AVPixelFormat)frame->format,
        frame->width,
        frame->height,
        1
    );
    if (ret < 0) {
        *len = 0;
        return ret;
    }
    return 0;
}
void FFmpegStreamDecoder::DisplayThread() {
try {
while (isDisplaying) {
std::shared_ptr<DecodedFrame> decodedFrame;
{
std::unique_lock<std::mutex> lock(outputMutex);
if (outputCond.wait_for(lock, std::chrono::milliseconds(100),
[this]() { return !outputQueue.empty() || !isDisplaying; })) {
if (!isDisplaying || outputQueue.empty()) break;
decodedFrame = outputQueue.front();
outputQueue.pop();
} else {
continue;
}
}
if (decodedFrame && decodedFrame->frame) {
if(_decCb){
int len;
avframe_to_buffer(decodedFrame->frame,_decodedBuf,&len);
_decCb((const char*)_decodedBuf,len,0,0);
}else{
DisplayFrame(decodedFrame->frame);
}
totalFramesDisplayed++;
}
// 控制帧率
if (targetFrameRate > 0) {
std::this_thread::sleep_for(std::chrono::milliseconds((int)(1000.0 / targetFrameRate)));
}
}
} catch (const std::exception& e) {
lastError = "Display thread exception: " + std::string(e.what());
isDisplaying = false;
}
}
// Worker: pumps the SDL event queue so the window stays responsive, and keeps
// the render viewport in sync with window resizes. Polls every 50 ms.
void FFmpegStreamDecoder::SDLEventThread()
{
    SDL_Event event;
    int windowWidth = 800;
    int windowHeight = 600;
    while (isRunning) {
        // Drain all pending events, then idle briefly.
        while (SDL_PollEvent(&event)) {
            if (event.type == SDL_WINDOWEVENT &&
                event.window.event == SDL_WINDOWEVENT_SIZE_CHANGED) {
                windowWidth = event.window.data1;
                windowHeight = event.window.data2;
                qDebug() << "windowWidth:" << windowWidth;
                qDebug() << "windowHeight:" << windowHeight;
                // Match the render viewport to the new window size.
                SDL_Rect viewport = {0, 0, windowWidth, windowHeight};
                SDL_RenderSetViewport(renderer, &viewport);
            }
            // SDL_QUIT and the ESC key are intentionally ignored here; the
            // stream is shut down through CloseStream() instead.
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(50));
    }
}
// Renders one decoded frame with SDL, letterboxed to preserve aspect ratio.
// Fix: the old recreate condition `(!texture || videoWidth != 0)` was always
// true once the size guard passed, so the texture was destroyed and recreated
// on EVERY frame; it is now recreated only when the video size changes
// (checked against the live texture via SDL_QueryTexture).
// NOTE(review): the texture is IYUV — this assumes the decoder outputs
// YUV420P planes; other pixel formats would need conversion first (TODO
// confirm against the camera streams in use).
void FFmpegStreamDecoder::DisplayFrame(AVFrame* frame) {
    try {
        if (!renderer || !frame || videoWidth <= 0 || videoHeight <= 0) return;
        // Recreate the texture only on a size change.
        int texW = 0, texH = 0;
        if (texture) {
            SDL_QueryTexture(texture, nullptr, nullptr, &texW, &texH);
        }
        if (!texture || texW != videoWidth || texH != videoHeight) {
            if (texture) {
                SDL_DestroyTexture(texture);
            }
            texture = SDL_CreateTexture(renderer,
                                        SDL_PIXELFORMAT_IYUV,
                                        SDL_TEXTUREACCESS_STREAMING,
                                        videoWidth, videoHeight);
            if (!texture) {
                lastError = "Failed to create texture: " + std::string(SDL_GetError());
                return;
            }
        }
        // Upload the three YUV planes.
        SDL_UpdateYUVTexture(texture, nullptr,
                             frame->data[0], frame->linesize[0],
                             frame->data[1], frame->linesize[1],
                             frame->data[2], frame->linesize[2]);
        // Fit the video into the window, preserving aspect ratio (letterbox).
        int windowWidth, windowHeight;
        SDL_GetWindowSize(window, &windowWidth, &windowHeight);
        double videoRatio = (double)videoWidth / videoHeight;
        double windowRatio = (double)windowWidth / windowHeight;
        int displayWidth, displayHeight;
        if (videoRatio > windowRatio) {
            displayWidth = windowWidth;
            displayHeight = (int)(windowWidth / videoRatio);
        } else {
            displayHeight = windowHeight;
            displayWidth = (int)(windowHeight * videoRatio);
        }
        // Centered destination rectangle.
        SDL_Rect rect = {(windowWidth - displayWidth) / 2,
                         (windowHeight - displayHeight) / 2,
                         displayWidth, displayHeight};
        SDL_RenderClear(renderer);
        SDL_RenderCopy(renderer, texture, nullptr, &rect);
        SDL_RenderPresent(renderer);
    } catch (const std::exception& e) {
        lastError = "Display frame exception: " + std::string(e.what());
    }
}
// Returns a snapshot of the decode/display counters. averageDecodeTime is
// total accumulated decode time divided by decoded frames (0 when none).
// Counters are plain int64_t written by worker threads, so the snapshot is
// best-effort.
FFmpegStreamDecoder::Statistics FFmpegStreamDecoder::GetStatistics() const {
    Statistics stats;
    stats.totalFramesDecoded = totalFramesDecoded;
    stats.totalFramesDisplayed = totalFramesDisplayed;
    stats.droppedFrames = droppedFrames;
    stats.decodeErrors = decodeErrors;
    stats.totalInputBytes = totalInputBytes;
    if (totalFramesDecoded > 0) {
        stats.averageDecodeTime = (double)totalDecodeTime / totalFramesDecoded;
    } else {
        stats.averageDecodeTime = 0;
    }
    return stats;
}
// Zeroes every accumulated counter (decode, display, drops, errors, I/O, timing).
void FFmpegStreamDecoder::ResetStatistics() {
    totalFramesDecoded = 0;
    totalFramesDisplayed = 0;
    droppedFrames = 0;
    decodeErrors = 0;
    totalInputBytes = 0;
    totalDecodeTime = 0;
}
四 调用示例
1.设置参数并初始化
cpp
//设置参数并初始化
m_ffDecoder = std::make_unique<FFmpegStreamDecoder>();
m_ffDecoder->SetMaxInputQueueSize(80);
m_ffDecoder->SetMaxOutputQueueSize(25);
m_ffDecoder->SetFrameRate(25);
m_ffDecoder->SetDecCallback(1,m_param.decCb);
m_ffDecoder->OpenStream((WId)m_param.renderWnd);
2.输入流
cpp
if(m_ffDecoder){
m_ffDecoder->InputStream(ba.constData(),ba.size());
}