Workflow
- Look up the hardware device type by name and check whether it is supported
const char *device_name = "qsv"; // or "cuda", etc.
enum AVHWDeviceType type = av_hwdevice_find_type_by_name(device_name);
if (type == AV_HWDEVICE_TYPE_NONE)
{
    // If this hardware type is not supported, print all supported device types
    printf("Device type %s is not supported.\n", device_name);
    fprintf(stderr, "Available device types:");
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
    {
        fprintf(stderr, " %s", av_hwdevice_get_type_name(type));
    }
    fprintf(stderr, "\n");
}
- The usual demuxing setup:
avformat_open_input
avformat_find_stream_info
av_find_best_stream
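A minimal sketch of how these three calls chain together (error handling is reduced to early returns; input_url is a placeholder, not a name from the original code):

AVFormatContext *fmt_ctx = NULL;
AVCodec *decoder = NULL;   // non-const AVCodec, matching the pre-5.0 FFmpeg API used in this post
int video_index;

if (avformat_open_input(&fmt_ctx, input_url, NULL, NULL) != 0)
    return;    // could not open the input
if (avformat_find_stream_info(fmt_ctx, NULL) < 0)
    return;    // could not read stream information
// Returns the index of the best video stream and the decoder that can handle it
video_index = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);
if (video_index < 0)
    return;    // no video stream found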
- Query the decoder's hardware configurations to get the hardware pixel format for this device type
AVCodec *pCodec;    // set by av_find_best_stream in the previous step
for (i = 0;; i++) {
    const AVCodecHWConfig *config = avcodec_get_hw_config(pCodec, i);
    if (!config) {
        fprintf(stderr, "Decoder %s does not support device type %s.\n",
                pCodec->name, av_hwdevice_get_type_name(type));
        return;
    }
    if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX &&
        config->device_type == type) {
        hw_pix_fmt = config->pix_fmt;
        break;
    }
}
- Allocate the decoder context
if (!(pCodecCtx = avcodec_alloc_context3(pCodec)))
    return;
- Copy the stream parameters into the decoder context and set the get_format callback
video = pFormatCtx->streams[videoStream];
if (avcodec_parameters_to_context(pCodecCtx, video->codecpar) < 0)
    return;
pCodecCtx->get_format = get_hw_format;
- Initialize the hardware device and open the decoder
if (hw_decoder_init(pCodecCtx, type) < 0)
    return;
// Open the decoder
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
    printf("Could not open codec.\n");
    return;
}
- Read video packets, send them to the decoder, and receive decoded frames
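A minimal sketch of that loop, using the same variable names as the complete code below:

while (av_read_frame(pFormatCtx, &packet) >= 0) {
    if (packet.stream_index == videoStream) {
        ret = avcodec_send_packet(pCodecCtx, &packet);
        // One packet can yield zero or more frames; drain them all
        while (ret >= 0 && avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
            // pFrame->format == hw_pix_fmt here: the pixel data still lives in GPU memory,
            // so it has to be transferred to system memory (next step) before sws_scale
        }
    }
    av_packet_unref(&packet);
}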
- Copy the decoded frame from GPU memory to system memory
ret = av_hwframe_transfer_data(swFrame, pFrame, 0);
if (ret < 0)
{
    qDebug() << "Error transferring the data to system memory";
    break;
}
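After a successful transfer, swFrame->format holds the software pixel format the data was downloaded to. If you would rather not assume it is NV12 (as the sws_getContext call in the complete code below does), the transferable formats can be queried; a short sketch using av_hwframe_transfer_get_formats:

enum AVPixelFormat *fmts = NULL;
if (av_hwframe_transfer_get_formats(pFrame->hw_frames_ctx,
                                    AV_HWFRAME_TRANSFER_DIRECTION_FROM,
                                    &fmts, 0) >= 0) {
    // The list is terminated by AV_PIX_FMT_NONE; fmts[0] is the preferred download format
    qDebug() << "preferred transfer format:" << av_get_pix_fmt_name(fmts[0]);
    av_freep(&fmts);
}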
Complete code
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameRGB, *swFrame, *tempFrame;
uint8_t *out_buffer;
AVPacket packet;
AVStream *video = NULL;
static struct SwsContext *img_convert_ctx;
int videoStream, i, numBytes;
int ret, got_picture;
avformat_network_init();
//Allocate an AVFormatContext.
pFormatCtx = avformat_alloc_context();
const char *device_name = "cuda"; // or "qsv", etc.
enum AVHWDeviceType type = av_hwdevice_find_type_by_name(device_name);
if (type == AV_HWDEVICE_TYPE_NONE)
{
    printf("Device type %s is not supported.\n", device_name);
    fprintf(stderr, "Available device types:");
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
        fprintf(stderr, " %s", av_hwdevice_get_type_name(type));
    fprintf(stderr, "\n");
}
AVDictionary *opt = nullptr;
// av_dict_set(&opt, "buffer_size", "1024000", 0);
// av_dict_set(&opt, "max_delay", "0", 0);
av_dict_set(&opt, "rtsp_transport", "tcp", 0);
av_dict_set(&opt, "stimeout", "5000000", 0);
// Open the input with the RTSP options set above
if (avformat_open_input(&pFormatCtx, mFileName.toUtf8().data(), NULL, &opt) != 0) {
    printf("Can't open the file.\n");
    return;
}
av_dict_free(&opt);
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
    printf("Couldn't find stream information.\n");
    return;
}
ret = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pCodec, 0);
if (ret < 0) {
fprintf(stderr, "Cannot find a video stream in the input file\n");
return;
}
videoStream = ret;
for (i = 0;; i++) {
const AVCodecHWConfig *config = avcodec_get_hw_config(pCodec, i);
if (!config) {
fprintf(stderr, "Decoder %s does not support device type %s.\n",
pCodec->name, av_hwdevice_get_type_name(type));
return;
}
if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX &&
config->device_type == type) {
hw_pix_fmt = config->pix_fmt;
break;
}
}
/// Allocate the decoder context
if (!(pCodecCtx = avcodec_alloc_context3(pCodec)))
return;
video = pFormatCtx->streams[videoStream];
if (avcodec_parameters_to_context(pCodecCtx, video->codecpar) < 0)
return;
pCodecCtx->get_format = get_hw_format;
if (hw_decoder_init(pCodecCtx, type) < 0)
return;
/// Open the decoder
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
printf("Could not open codec.\n");
return;
}
pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
tempFrame = av_frame_alloc();
swFrame = av_frame_alloc();
/// Convert the downloaded frames to RGB32 (NV12 is assumed here, which is what cuda/qsv frames are typically transferred as)
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                 AV_PIX_FMT_NV12, pCodecCtx->width, pCodecCtx->height,
                                 AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);
numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);
out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
avpicture_fill((AVPicture *) pFrameRGB, out_buffer, AV_PIX_FMT_RGB32,
               pCodecCtx->width, pCodecCtx->height);
av_dump_format(pFormatCtx, 0, mFileName.toUtf8().data(), 0); // print the stream information
while (1)
{
    if (av_read_frame(pFormatCtx, &packet) < 0)
    {
        break; // assume we have reached the end of the stream
    }
if (packet.stream_index == videoStream) {
ret = avcodec_send_packet(pCodecCtx, &packet);
if (ret < 0) {
printf("decode error.\n");
return;
}
while (1) {
ret = avcodec_receive_frame(pCodecCtx, pFrame);
if(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
{
break;
}else if(ret < 0){
qDebug() << "Error while decoding";
break;
}
ret = av_hwframe_transfer_data(swFrame, pFrame, 0);
if(ret < 0)
{
qDebug() << "Error transferring the data to system memory";
break;
}
sws_scale(img_convert_ctx,
(uint8_t const * const *) swFrame->data,
swFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data,
pFrameRGB->linesize);
static int index = 0;
qDebug() << "frame" << index++;
}
}
av_packet_unref(&packet);
msleep(30); // slow down a little, otherwise playback runs too fast
}
av_free(out_buffer);
av_frame_free(&pFrame);
av_frame_free(&swFrame);
av_frame_free(&tempFrame);
av_frame_free(&pFrameRGB);
sws_freeContext(img_convert_ctx);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
- hw_decoder_init: creates the hardware device context and attaches it to the decoder context
static enum AVPixelFormat hw_pix_fmt;
static AVBufferRef *hw_device_ctx = NULL;
static int hw_decoder_init(AVCodecContext *ctx, const enum AVHWDeviceType type)
{
    int err = 0;
    if ((err = av_hwdevice_ctx_create(&hw_device_ctx, type, NULL, NULL, 0)) < 0) {
        fprintf(stderr, "Failed to create specified HW device.\n");
        return err;
    }
    ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
    return err;
}
- get_hw_format: the get_format callback; the decoder calls it with the pixel formats it can output, and returning hw_pix_fmt keeps decoding on hardware surfaces
static enum AVPixelFormat get_hw_format(AVCodecContext *ctx,
                                        const enum AVPixelFormat *pix_fmts)
{
    const enum AVPixelFormat *p;
    for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == hw_pix_fmt)
            return *p;
    }
    fprintf(stderr, "Failed to get HW surface format.\n");
    return AV_PIX_FMT_NONE;
}