1. Calling the FFmpeg API
Open Qt and create a new project.
Add the following to the project's .pro file:
TEMPLATE = app
CONFIG += console
CONFIG -= app_bundle
CONFIG -= qt
SOURCES += \
main.c
INCLUDEPATH += /home/wxw/ffmpeg_build/include
LIBS += /home/wxw/ffmpeg_build/lib/libavcodec.a \
/home/wxw/ffmpeg_build/lib/libavdevice.a \
/home/wxw/ffmpeg_build/lib/libavfilter.a \
/home/wxw/ffmpeg_build/lib/libavformat.a \
/home/wxw/ffmpeg_build/lib/libavutil.a \
/home/wxw/ffmpeg_build/lib/libswresample.a \
/home/wxw/ffmpeg_build/lib/libswscale.a
The purpose is to add FFmpeg's library and header paths; the actual paths will differ from one machine to another. (Either dynamic or static libraries can be used.)
In the main file, call the FFmpeg API directly, then build and run. For example:
#include <stdio.h>
// Include the FFmpeg header
#include "libavutil/avutil.h"

int main()
{
    printf("Hello FFMPEG, version is %s\n", av_version_info());
    return 0;
}
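To further confirm that every FFmpeg library listed under LIBS is actually being linked, a small variation of the same program (a sketch using the same .pro settings; it only uses the standard *_version() calls) can print the version of each library. Each *_version() function returns an integer packed as major<<16 | minor<<8 | micro. Note that, depending on how FFmpeg was built, statically linking libavformat/libavcodec may additionally require the external libraries listed in the .pro file of step 5.
#include <stdio.h>
#include "libavutil/avutil.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"

int main()
{
    // Decode the packed version integers with the AV_VERSION_* helper macros
    printf("libavutil   %u.%u.%u\n",
           AV_VERSION_MAJOR(avutil_version()),
           AV_VERSION_MINOR(avutil_version()),
           AV_VERSION_MICRO(avutil_version()));
    printf("libavcodec  %u.%u.%u\n",
           AV_VERSION_MAJOR(avcodec_version()),
           AV_VERSION_MINOR(avcodec_version()),
           AV_VERSION_MICRO(avcodec_version()));
    printf("libavformat %u.%u.%u\n",
           AV_VERSION_MAJOR(avformat_version()),
           AV_VERSION_MINOR(avformat_version()),
           AV_VERSION_MICRO(avformat_version()));
    return 0;
}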
2. Installing the SDL library
SDL download page: https://www.libsdl.org/download-2.0.php
Download the SDL source archive, SDL2-2.0.10.tar.gz.
Unpack it, enter the directory, and run the following commands in order:
./autogen.sh
# Actual paths depend on your environment; they specify the install directory for the libraries and the directory for the binaries
./configure --prefix=/home/wxw/ffmpeg_build --bindir=/home/wxw/bin
make
sudo make install
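Before wiring SDL into a Qt project (next step), the install can be sanity-checked with a minimal program that prints both the SDL version the headers were compiled against and the version of the library actually linked. This is just a sketch; build it against the same /home/wxw/ffmpeg_build/include and lib paths used by the Qt examples below.
#include <stdio.h>
#include "SDL2/SDL.h"

int main()
{
    SDL_version compiled;
    SDL_version linked;
    SDL_VERSION(&compiled);   // version recorded in the headers at compile time
    SDL_GetVersion(&linked);  // version of the library loaded at run time
    printf("SDL compiled against %d.%d.%d, linked with %d.%d.%d\n",
           compiled.major, compiled.minor, compiled.patch,
           linked.major, linked.minor, linked.patch);
    return 0;
}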
3. Displaying an empty window with SDL from Qt
Open Qt and create a new project.
Add the following to the project's .pro file:
TEMPLATE = app
CONFIG += console
CONFIG -= app_bundle
CONFIG -= qt
SOURCES += \
main.c
INCLUDEPATH += /home/wxw/ffmpeg_build/include
LIBS += /home/wxw/ffmpeg_build/lib/libSDL2.so
The purpose is to add SDL's library and header paths; the actual paths will differ from one machine to another.
Build and run main.c; an empty window is shown for three seconds. The code is as follows:
#include <stdio.h>
#include "SDL2/SDL.h"

int main()
{
    // Declare a window handle
    SDL_Window *window_ptr = NULL;
    printf("test SDL demo 1.\n");
    // Create the window
    SDL_Init(SDL_INIT_VIDEO);
    window_ptr = SDL_CreateWindow("Basic Window", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, 640, 480,
                                  SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
    if(window_ptr == NULL)
    {
        printf("%s: Call 'SDL_CreateWindow' failure. \n", __FUNCTION__);
        goto ERR_EXIT;
    }
    SDL_Delay(3000);
    // Destroy the window and release resources
    SDL_DestroyWindow(window_ptr);
ERR_EXIT:
    SDL_Quit();
    return 0;
}
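The SDL_Delay(3000) above simply keeps the window on screen for three seconds, during which it does not respond to input. A common variation (a sketch, not part of the original example) is to replace that single SDL_Delay(3000) line with a small event loop that keeps the window alive until the user closes it:
    // Replacement for SDL_Delay(3000): run until the user closes the window
    int quit = 0;
    while (!quit)
    {
        SDL_Event event;
        while (SDL_PollEvent(&event))    // drain all pending events
        {
            if (event.type == SDL_QUIT)  // close button / window manager quit
                quit = 1;
        }
        SDL_Delay(10);                   // avoid busy-waiting
    }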
4. Playing a YUV video with SDL from Qt
Open Qt and create a new project.
Add the following to the project's .pro file:
TEMPLATE = app
CONFIG += console
CONFIG -= app_bundle
CONFIG -= qt
SOURCES += \
main.c
INCLUDEPATH += /home/wxw/ffmpeg_build/include
LIBS += /home/wxw/ffmpeg_build/lib/libSDL2.so
The purpose is to add SDL's library and header paths; the actual paths will differ from one machine to another.
Build and run main.c; a window appears and plays the YUV video. The key points are: the SDL initialization and usage flow, reading and rendering the YUV data, handling window resizing, and the refresh logic (a timer thread pushes a refresh event roughly every 40 ms).
The code is as follows:
#include <stdio.h>
#include <string.h>
#include "SDL2/SDL.h"

// Custom event types
#define REFRESH_EVENT  (SDL_USEREVENT + 1)  // request a frame refresh
#define QUIT_EVENT     (SDL_USEREVENT + 2)  // quit event

// YUV resolution
#define YUV_WIDTH   320
#define YUV_HEIGHT  240
// YUV pixel format
#define YUV_FORMAT  SDL_PIXELFORMAT_IYUV

int s_thread_exit = 0;  // exit flag: set to 1 to stop the refresh thread

int refresh_video_timer(void *data)
{
    while (!s_thread_exit)
    {
        SDL_Event event;
        event.type = REFRESH_EVENT;
        SDL_PushEvent(&event);
        SDL_Delay(40);
    }
    s_thread_exit = 0;
    // Push the quit event
    SDL_Event event;
    event.type = QUIT_EVENT;
    SDL_PushEvent(&event);
    return 0;
}
int main(int argc, char* argv[])
{
    // Initialize SDL
    if(SDL_Init(SDL_INIT_VIDEO))
    {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }
    // SDL objects
    SDL_Event event;                    // event
    SDL_Rect rect;                      // display rectangle
    SDL_Window *window = NULL;          // window
    SDL_Renderer *renderer = NULL;      // renderer
    SDL_Texture *texture = NULL;        // texture
    SDL_Thread *timer_thread = NULL;    // refresh-request thread
    uint32_t pixformat = YUV_FORMAT;    // YUV420P, i.e. SDL_PIXELFORMAT_IYUV
    // 1. YUV resolution
    int video_width = YUV_WIDTH;
    int video_height = YUV_HEIGHT;
    // 2. Display window resolution
    int win_width = YUV_WIDTH;
    int win_height = YUV_HEIGHT;
    // YUV file handle
    FILE *video_fd = NULL;
    const char *yuv_path = "/home/wxw/QtProject/build-test_ffmpeg-Desktop_Qt_5_12_10_GCC_64bit-Debug/yuv420p_320x240.yuv";
    size_t video_buff_len = 0;
    uint8_t *video_buf = NULL;  // frame data is read into this buffer first
    // The test file is in YUV420P format
    uint32_t y_frame_len = video_width * video_height;
    uint32_t u_frame_len = video_width * video_height / 4;
    uint32_t v_frame_len = video_width * video_height / 4;
    uint32_t yuv_frame_len = y_frame_len + u_frame_len + v_frame_len;
    // Create the window
    window = SDL_CreateWindow("Simplest YUV Player",
                              SDL_WINDOWPOS_UNDEFINED,
                              SDL_WINDOWPOS_UNDEFINED,
                              video_width, video_height,
                              SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
    if(!window)
    {
        fprintf(stderr, "SDL: could not create window, err:%s\n", SDL_GetError());
        goto _FAIL;
    }
    // Create a renderer for the window
    renderer = SDL_CreateRenderer(window, -1, 0);
    // Create a texture on the renderer
    texture = SDL_CreateTexture(renderer,
                                pixformat,
                                SDL_TEXTUREACCESS_STREAMING,
                                video_width,
                                video_height);
    // Allocate the frame buffer
    video_buf = (uint8_t*)malloc(yuv_frame_len);
    if(!video_buf)
    {
        fprintf(stderr, "Failed to allocate yuv frame space!\n");
        goto _FAIL;
    }
    // Open the YUV file
    video_fd = fopen(yuv_path, "rb");
    if(!video_fd)
    {
        fprintf(stderr, "Failed to open yuv file\n");
        goto _FAIL;
    }
    // Create the refresh-request thread
    timer_thread = SDL_CreateThread(refresh_video_timer,
                                    "refresh_timer",
                                    NULL);
    while (1)
    {
        // Wait for an event from SDL's event queue
        SDL_WaitEvent(&event);
        if(event.type == REFRESH_EVENT)  // frame refresh event
        {
            video_buff_len = fread(video_buf, 1, yuv_frame_len, video_fd);
            if(video_buff_len != yuv_frame_len)
            {
                fprintf(stderr, "Failed to read a full frame from the yuv file (end of file?)\n");
                goto _FAIL;
            }
            // Upload the frame to the texture; the pitch is video_width (320) for the Y plane
            SDL_UpdateTexture(texture, NULL, video_buf, video_width);
            // Display area; adjusting w and h scales the picture
            rect.x = 0;
            rect.y = 0;
            float w_ratio = win_width * 1.0 / video_width;
            float h_ratio = win_height * 1.0 / video_height;
            // 320x240: how to preserve the original aspect ratio? (see the sketch after this listing)
            rect.w = video_width * w_ratio;
            rect.h = video_height * h_ratio;
            // rect.w = video_width * 0.5;
            // rect.h = video_height * 0.5;
            // Clear the current display
            SDL_RenderClear(renderer);
            // Copy the texture to the renderer
            SDL_RenderCopy(renderer, texture, NULL, &rect);
            // Present
            SDL_RenderPresent(renderer);
        }
        else if(event.type == SDL_WINDOWEVENT)
        {
            // Window resized: fetch the new window size
            SDL_GetWindowSize(window, &win_width, &win_height);
            printf("SDL_WINDOWEVENT win_width:%d, win_height:%d\n", win_width, win_height);
        }
        else if(event.type == SDL_QUIT)  // quit event from the window system
        {
            s_thread_exit = 1;
        }
        else if(event.type == QUIT_EVENT)
        {
            break;
        }
    }
_FAIL:
    s_thread_exit = 1;  // make sure the refresh thread exits
    // Release resources
    if(timer_thread)
        SDL_WaitThread(timer_thread, NULL);  // wait for the thread to finish
    if(video_buf)
        free(video_buf);
    if(video_fd)
        fclose(video_fd);
    if(texture)
        SDL_DestroyTexture(texture);
    if(renderer)
        SDL_DestroyRenderer(renderer);
    if(window)
        SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
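The rect calculation in the refresh branch stretches the picture to fill the whole window, which distorts it when the window is resized. To answer the question in the comment about keeping the 320x240 aspect ratio, one option (a sketch reusing the same win_width/win_height and video_width/video_height variables, meant as a drop-in replacement for the rect/ratio lines in the REFRESH_EVENT branch) is to scale by the smaller of the two ratios and center the result, i.e. letterboxing:
            // Letterbox: scale by the smaller ratio so the whole frame fits,
            // then center the picture inside the window
            float w_ratio = win_width * 1.0 / video_width;
            float h_ratio = win_height * 1.0 / video_height;
            float ratio = (w_ratio < h_ratio) ? w_ratio : h_ratio;
            rect.w = (int)(video_width * ratio);
            rect.h = (int)(video_height * ratio);
            rect.x = (win_width - rect.w) / 2;
            rect.y = (win_height - rect.h) / 2;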
5. Parsing a video file in Qt to extract the AAC audio
Open Qt and create a new project.
Add the following to the project's .pro file:
TEMPLATE = app
CONFIG += console
CONFIG -= app_bundle
CONFIG -= qt
SOURCES += \
main.c
INCLUDEPATH += /home/wxw/ffmpeg_build/include
LIBS += \
/home/wxw/ffmpeg_build/lib/libavformat.a \
/home/wxw/ffmpeg_build/lib/libavcodec.a \
/home/wxw/ffmpeg_build/lib/libavdevice.a \
/home/wxw/ffmpeg_build/lib/libavfilter.a \
/home/wxw/ffmpeg_build/lib/libavutil.a \
/home/wxw/ffmpeg_build/lib/libswresample.a \
/home/wxw/ffmpeg_build/lib/libswscale.a \
/home/wxw/ffmpeg_build/lib/libmp3lame.a \
/home/wxw/ffmpeg_build/lib/libfdk-aac.a \
/home/wxw/ffmpeg_build/lib/libopus.a \
/home/wxw/ffmpeg_build/lib/libvpx.a \
/home/wxw/ffmpeg_build/lib/libx265.a \
/home/wxw/ffmpeg_build/lib/libx264.a
LIBS += -lpthread -lnuma -lz -lm -lva-drm -lva -lva-x11 -lvdpau -lX11 -lXext -ldl -lvorbisenc -lvorbis
The purpose is to add FFmpeg's library and header paths; the actual paths will differ from one machine to another.
Build and run main.c; it reads the input file, extracts the audio data, and writes it to a new file. The key points are: initializing the FFmpeg API, obtaining the file information, reading the file contents and locating the audio stream, and prepending an ADTS header to each raw audio packet before writing it to the .aac file.
When running, pass the source and destination file paths as command-line arguments (in Qt Creator these are set in the project's Run settings). An absolute path was used here; for some reason a relative path did not work, but that is not important for this example.
The code is as follows:
#include <stdio.h>
#include "libavutil/log.h"
#include "libavformat/avio.h"
#include "libavformat/avformat.h"

#define ADTS_HEADER_LEN 7

const int sampling_frequencies[] = {
    96000, // 0x0
    88200, // 0x1
    64000, // 0x2
    48000, // 0x3
    44100, // 0x4
    32000, // 0x5
    24000, // 0x6
    22050, // 0x7
    16000, // 0x8
    12000, // 0x9
    11025, // 0xa
    8000   // 0xb
    // 0xc, 0xd, 0xe, 0xf are reserved
};
int adts_header(char * const p_adts_header, const int data_length,
                const int profile, const int samplerate,
                const int channels)
{
    int sampling_frequency_index = 3;  // default: 48000 Hz
    int adtsLen = data_length + 7;
    int frequencies_size = sizeof(sampling_frequencies) / sizeof(sampling_frequencies[0]);
    int i = 0;
    for(i = 0; i < frequencies_size; i++)
    {
        if(sampling_frequencies[i] == samplerate)
        {
            sampling_frequency_index = i;
            break;
        }
    }
    if(i >= frequencies_size)
    {
        printf("unsupported samplerate:%d\n", samplerate);
        return -1;
    }
    p_adts_header[0] = 0xff;                                    // syncword 0xfff, high 8 bits
    p_adts_header[1] = 0xf0;                                    // syncword 0xfff, low 4 bits
    p_adts_header[1] |= (0 << 3);                               // MPEG version: 0 for MPEG-4, 1 for MPEG-2, 1 bit
    p_adts_header[1] |= (0 << 1);                               // layer: 0, 2 bits
    p_adts_header[1] |= 1;                                      // protection absent: 1, 1 bit
    p_adts_header[2] = (profile) << 6;                          // profile, 2 bits
    p_adts_header[2] |= (sampling_frequency_index & 0x0f) << 2; // sampling frequency index, 4 bits
    p_adts_header[2] |= (0 << 1);                               // private bit: 0, 1 bit
    p_adts_header[2] |= (channels & 0x04) >> 2;                 // channel configuration, high 1 bit
    p_adts_header[3] = (channels & 0x03) << 6;                  // channel configuration, low 2 bits
    p_adts_header[3] |= (0 << 5);                               // original: 0, 1 bit
    p_adts_header[3] |= (0 << 4);                               // home: 0, 1 bit
    p_adts_header[3] |= (0 << 3);                               // copyright id bit: 0, 1 bit
    p_adts_header[3] |= (0 << 2);                               // copyright id start: 0, 1 bit
    p_adts_header[3] |= ((adtsLen & 0x1800) >> 11);             // frame length, high 2 bits
    p_adts_header[4] = (uint8_t)((adtsLen & 0x7f8) >> 3);       // frame length, middle 8 bits
    p_adts_header[5] = (uint8_t)((adtsLen & 0x7) << 5);         // frame length, low 3 bits
    p_adts_header[5] |= 0x1f;                                   // buffer fullness 0x7ff, high 5 bits
    p_adts_header[6] = 0xfc;                                    // 11111100: buffer fullness 0x7ff, low 6 bits
    // number_of_raw_data_blocks_in_frame:
    // an ADTS frame contains number_of_raw_data_blocks_in_frame + 1 raw AAC frames.
    return 0;
}
int main(int argc, char *argv[])
{
    int ret = -1;
    char errors[1024];
    char *in_filename = NULL;
    char *aac_filename = NULL;
    FILE *aac_fd = NULL;
    int audio_index = -1;
    int len = 0;
    AVFormatContext *ifmt_ctx = NULL;
    AVPacket pkt;
    // Set the log level
    av_log_set_level(AV_LOG_DEBUG);
    if(argc < 3)
    {
        av_log(NULL, AV_LOG_DEBUG, "usage: %s <input file> <output aac file>\n", argv[0]);
        return -1;
    }
    in_filename = argv[1];   // input file
    aac_filename = argv[2];  // output file
    if(in_filename == NULL || aac_filename == NULL)
    {
        av_log(NULL, AV_LOG_DEBUG, "src or dst file is null, plz check them!\n");
        return -1;
    }
    aac_fd = fopen(aac_filename, "wb");
    if (!aac_fd)
    {
        av_log(NULL, AV_LOG_DEBUG, "Could not open destination file %s\n", aac_filename);
        return -1;
    }
    // Open the input file
    if((ret = avformat_open_input(&ifmt_ctx, in_filename, NULL, NULL)) < 0)
    {
        av_strerror(ret, errors, 1024);
        av_log(NULL, AV_LOG_DEBUG, "Could not open source file: %s, %d(%s)\n",
               in_filename,
               ret,
               errors);
        return -1;
    }
    // Retrieve stream information
    if((ret = avformat_find_stream_info(ifmt_ctx, NULL)) < 0)
    {
        av_strerror(ret, errors, 1024);
        av_log(NULL, AV_LOG_DEBUG, "failed to find stream information: %s, %d(%s)\n",
               in_filename,
               ret,
               errors);
        return -1;
    }
    // Dump the media information
    av_dump_format(ifmt_ctx, 0, in_filename, 0);
    // Initialize the packet
    av_init_packet(&pkt);
    // Find the stream index of the audio stream
    audio_index = av_find_best_stream(ifmt_ctx, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
    if(audio_index < 0)
    {
        av_log(NULL, AV_LOG_DEBUG, "Could not find %s stream in input file %s\n",
               av_get_media_type_string(AVMEDIA_TYPE_AUDIO),
               in_filename);
        return AVERROR(EINVAL);
    }
    // Print the AAC profile
    printf("audio profile:%d, FF_PROFILE_AAC_LOW:%d\n",
           ifmt_ctx->streams[audio_index]->codecpar->profile,
           FF_PROFILE_AAC_LOW);
    if(ifmt_ctx->streams[audio_index]->codecpar->codec_id != AV_CODEC_ID_AAC)
    {
        printf("the media file does not contain an AAC stream; its codec_id is %d\n",
               ifmt_ctx->streams[audio_index]->codecpar->codec_id);
        goto failed;
    }
    // Read the media file and write the AAC frames to the local file
    while(av_read_frame(ifmt_ctx, &pkt) >= 0)
    {
        if(pkt.stream_index == audio_index)
        {
            char adts_header_buf[7] = {0};
            adts_header(adts_header_buf, pkt.size,
                        ifmt_ctx->streams[audio_index]->codecpar->profile,
                        ifmt_ctx->streams[audio_index]->codecpar->sample_rate,
                        ifmt_ctx->streams[audio_index]->codecpar->channels);
            // Write the ADTS header; not needed for TS input, since packets demuxed from TS already carry an ADTS header
            fwrite(adts_header_buf, 1, 7, aac_fd);
            // Write the raw AAC data
            len = fwrite(pkt.data, 1, pkt.size, aac_fd);
            if(len != pkt.size)
            {
                av_log(NULL, AV_LOG_DEBUG, "warning, length of written data is not equal to pkt.size(%d, %d)\n",
                       len,
                       pkt.size);
            }
        }
        av_packet_unref(&pkt);
    }
failed:
    // Close the input file
    if(ifmt_ctx)
    {
        avformat_close_input(&ifmt_ctx);
    }
    if(aac_fd)
    {
        fclose(aac_fd);
    }
    return 0;
}
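To check that the ADTS headers were written correctly, the first 7 bytes of the generated .aac file can be read back and decoded according to the same bit layout used in adts_header() above. This is a separate little sketch, not part of the extraction program; the default file name out.aac is only a placeholder, so pass the real output path as the first argument.
#include <stdio.h>
#include <stdint.h>

// Minimal check: read back the first 7 bytes of the generated .aac file and
// decode the fields written by adts_header() above.
int main(int argc, char *argv[])
{
    const char *path = (argc > 1) ? argv[1] : "out.aac";  // "out.aac" is just a placeholder name
    uint8_t h[7];
    FILE *fd = fopen(path, "rb");
    if(!fd || fread(h, 1, 7, fd) != 7)
    {
        fprintf(stderr, "failed to read an ADTS header from %s\n", path);
        if(fd) fclose(fd);
        return -1;
    }
    fclose(fd);

    int syncword  = (h[0] << 4) | (h[1] >> 4);                          // expected: 0xFFF
    int profile   = (h[2] >> 6) & 0x03;                                 // 2-bit profile field (1 for AAC LC)
    int sf_index  = (h[2] >> 2) & 0x0f;                                 // index into sampling_frequencies[]
    int channels  = ((h[2] & 0x01) << 2) | ((h[3] >> 6) & 0x03);        // channel configuration
    int frame_len = ((h[3] & 0x03) << 11) | (h[4] << 3) | (h[5] >> 5);  // header + payload length

    printf("syncword: 0x%03X\n", syncword);
    printf("profile: %d, sampling_frequency_index: %d\n", profile, sf_index);
    printf("channel_configuration: %d, frame_length: %d bytes\n", channels, frame_len);
    return 0;
}
For an AAC LC stream extracted by the program above, it should report syncword 0xFFF, profile 1, and a frame length equal to the size of the first packet plus 7.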
Note: for the definition of the ADTS format, see 《AAC文件的ADTS格式解析》 (on parsing the ADTS format of AAC files).