I. Introduction
FFmpeg is an open-source multimedia framework that is well suited to recording, converting, streaming, and playing audio and video.
II. Code
The example code reads a local video file, decodes it, and writes the raw binary YUV data of each frame to disk.
Note: the code only handles the YUV420P format; other formats such as NV12/NV21 need only small changes (see the NV12 sketch after the listing).
1. yuv_decoder.cpp
cpp
#include <iostream>
#include <iomanip>
#include <fstream>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libavutil/rational.h>
}

// Print a summary table of the container and the video stream
void print_media_info(AVFormatContext* fmt_ctx, int video_stream) {
    std::cout << "\n\033[1;36m======= Media Stream Analysis =======\033[0m\n";
    std::cout << std::left << std::setw(20) << "Container Format:"
              << fmt_ctx->iformat->long_name << " (" << fmt_ctx->iformat->name << ")\n";
    std::cout << std::setw(20) << "Duration:"
              << (fmt_ctx->duration / AV_TIME_BASE) << " seconds\n";
    std::cout << std::setw(20) << "Bitrate:"
              << (fmt_ctx->bit_rate / 1000) << " Kbps\n";
    if (video_stream != -1) {
        AVStream* stream = fmt_ctx->streams[video_stream];
        std::cout << "\n\033[1;33m[Video Stream #" << video_stream << "]\033[0m\n";
        std::cout << std::setw(20) << "Codec:"
                  << avcodec_get_name(stream->codecpar->codec_id)
                  << " (ID:" << stream->codecpar->codec_id << ")\n";
        std::cout << std::setw(20) << "Resolution:"
                  << stream->codecpar->width << "x" << stream->codecpar->height << "\n";
        std::cout << std::setw(20) << "Pixel Format:"
                  << av_get_pix_fmt_name((AVPixelFormat)stream->codecpar->format) << "\n";
        std::cout << std::setw(20) << "Framerate:"
                  << av_q2d(stream->avg_frame_rate) << " fps\n";
        std::cout << std::setw(20) << "Timebase:"
                  << stream->time_base.num << "/" << stream->time_base.den << "\n";
    }
    std::cout << "\033[1;36m=====================================\033[0m\n\n";
}

// Sanity-check that the frame really is YUV420P (or its full-range JPEG variant)
void validate_yuv420p(const AVFrame* frame) {
    if (frame->format != AV_PIX_FMT_YUV420P && frame->format != AV_PIX_FMT_YUVJ420P) {
        std::cerr << "\033[1;31mERROR: Unexpected pixel format: "
                  << av_get_pix_fmt_name((AVPixelFormat)frame->format)
                  << "\033[0m" << std::endl;
    }
    if (frame->linesize[1] != frame->linesize[2]) {
        std::cerr << "\033[1;31mERROR: UV plane stride mismatch: U="
                  << frame->linesize[1] << " V=" << frame->linesize[2]
                  << "\033[0m" << std::endl;
    }
}

// Write one decoded YUV420P frame to NNNN.yuv, copying each plane row by row
// so that the linesize padding is stripped
void save_frame_yuv420p(const AVFrame* frame, int index) {
    char filename[32];
    snprintf(filename, sizeof(filename), "%04d.yuv", index);
    std::ofstream file(filename, std::ios::binary);
    if (!file.is_open()) return;
    // Y plane
    for (int y = 0; y < frame->height; y++) {
        file.write(reinterpret_cast<const char*>(frame->data[0] + y * frame->linesize[0]), frame->width);
    }
    // U plane
    for (int y = 0; y < frame->height / 2; y++) {
        file.write(reinterpret_cast<const char*>(frame->data[1] + y * frame->linesize[1]), frame->width / 2);
    }
    // V plane
    for (int y = 0; y < frame->height / 2; y++) {
        file.write(reinterpret_cast<const char*>(frame->data[2] + y * frame->linesize[2]), frame->width / 2);
    }
}

int main(int argc, char** argv) {
    if (argc < 2) {
        std::cerr << "Usage: " << argv[0] << " <input_file>" << std::endl;
        return -1;
    }

    AVFormatContext* fmt_ctx = nullptr;
    if (avformat_open_input(&fmt_ctx, argv[1], nullptr, nullptr) != 0) {
        std::cerr << "Could not open input file" << std::endl;
        return -1;
    }
    if (avformat_find_stream_info(fmt_ctx, nullptr) < 0) {
        std::cerr << "Could not find stream information" << std::endl;
        avformat_close_input(&fmt_ctx);
        return -1;
    }

    // Locate the first video stream
    int video_stream_index = -1;
    for (unsigned int i = 0; i < fmt_ctx->nb_streams; i++) {
        if (fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = i;
            break;
        }
    }
    if (video_stream_index == -1) {
        std::cerr << "Could not find video stream" << std::endl;
        avformat_close_input(&fmt_ctx);
        return -1;
    }

    print_media_info(fmt_ctx, video_stream_index); // print media information

    const AVCodec* codec = avcodec_find_decoder(fmt_ctx->streams[video_stream_index]->codecpar->codec_id);
    if (!codec) {
        std::cerr << "Unsupported codec" << std::endl;
        avformat_close_input(&fmt_ctx);
        return -1;
    }
    AVCodecContext* codec_ctx = avcodec_alloc_context3(codec);
    if (!codec_ctx) {
        std::cerr << "Could not allocate codec context" << std::endl;
        avformat_close_input(&fmt_ctx);
        return -1;
    }
    if (avcodec_parameters_to_context(codec_ctx, fmt_ctx->streams[video_stream_index]->codecpar) < 0) {
        std::cerr << "Could not initialize codec context" << std::endl;
        avcodec_free_context(&codec_ctx);
        avformat_close_input(&fmt_ctx);
        return -1;
    }
    if (avcodec_open2(codec_ctx, codec, nullptr) < 0) {
        std::cerr << "Could not open codec" << std::endl;
        avcodec_free_context(&codec_ctx);
        avformat_close_input(&fmt_ctx);
        return -1;
    }

    std::cout << "\033[1;32mDecoder Initialized Successfully!\033[0m\n";
    std::cout << "Output Format: " << av_get_pix_fmt_name(codec_ctx->pix_fmt) << "\n\n";

    AVFrame* frame = av_frame_alloc();
    AVPacket* pkt = av_packet_alloc();
    int frame_count = 0;

    while (av_read_frame(fmt_ctx, pkt) >= 0) {
        if (pkt->stream_index == video_stream_index) {
            if (avcodec_send_packet(codec_ctx, pkt) < 0) {
                std::cerr << "Error sending packet" << std::endl;
                av_packet_unref(pkt);
                break;
            }
            // Pull every frame the decoder has ready for this packet
            int ret = 0;
            while ((ret = avcodec_receive_frame(codec_ctx, frame)) >= 0) {
                std::cout << "Decoded Frame #" << frame_count
                          << " (PTS: " << frame->pts
                          << ", Format: " << av_get_pix_fmt_name((AVPixelFormat)frame->format)
                          << ")\r" << std::flush;
                validate_yuv420p(frame); // format check, can be skipped
                save_frame_yuv420p(frame, frame_count++);
            }
            // EAGAIN (needs more input) and EOF are expected; anything else is an error
            if (ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
                std::cerr << "Error receiving frame" << std::endl;
            }
        }
        av_packet_unref(pkt);
    }

    // Flush the decoder so that frames still buffered inside it are written too
    avcodec_send_packet(codec_ctx, nullptr);
    while (avcodec_receive_frame(codec_ctx, frame) >= 0) {
        validate_yuv420p(frame);
        save_frame_yuv420p(frame, frame_count++);
    }

    std::cout << "\n\n\033[1;32mDecoding Completed! Total frames: " << frame_count << "\033[0m\n";

    av_packet_free(&pkt);
    av_frame_free(&frame);
    avcodec_free_context(&codec_ctx);
    avformat_close_input(&fmt_ctx);
    return 0;
}
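As noted above, switching to another pixel format only changes the plane-writing step. Below is a minimal sketch for NV12, assuming the decoder actually outputs AV_PIX_FMT_NV12 (for example, with some hardware decoders); the helper name save_frame_nv12 is illustrative and not part of the program above, and it relies on the same headers as the listing.
cpp
// Sketch: writing an NV12 frame (full-resolution Y plane + one interleaved UV plane).
// Assumes frame->format == AV_PIX_FMT_NV12.
void save_frame_nv12(const AVFrame* frame, std::ofstream& file) {
    // Y plane: width bytes per row, height rows
    for (int y = 0; y < frame->height; y++) {
        file.write(reinterpret_cast<const char*>(frame->data[0] + y * frame->linesize[0]), frame->width);
    }
    // UV plane: half height, U and V bytes interleaved, so each row is still `width` bytes
    for (int y = 0; y < frame->height / 2; y++) {
        file.write(reinterpret_cast<const char*>(frame->data[1] + y * frame->linesize[1]), frame->width);
    }
}
NV21 shares the same memory layout with the U and V bytes swapped inside data[1], so the same write loop produces valid NV21 output as long as the player is told the correct format.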
2. CMakeLists.txt
cmake
cmake_minimum_required(VERSION 3.10)
project(FFmpegYUVDecoder)
set(CMAKE_CXX_STANDARD 11)
find_package(PkgConfig REQUIRED)
pkg_check_modules(AVCODEC REQUIRED libavcodec)
pkg_check_modules(AVFORMAT REQUIRED libavformat)
pkg_check_modules(AVUTIL REQUIRED libavutil)
add_executable(yuv_decoder yuv_decoder.cpp)
target_include_directories(yuv_decoder PRIVATE
    ${AVCODEC_INCLUDE_DIRS}
    ${AVFORMAT_INCLUDE_DIRS}
    ${AVUTIL_INCLUDE_DIRS}
)
target_link_libraries(yuv_decoder
    ${AVCODEC_LIBRARIES}
    ${AVFORMAT_LIBRARIES}
    ${AVUTIL_LIBRARIES}
)
The required libraries should all be available once the FFmpeg development packages are installed; the example only uses libavcodec, libavformat, and libavutil.
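As an alternative to CMake, the same three pkg-config modules can drive a one-line manual build (g++ is shown here as an assumption; clang++ works the same way):
bash
g++ -std=c++11 yuv_decoder.cpp -o yuv_decoder $(pkg-config --cflags --libs libavcodec libavformat libavutil)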
3. Directory layout
|------ CMakeLists.txt
|------ yuv_decoder.cpp
III. Build and Run
1. Install FFmpeg
If FFmpeg is not installed yet, it can be installed as follows:
bash
# Ubuntu/Debian
sudo apt update && sudo apt install ffmpeg
# check the version
ffmpeg -version
If any of the development libraries are missing, install them with:
bash
sudo apt install libavcodec-dev libavformat-dev libavutil-dev
2. Build
bash
mkdir build
cd build
cmake ..
make -j4
3. Run
The program takes a single argument: the path to the input video file. The decoded .yuv files are written to the current working directory.
bash
./yuv_decoder input.mp4
4. Verify
Use ffplay to check that the saved YUV data is correct. For example, for 1920x1080 frames in yuv420p format:
bash
ffplay -video_size 1920x1080 -pixel_format yuv420p 0001.yuv
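As a quick sanity check before playing, each 1920x1080 yuv420p frame occupies exactly 1920 * 1080 * 3 / 2 = 3,110,400 bytes, so the file size alone catches most mistakes (stat is shown here; any file-size tool works):
bash
stat -c %s 0001.yuv   # expect 3110400 bytes for one 1920x1080 yuv420p frame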