In an earlier post, we got to know AVFrame by saving decoded frames as images:
[FFmpeg学习]从视频中获取图片_ffmpeg 获取图片-CSDN博客
Once an AVFrame has been decoded, it can also be handed to SDL for display, which gives a simple playback effect.
References:
SDL,ffmpeg实现简单视频播放器_ffmpeg sdl 播放器-CSDN博客
SDL2 简单介绍以及Windows开发环境搭建-CSDN博客
The example below only displays the video; audio is not handled.
```cpp
// ffmpegTest.cpp : This file contains the "main" function. Program execution begins and ends there.
//
#include <iostream>
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include <libavutil/log.h>
#include <libavformat/avformat.h>
}
using namespace std;
// To grab a picture from an MP4 file with the FFmpeg libraries, use a decoder to read video frames and re-encode one of them as an image.
// The program below keeps that savePicture helper and, in addition, renders every decoded frame with SDL to get a simple playback effect.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "SDL.h"
int savePicture(AVFrame* pFrame, char* out_name) { // encode one frame and save it as a JPEG image
int width = pFrame->width;
int height = pFrame->height;
AVCodecContext* pCodeCtx = NULL;
AVFormatContext* pFormatCtx = avformat_alloc_context();
// set the output container format (MJPEG)
pFormatCtx->oformat = av_guess_format("mjpeg", NULL, NULL);
// create and initialize the output AVIOContext
if (avio_open(&pFormatCtx->pb, out_name, AVIO_FLAG_READ_WRITE) < 0) {
printf("Couldn't open output file.");
return -1;
}
// add a new stream to the output context
AVStream* pAVStream = avformat_new_stream(pFormatCtx, NULL);
if (pAVStream == NULL) {
return -1;
}
AVCodecParameters* parameters = pAVStream->codecpar;
parameters->codec_id = pFormatCtx->oformat->video_codec;
parameters->codec_type = AVMEDIA_TYPE_VIDEO;
parameters->format = AV_PIX_FMT_YUVJ420P;
parameters->width = pFrame->width;
parameters->height = pFrame->height;
const AVCodec* pCodec = avcodec_find_encoder(pAVStream->codecpar->codec_id); // find the MJPEG encoder
if (!pCodec) {
printf("Could not find encoder\n");
return -1;
}
pCodeCtx = avcodec_alloc_context3(pCodec); // allocate the AVCodecContext
if (!pCodeCtx) {
fprintf(stderr, "Could not allocate video codec context\n");
exit(1);
}
if ((avcodec_parameters_to_context(pCodeCtx, pAVStream->codecpar)) < 0) {
fprintf(stderr, "Failed to copy %s codec parameters to decoder context\n",
av_get_media_type_string(AVMEDIA_TYPE_VIDEO));
return -1;
}
pCodeCtx->time_base = { 1, 25 }; // nominal 25 fps time base for this single-image stream
if (avcodec_open2(pCodeCtx, pCodec, NULL) < 0) { // open the encoder
printf("Could not open codec.");
return -1;
}
int ret = avformat_write_header(pFormatCtx, NULL);
if (ret < 0) {
printf("write_header fail\n");
return -1;
}
int y_size = width * height;
//Encode
// allocate a packet buffer large enough for the encoded image
AVPacket pkt;
av_new_packet(&pkt, y_size * 3);
// feed the frame to the encoder
ret = avcodec_send_frame(pCodeCtx, pFrame);
if (ret < 0) {
printf("Could not avcodec_send_frame.");
return -1;
}
// fetch the encoded packet
ret = avcodec_receive_packet(pCodeCtx, &pkt);
if (ret < 0) {
printf("Could not avcodec_receive_packet");
return -1;
}
ret = av_write_frame(pFormatCtx, &pkt);
if (ret < 0) {
printf("Could not av_write_frame");
return -1;
}
av_packet_unref(&pkt);
//Write Trailer
av_write_trailer(pFormatCtx);
avcodec_free_context(&pCodeCtx);
avio_close(pFormatCtx->pb);
avformat_free_context(pFormatCtx);
return 0;
}
class SDLHandle
{
public:
SDLHandle(int w, int h)
{
m_rect.x = 0;
m_rect.y = 0;
m_rect.w = w;
m_rect.h = h;
SdlInit();
}
~SDLHandle()
{
if (m_pTexture)
{
SDL_DestroyTexture(m_pTexture);
}
if (m_pRender)
{
SDL_DestroyRenderer(m_pRender);
}
if (m_pWnd)
{
SDL_DestroyWindow(m_pWnd);
}
SDL_Quit();
}
bool CreateSDLWindow(const char* title, Uint32 flag)
{
m_pWnd = SDL_CreateWindow(title, SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, m_rect.w, m_rect.h, flag);
if (!m_pWnd)
{
printf("CreateWindows error:%s.\n", SDL_GetError());
return false;
}
m_pRender = SDL_CreateRenderer(m_pWnd, -1, 0);
if (!m_pRender)
{
return false;
}
m_pTexture = SDL_CreateTexture(m_pRender, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, m_rect.w, m_rect.h);
if (!m_pTexture)
{
return false;
}
return true;
}
void UpdateTexture(AVFrame* pFrame)
{
if (!pFrame)
{
return;
}
//SDL_UpdateTexture(m_pTexture, &m_rect, pFrame->data[0], pFrame->linesize[0]);
// assumes the frame is planar YUV 4:2:0 (AV_PIX_FMT_YUV420P), matching SDL_PIXELFORMAT_IYUV
SDL_UpdateYUVTexture(m_pTexture, &m_rect, pFrame->data[0], pFrame->linesize[0], pFrame->data[1], pFrame->linesize[1], pFrame->data[2], pFrame->linesize[2]);
SDL_RenderClear(m_pRender);
SDL_RenderCopy(m_pRender, m_pTexture, nullptr, &m_rect);
SDL_RenderPresent(m_pRender);
SDL_Delay(40); // crude fixed pacing: ~25 fps regardless of the source frame rate
}
private:
bool SdlInit()
{
if (SDL_Init(SDL_INIT_AUDIO | SDL_INIT_VIDEO | SDL_INIT_TIMER) < 0)
{
printf("sdl_init error:%s\n", SDL_GetError());
return false;
}
return true;
}
private:
SDL_Renderer* m_pRender = nullptr;
SDL_Window* m_pWnd = nullptr;
SDL_Texture* m_pTexture = nullptr;
SDL_Rect m_rect;
};
int getpic4() {
std::string filename = "test.mp4"; // input MP4 file
std::string outputFilename = "output4.jpg"; // output image name (pairs with savePicture, not used in this run)
SDLHandle* m_pSDlHandle = nullptr; // owns the SDL window, renderer and texture
AVFormatContext* formatContext = nullptr;
if (avformat_open_input(&formatContext, filename.c_str(), nullptr, nullptr) != 0) {
std::cerr << "Error opening input file" << std::endl;
return -1;
}
if (avformat_find_stream_info(formatContext, nullptr) < 0) {
std::cerr << "Error finding stream information" << std::endl;
avformat_close_input(&formatContext);
return -1;
}
const AVCodec* codec = nullptr;
int videoStreamIndex = -1;
for (unsigned int i = 0; i < formatContext->nb_streams; i++) {
if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStreamIndex = i;
codec = avcodec_find_decoder(formatContext->streams[i]->codecpar->codec_id);
break;
}
}
if (videoStreamIndex == -1 || codec == nullptr) {
std::cerr << "Error finding video stream or decoder" << std::endl;
avformat_close_input(&formatContext);
return -1;
}
AVCodecContext* codecContext = avcodec_alloc_context3(codec);
if (codecContext == nullptr) {
std::cerr << "Error allocating codec context" << std::endl;
avformat_close_input(&formatContext);
return -1;
}
if (avcodec_parameters_to_context(codecContext, formatContext->streams[videoStreamIndex]->codecpar) < 0) {
std::cerr << "Error setting codec parameters" << std::endl;
avcodec_free_context(&codecContext);
avformat_close_input(&formatContext);
return -1;
}
if (avcodec_open2(codecContext, codec, nullptr) < 0) {
std::cerr << "Error opening codec" << std::endl;
avcodec_free_context(&codecContext);
avformat_close_input(&formatContext);
return -1;
}
m_pSDlHandle = new SDLHandle(codecContext->width, codecContext->height);
if (!m_pSDlHandle->CreateSDLWindow("SDL_TEXT", SDL_WINDOW_OPENGL))
{
printf("CreateSDLWindow error:%s\n", SDL_GetError());
return -1;
}
AVPacket packet;
av_init_packet(&packet);
// read packets, decode them, and render each decoded frame with SDL
AVFrame* frame = av_frame_alloc();
while (av_read_frame(formatContext, &packet) >= 0) {
if (packet.stream_index == videoStreamIndex) {
int response = avcodec_send_packet(codecContext, &packet);
if (response < 0) {
std::cerr << "Error sending packet to decoder" << std::endl;
break;
}
response = avcodec_receive_frame(codecContext, frame);
if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) {
av_packet_unref(&packet); // no frame yet: release this packet and read the next one
continue;
}
else if (response < 0) {
std::cerr << "Error receiving frame from decoder" << std::endl;
break;
}
m_pSDlHandle->UpdateTexture(frame);
}
av_packet_unref(&packet);
}
// clean up
av_frame_free(&frame);
av_packet_unref(&packet);
avcodec_free_context(&codecContext);
avformat_close_input(&formatContext);
delete m_pSDlHandle;
return 0;
}
#undef main // SDL defines main as SDL_main on Windows; undo that so our own main is linked
int main(int argc, char* argv[]) {
av_log(NULL, AV_LOG_INFO, "...Hello world\n");
getpic4();
return 0;
}
```
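The playback loop above paces itself with a hard-coded SDL_Delay(40), i.e. roughly 25 fps no matter what the source actually is. A more accurate delay can be derived from the video stream's average frame rate. Below is a minimal sketch of such a helper; frameDelayMs is a hypothetical name and the 40 ms fallback is an assumption, not something the program above defines.

```cpp
// Hypothetical helper: per-frame delay in milliseconds derived from the
// stream's average frame rate, falling back to 40 ms (~25 fps) when the
// rate is unknown.
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/rational.h>
}

static int frameDelayMs(const AVStream* st)
{
    AVRational fr = st->avg_frame_rate;       // e.g. {30000, 1001} for 29.97 fps
    if (fr.num <= 0 || fr.den <= 0)
        return 40;                            // unknown rate: assume ~25 fps
    return (int)(1000.0 / av_q2d(fr) + 0.5);  // milliseconds per frame, rounded
}

// Usage sketch inside getpic4(), once videoStreamIndex is known:
//   int delayMs = frameDelayMs(formatContext->streams[videoStreamIndex]);
// and SDLHandle::UpdateTexture would call SDL_Delay(delayMs) instead of 40.
```

This keeps the decode-then-sleep structure of the example; a real player would schedule frames against their PTS values, which is beyond the scope of this post.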
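SDL_UpdateYUVTexture with SDL_PIXELFORMAT_IYUV assumes the decoder delivered planar YUV 4:2:0 (AV_PIX_FMT_YUV420P). Most H.264 MP4 files do decode to that, but it is not guaranteed. Here is a hedged sketch of a conversion step using libswscale; toYuv420p is a hypothetical helper, not part of the program above, and it assumes libswscale is linked in.

```cpp
// Hypothetical helper: convert a decoded frame to AV_PIX_FMT_YUV420P so it can
// be fed to SDL_UpdateYUVTexture. The caller frees the result with av_frame_free().
extern "C" {
#include <libavutil/frame.h>
#include <libswscale/swscale.h>
}

static AVFrame* toYuv420p(const AVFrame* src)
{
    if (src->format == AV_PIX_FMT_YUV420P)
        return av_frame_clone(src);           // already in the right format

    AVFrame* dst = av_frame_alloc();
    if (!dst)
        return nullptr;
    dst->format = AV_PIX_FMT_YUV420P;
    dst->width = src->width;
    dst->height = src->height;
    if (av_frame_get_buffer(dst, 0) < 0) {    // allocate the destination planes
        av_frame_free(&dst);
        return nullptr;
    }

    SwsContext* sws = sws_getContext(src->width, src->height, (AVPixelFormat)src->format,
                                     dst->width, dst->height, AV_PIX_FMT_YUV420P,
                                     SWS_BILINEAR, nullptr, nullptr, nullptr);
    if (!sws) {
        av_frame_free(&dst);
        return nullptr;
    }
    sws_scale(sws, src->data, src->linesize, 0, src->height, dst->data, dst->linesize);
    sws_freeContext(sws);
    return dst;
}
```

In getpic4() one would call toYuv420p(frame) before UpdateTexture and free the converted frame afterwards.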
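One more practical note: the decode loop never calls SDL_PollEvent, so on Windows the window is eventually reported as "not responding" and cannot be closed while the video plays. Below is a small sketch of an event pump; pumpEvents is a hypothetical helper, and calling it once per decoded frame inside the while (av_read_frame(...)) loop is an assumption about where it would fit.

```cpp
// Hypothetical helper: drain pending SDL events once per frame and report
// whether playback should continue (false once the user closes the window).
#include "SDL.h"

static bool pumpEvents()
{
    SDL_Event ev;
    while (SDL_PollEvent(&ev)) {
        if (ev.type == SDL_QUIT)
            return false;                     // window closed: stop playback
    }
    return true;                              // keep playing
}

// Usage sketch inside the read/decode loop, right after UpdateTexture():
//   if (!pumpEvents()) { av_packet_unref(&packet); break; }
```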