On Android, video is normally encoded in hardware with MediaCodec and muxed with MediaMuxer. MediaMuxer only supports a limited set of container formats, though, so FFmpeg is often used for the muxing step instead. Surveillance applications, for example, usually record MPEG-2 TS rather than MP4: the two containers package per-frame information such as the PTS differently, which makes each of them suit different scenarios.
Data flow
Camera frames are fed into the MediaCodec encoder, which outputs H.264 compressed data together with a MediaCodec.BufferInfo describing each buffer. Each output buffer is handed to FFmpeg through Mpeg2TsUtils: FFmpeg initializes an AVFormatContext output context and adds a video stream, then each encoded frame is wrapped in an AVPacket and written to the output stream.
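Mpeg2TsUtils, referenced throughout this flow, is a thin Java wrapper around the JNI functions shown in section 1.2. Its declarations are not listed in this article; the following is only a sketch of what they might look like, with the signatures inferred from how Mpeg2TsMuxer and the JNI layer use them (the native library name is an assumption):

```java
package nl.bravobit.mpeg2ts;

import java.nio.ByteBuffer;

// Sketch only: signatures inferred from the calls made by Mpeg2TsMuxer and the JNI layer.
public class Mpeg2TsUtils {
    static {
        System.loadLibrary("mpeg2ts"); // assumed native library name
    }

    // Creates the native H2642Ts context for the given output file and resolution.
    public static native long init(String path, int width, int height);

    // Passes the SPS/PPS (codec config) buffer to the native muxer.
    public static native void config(ByteBuffer buffer, int size);

    // Writes the container header / trailer.
    public static native void start();
    public static native void stop();

    // Writes one encoded H.264 frame (a direct ByteBuffer is expected by the JNI side).
    public static native int write_encoded_frame(ByteBuffer buffer, long size, long pts, boolean isKeyFrame);
}
```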
1.1 Hooking up the encoder output
An IFrameMuxer abstraction wraps the encoder output stream, and Mpeg2TsMuxer implements it for the MPEG-2 TS container.
1 IFrameMuxer
```java
import android.media.MediaCodec;
import android.media.MediaFormat;

import java.nio.ByteBuffer;

public abstract class IFrameMuxer {
    protected String filePath;
    protected int format;
    protected int orientationHint;

    public IFrameMuxer(String filePath, int format, int orientationHint) {
        this.orientationHint = orientationHint;
        this.filePath = filePath;
        this.format = format;
    }

    // ...

    public abstract void start();
    public abstract void stop();
    public abstract void writeSampleData(boolean videoTrack, ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo);
    public abstract int addTrack(MediaFormat mediaFormatChanged, boolean video);
    public abstract void config(ByteBuffer configBuffer);
}
```
2 Mpeg2TsMuxer
```java
package com.tyearlin.camera2;

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;

import java.nio.ByteBuffer;

import nl.bravobit.mpeg2ts.Mpeg2TsUtils;

public class Mpeg2TsMuxer extends IFrameMuxer {
    private static final String TAG = "Mpeg2TsMuxer";

    public volatile boolean running = false;

    public Mpeg2TsMuxer(String filePath, int format, int orientationHint) {
        super(filePath, format, orientationHint);
        Log.i(TAG, "Mpeg2TsMuxer:");
        Mpeg2TsUtils.init(filePath, 1280, 1024);
    }

    @Override
    public int addTrack(MediaFormat outputFormat, boolean isVideo) {
        return 0;
    }

    @Override
    public void config(ByteBuffer buffer) {
        int size = buffer.capacity();
        Mpeg2TsUtils.config(buffer, size);
    }

    @Override
    public void start() {
        if (running) {
            Log.e(TAG, "already running");
            throw new IllegalStateException("Muxer is running; stop it first");
        }
        Mpeg2TsUtils.start();
        running = true;
    }

    @Override
    public void stop() {
        running = false;
        Mpeg2TsUtils.stop();
    }

    @Override
    public void writeSampleData(boolean videoTrack, ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo) {
        if (!running) {
            Log.e(TAG, "not running");
            return;
        }
        long size = bufferInfo.size;
        long pts = bufferInfo.presentationTimeUs;
        boolean keyFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
        Mpeg2TsUtils.write_encoded_frame(outputBuffer, size, pts, keyFrame);
    }
}
```
3 Wiring up the MediaCodec output
On the encoder side an output callback is registered, and the callback calls the corresponding Mpeg2TsMuxer methods, as sketched below.
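A minimal sketch of that wiring, assuming an asynchronous MediaCodec callback; the codec and muxer references here are illustrative and not part of the original code:

```java
// Sketch only: asynchronous MediaCodec callback driving the muxer.
// `codec` is the configured encoder and `muxer` an Mpeg2TsMuxer instance.
// The callback must be registered before codec.configure(...).
codec.setCallback(new MediaCodec.Callback() {
    @Override
    public void onOutputBufferAvailable(MediaCodec mc, int index, MediaCodec.BufferInfo info) {
        ByteBuffer buffer = mc.getOutputBuffer(index);
        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // Codec-config buffer: Annex-B SPS/PPS. Copy it into an exactly sized direct
            // buffer, because Mpeg2TsMuxer.config() reads capacity() as the length.
            ByteBuffer csd = ByteBuffer.allocateDirect(info.size);
            buffer.position(info.offset).limit(info.offset + info.size);
            csd.put(buffer).flip();
            muxer.config(csd);   // forwarded to write_strem_params() as extradata
            muxer.start();       // writes the TS header
        } else if (info.size > 0) {
            muxer.writeSampleData(true, buffer, info);
        }
        mc.releaseOutputBuffer(index, false);
    }

    @Override
    public void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
        muxer.addTrack(format, true); // no-op for the TS muxer, kept for interface symmetry
    }

    @Override
    public void onInputBufferAvailable(MediaCodec mc, int index) {
        // Frames arrive through the encoder's input Surface in this setup, so nothing to do here.
    }

    @Override
    public void onError(MediaCodec mc, MediaCodec.CodecException e) {
        Log.e("Encoder", "encoder error", e);
    }
});
```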
1.2 JNI implementation
The JNI layer bridges the Java calls to the native muxing context.
1.2.1 Initializing the FFmpeg context
```cpp
extern "C"
JNIEXPORT jlong JNICALL
Java_nl_bravobit_mpeg2ts_Mpeg2TsUtils_init(JNIEnv *env, jclass clazz, jstring path, jint width,
                                           jint height) {
    av_log_set_callback(av_log_android_print_callback);
    // av_log_test();
    // Resolve the output path from the Java string
    const char *file_path = env->GetStringUTFChars(path, nullptr);
    H2642Ts *h2642Ts = H2642Ts::instance();
    h2642Ts->av_init(file_path, width, height);
    env->ReleaseStringUTFChars(path, file_path);
    return reinterpret_cast<jlong>(h2642Ts);
}
```
1.2.2 Writing each encoded H.264 frame
MPEG-2 TS can carry other codecs as well. Android's built-in MediaMuxer does not support MPEG-2 TS; the framework actually contains an MPEG-2 TS writer, but it is not exposed through the SDK, so another option is to modify the firmware to expose it.
```cpp
extern "C"
JNIEXPORT jint JNICALL
Java_nl_bravobit_mpeg2ts_Mpeg2TsUtils_write_1encoded_1frame(JNIEnv *env, jclass clazz,
                                                            jobject buffer, jlong size, jlong pts,
                                                            jboolean is_key_frame) {
    // Wrap the H.264 frame and hand it to the muxer
    H2642Ts *h2642Ts = H2642Ts::instance();
    uint8_t *data = static_cast<uint8_t *>(env->GetDirectBufferAddress(buffer));
    int ret = h2642Ts->write_packet(data, size, pts, is_key_frame);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "write_packet failed ret= %d", ret);
    }
    return ret;
}
```
1.3 MPEG-2 TS muxing implementation
H2642Ts implements the whole muxing flow directly on top of the FFmpeg API. The class declaration (H2642Ts.h) follows; a sketch of av_init is given after it.
```cpp
//
// Created by Q 2023/7/28.
//
#ifndef CAMERA2STREAMGET_H2642TS_H
#define CAMERA2STREAMGET_H2642TS_H

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
#include <libavutil/timestamp.h>
}

#include <mutex>
#include <thread>

using namespace std;

class H2642Ts {
public:
    void init();

    static H2642Ts *instance();

    void av_init(const char *path, int width, int height);

    /**
     * Write one encoded frame
     */
    int write_packet(uint8_t *data, long size, long pts, bool isKeyFrame);

    int write_strem_params(uint8_t *data, int size);

    void printf_packet(AVFormatContext *fmt_ctx, AVPacket *pkt);

    int stop();

    int start();

    void release();

private:
    AVFormatContext *ofmt_ctx = nullptr;
    AVStream *out_stream = nullptr;

    static H2642Ts *m_pInstance;
    static std::mutex m_Mutex;

    AVFormatContext *createAvFormatOutContext(const char *path);

    AVStream *createAvStream(int width, int height);

    H2642Ts();

    ~H2642Ts();
};

#endif //CAMERA2STREAMGET_H2642TS_H
```
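The av_init implementation itself is not shown in the article; under the declarations above, a plausible sketch would simply wire together the two helper functions shown in the next two sections:

```cpp
// Sketch only: av_init() is not listed in the article; a plausible implementation
// just creates the output context and the single video stream.
void H2642Ts::av_init(const char *path, int width, int height) {
    ofmt_ctx = createAvFormatOutContext(path);
    if (!ofmt_ctx) {
        av_log(nullptr, AV_LOG_ERROR, "av_init: failed to create output context\n");
        return;
    }
    out_stream = createAvStream(width, height);
}
```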
1.3.1 Creating the output context: createAvFormatOutContext
This specifies the output file format and path. FFmpeg takes the format name "mpegts" passed to avformat_alloc_output_context2 and, via av_guess_format, walks the registered muxer list to find the matching AVOutputFormat (a small sanity-check sketch follows the function).
```cpp
AVFormatContext *H2642Ts::createAvFormatOutContext(const char *path) {
    AVFormatContext *ofmt_ctx = nullptr;
    av_log(nullptr, AV_LOG_INFO, "createAvFormatOutContext: path = %s ", path);
    int ret = avformat_alloc_output_context2(&ofmt_ctx, nullptr, "mpegts", path);
    av_log(ofmt_ctx, AV_LOG_INFO, "createAvFormatOutContext: ret = %d ", ret);
    if (ret < 0) {
        av_log(nullptr, AV_LOG_ERROR,
               "avformat_alloc_output_context2: Something went really wrong.\n");
        return nullptr;
    }
    int result = avio_open(&ofmt_ctx->pb, path, AVIO_FLAG_READ_WRITE);
    if (result < 0) {
        av_log(ofmt_ctx, AV_LOG_ERROR, "createAvFormatOutContext avio_open ret=%d", result);
        avformat_free_context(ofmt_ctx);
        return nullptr;
    }
    return ofmt_ctx;
}
```
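As noted above, the short name is resolved through av_guess_format. A quick way to check that the mpegts muxer is actually compiled into the FFmpeg build (a sketch, not part of the original code):

```cpp
// Optional sanity check: confirm the "mpegts" muxer exists in this FFmpeg build
// before trying to allocate an output context for it.
const AVOutputFormat *fmt = av_guess_format("mpegts", nullptr, nullptr);
if (!fmt) {
    av_log(nullptr, AV_LOG_ERROR, "mpegts muxer not available in this build\n");
}
```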
1.3.2 Adding the output stream
avformat_new_stream creates the stream and automatically appends it to the AVFormatContext->streams list.
```cpp
AVStream *H2642Ts::createAvStream(int width, int height) {
    av_log(ofmt_ctx, AV_LOG_INFO, "createAvStream");
    AVStream *stream = avformat_new_stream(ofmt_ctx, nullptr);
    if (!stream) {
        av_log(ofmt_ctx, AV_LOG_ERROR, "Failed allocating output stream .\n");
        return nullptr;
    }
    av_log(ofmt_ctx, AV_LOG_INFO, " stream index = %d .\n", stream->index);
    AVCodecParameters *codecpar = stream->codecpar;
    codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    codecpar->codec_id = AV_CODEC_ID_H264;
    codecpar->width = width;
    codecpar->height = height;
    return stream;
}
```
1.3.3 Writing the header
```cpp
int H2642Ts::start() {
    if (!ofmt_ctx) {
        av_log(nullptr, AV_LOG_ERROR, "ofmt_ctx is null: Something went really wrong .\n");
        return -1;
    }
    av_log(ofmt_ctx, AV_LOG_INFO, "start");
    // AVDictionary *dict;
    // int ret = av_dict_set(&dict, "tsflags", "faststart", 0);
    // if (ret < 0) {
    //     av_log(ofmt_ctx, AV_LOG_ERROR,
    //            "av_dict_set ret = %d : Something went really wrong .\n", ret);
    //     return ret;
    // }
    if (!ofmt_ctx->streams || !out_stream) {
        av_log(ofmt_ctx, AV_LOG_ERROR, "ofmt_ctx->streams not ready : Something went really wrong .\n");
        return -1;
    }
    return avformat_write_header(ofmt_ctx, nullptr);
}
```
1.3.4 Configuring the stream parameters (mainly SPS/PPS)
```cpp
int H2642Ts::write_strem_params(uint8_t *data, int size) {
    if (!out_stream) {
        av_log(ofmt_ctx, AV_LOG_ERROR, "out_stream is null");
        return -1;
    }
    AVCodecParameters *codecpar = out_stream->codecpar;
    if (codecpar == nullptr) {
        av_log(ofmt_ctx, AV_LOG_INFO, "codecpar is null");
        return -1;
    }
    codecpar->extradata = (uint8_t *) av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
    // codecpar->extradata = (uint8_t *) av_mallocz(size);
    memcpy(codecpar->extradata, data, size);
    codecpar->extradata_size = size;
    return 0;
}
```
Note the extra padding required for extradata (from FFmpeg's avcodec.h):

```cpp
/**
 * Extra binary data needed for initializing the decoder, codec-dependent.
 *
 * Must be allocated with av_malloc() and will be freed by
 * avcodec_parameters_free(). The allocated size of extradata must be at
 * least extradata_size + AV_INPUT_BUFFER_PADDING_SIZE, with the padding
 * bytes zeroed.
 */
uint8_t *extradata;
```
1.3.5 Writing encoded frames
```cpp
int H2642Ts::write_packet(uint8_t *data, long size, long pts, bool isKeyFrame) {
    // data points at the encoded video frame handed over from the JNI layer
    av_log(ofmt_ctx, AV_LOG_INFO, "call write_packet: size:%ld", size);
    AVPacket *packet = av_packet_alloc();
    packet->stream_index = out_stream->index;
    packet->data = data;
    packet->size = (int) size;
    packet->pts = av_rescale_q(pts, AVRational{1, 1000000}, out_stream->time_base);
    packet->dts = packet->pts;
    // packet->duration =
    packet->flags |= isKeyFrame ? AV_PKT_FLAG_KEY : 0;
    av_log(ofmt_ctx, AV_LOG_INFO, "write_packet: start");
    printf_packet(ofmt_ctx, packet);
    int ret = av_interleaved_write_frame(ofmt_ctx, packet);
    av_packet_free(&packet);
    av_log(ofmt_ctx, AV_LOG_INFO, "write_packet: end");
    return ret;
}
```
For objects that are handled on every frame like this, it is better to keep one long-lived instance and update its fields than to allocate a new one each time.
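A minimal sketch of that idea, assuming a reusable AVPacket member (called m_packet here, which is not part of the original class) that is allocated once and only reset between frames:

```cpp
// Sketch only: m_packet is a hypothetical member, e.g.
//   AVPacket *m_packet = av_packet_alloc();  // allocated once in av_init(), freed in release()
int H2642Ts::write_packet(uint8_t *data, long size, long pts, bool isKeyFrame) {
    m_packet->stream_index = out_stream->index;
    m_packet->data = data;
    m_packet->size = (int) size;
    m_packet->pts = av_rescale_q(pts, AVRational{1, 1000000}, out_stream->time_base);
    m_packet->dts = m_packet->pts;
    m_packet->flags = isKeyFrame ? AV_PKT_FLAG_KEY : 0;
    int ret = av_interleaved_write_frame(ofmt_ctx, m_packet);
    av_packet_unref(m_packet); // defensive reset; the AVPacket struct itself is reused next call
    return ret;
}
```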
1.3.6 Finishing: writing the trailer
```cpp
int H2642Ts::stop() {
    if (!ofmt_ctx) {
        av_log(nullptr, AV_LOG_ERROR, "ofmt_ctx is null: Something went really wrong .\n");
        return -1;
    }
    av_log(ofmt_ctx, AV_LOG_INFO, "stop");
    av_write_trailer(ofmt_ctx);
    avio_closep(&ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    ofmt_ctx = nullptr;
    out_stream = nullptr;
    return 0;
}
```