Sharing YUV video frames across processes in the Android native layer: an efficient channel built on abstract Unix sockets. Based on the AOSP 13 / LineageOS 20 (or nearby) source trees; this is a ROM modification, not a hook or LSPosed-style plugin approach.
1. Introduction
In some customized Android scenarios we need to dynamically replace the system camera's preview frames, for example for a virtual camera, AR overlays, screen mirroring, or privacy masking. The core problem: generate YUV images inside one APK and deliver them in real time to another native process, which renders them in place of the real preview.
This article presents an efficient, stable, Binder-free cross-process approach: YUV frames are transferred over an abstract Unix socket in the Android native layer, and libyuv handles format conversion and scaling, so the preview buffer can be swapped out seamlessly.
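One detail worth calling out up front: an abstract-namespace socket address begins with a NUL byte in `sun_path`, and the address length passed to `bind()`/`connect()` must include the `sun_family` prefix. A minimal sketch of the addressing helper (distilled here for illustration; the full server and client code follow in later sections):
```cpp
#include <sys/socket.h>
#include <sys/un.h>
#include <cstddef>   // offsetof
#include <cstring>

// Build an abstract-namespace address such as "@vcam_yuv_server".
// The returned length is offsetof(sun_path) + 1 (leading NUL) + name length;
// getting this length wrong is the most common abstract-socket mistake.
static socklen_t makeAbstractAddr(struct sockaddr_un* addr, const char* name) {
    memset(addr, 0, sizeof(*addr));
    addr->sun_family = AF_UNIX;
    addr->sun_path[0] = '\0';  // abstract namespace marker, no filesystem entry
    strncpy(addr->sun_path + 1, name, sizeof(addr->sun_path) - 2);
    return (socklen_t)(offsetof(struct sockaddr_un, sun_path) + 1 + strlen(name));
}
```
Because the name never touches the filesystem, there is no socket file to create, chmod, or clean up, which is exactly why this transport suits an APK-to-cameraserver channel.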
2. Scenario and requirements
We have two modules:
- APK server: captures or generates the YUV images (camera, video decoding, AI generation, and so on).
- ROM module: patches the camera core code and fetches the external YUV data in real time to replace the preview.
Goal: stream YUV frames (I420) from the APK to the ROM camera process in real time, with low latency and high stability; the wire format is sketched below.
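To pin the protocol down before the code: each client connection yields exactly one frame, laid out as a 12-byte header followed by the three tightly packed I420 planes. A sketch (the struct name is illustrative; the code below sends a plain `int[3]`):
```cpp
#include <cstdint>

// On-the-wire layout for one frame (host byte order is fine here,
// since both endpoints run on the same device):
struct FrameHeader {
    int32_t width;   // luma width in pixels
    int32_t height;  // luma height in pixels
    int32_t magic;   // 0x12345678, sanity check on the receiver
};
// Followed immediately by:
//   uint8_t y[width * height];              // Y plane
//   uint8_t u[(width / 2) * (height / 2)];  // U plane
//   uint8_t v[(width / 2) * (height / 2)];  // V plane
```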
3. APK server: core code and approach
First, the APK starts a local socket server that accepts frame requests from the camera service in the ROM:
```cpp
// v_cam_server.cpp
#include "v_cam_server.h"
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>
#include <cstring>
#include <thread>
#include <condition_variable>
#include <cerrno>
#include <cstddef>   // offsetof
#include <android/log.h>
#define LOG_TAG "VCamServer"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
VCamServer::VCamServer() {
latest_frame_ = av_frame_alloc();
}
VCamServer::~VCamServer() {
stopServer();
if (latest_frame_) av_frame_free(&latest_frame_);
}
void VCamServer::setCurrentFrame(AVFrame* frame) {
std::lock_guard<std::mutex> lock(frame_mutex_);
av_frame_unref(latest_frame_);
av_frame_move_ref(latest_frame_, frame);
frame_ready_ = true;
frame_cond_.notify_one();
}
void VCamServer::startServer(int width, int height) {
if (server_running_) return;
width_ = width;
height_ = height;
server_running_ = true;
server_thread_ = std::thread(&VCamServer::serverLoop, this);
}
void VCamServer::stopServer() {
    server_running_ = false;
    frame_cond_.notify_all(); // wake serverLoop if it is blocked waiting for a frame
    if (server_thread_.joinable()) {
        server_thread_.join();
    }
}
void VCamServer::serverLoop() {
    const char* SOCKET_NAME = "vcam_yuv_server"; // abstract socket name: @vcam_yuv_server
int server_fd = socket(AF_UNIX, SOCK_STREAM, 0);
if (server_fd < 0) {
LOGE("创建 socket 失败");
return;
}
struct sockaddr_un addr{};
addr.sun_family = AF_UNIX;
    addr.sun_path[0] = '\0'; // abstract namespace: leading NUL byte
    strncpy(addr.sun_path + 1, SOCKET_NAME, sizeof(addr.sun_path) - 2);
    // Abstract socket address length = sun_path offset + leading NUL + name length
    socklen_t len = offsetof(struct sockaddr_un, sun_path) + 1 + strlen(SOCKET_NAME);
if (bind(server_fd, (struct sockaddr*)&addr, len) < 0) {
LOGE("bind 失败: %s", strerror(errno));
close(server_fd);
return;
}
if (listen(server_fd, 1) < 0) {
LOGE("listen 失败");
close(server_fd);
return;
}
LOGI("✅ 抽象 socket 服务启动: @%s", SOCKET_NAME);
while (server_running_) {
fd_set read_fds;
FD_ZERO(&read_fds);
FD_SET(server_fd, &read_fds);
        struct timeval tv{1, 0}; // 1-second timeout so the loop notices shutdown
int ret = select(server_fd + 1, &read_fds, nullptr, nullptr, &tv);
if (ret > 0 && FD_ISSET(server_fd, &read_fds)) {
int client_fd = accept(server_fd, nullptr, nullptr);
if (client_fd >= 0) {
LOGI("ROM 客户端连接,准备发送帧");
// 等待最新帧
AVFrame* frame = nullptr;
{
std::unique_lock<std::mutex> lock(frame_mutex_);
frame_cond_.wait(lock, [this] { return frame_ready_; });
if (latest_frame_->data[0]) {
frame = av_frame_clone(latest_frame_);
frame_ready_ = false;
}
}
                if (frame) {
                    // send() may write fewer bytes than requested; loop until done.
                    // MSG_NOSIGNAL avoids SIGPIPE if the client disconnects mid-frame.
                    auto sendAll = [&](const void* buf, size_t n) -> bool {
                        size_t total = 0;
                        while (total < n) {
                            ssize_t w = send(client_fd, (const char*)buf + total,
                                             n - total, MSG_NOSIGNAL);
                            if (w <= 0) return false;
                            total += w;
                        }
                        return true;
                    };
                    // Protocol: header[width, height, magic]
                    int header[3] = { width_, height_, 0x12345678 };
                    bool ok = sendAll(header, sizeof(header));
                    // Y plane, row by row (linesize may include alignment padding)
                    for (int i = 0; ok && i < height_; i++) {
                        ok = sendAll(frame->data[0] + i * frame->linesize[0], width_);
                    }
                    // U plane
                    for (int i = 0; ok && i < height_ / 2; i++) {
                        ok = sendAll(frame->data[1] + i * frame->linesize[1], width_ / 2);
                    }
                    // V plane
                    for (int i = 0; ok && i < height_ / 2; i++) {
                        ok = sendAll(frame->data[2] + i * frame->linesize[2], width_ / 2);
                    }
                    if (!ok) LOGE("failed to send frame: %s", strerror(errno));
                    av_frame_free(&frame);
                }
close(client_fd);
}
}
}
close(server_fd);
LOGI("服务已停止");
}
```
Next, the APK starts a thread that pulls the RTMP stream and decodes it into YUV frames:
```cpp
#include "v_cam_decoder.h"
#include <fstream>
#include <unistd.h>
VCamDecoder::VCamDecoder()
: format_ctx_(nullptr), codec_ctx_(nullptr), frame_(nullptr),
packet_(nullptr), sws_ctx_(nullptr), video_stream_index_(-1), running_(false) {
avformat_network_init();
}
VCamDecoder::~VCamDecoder() {
stop();
if (sws_ctx_) sws_freeContext(sws_ctx_);
if (frame_) av_frame_free(&frame_);
if (packet_) av_packet_free(&packet_);
if (codec_ctx_) avcodec_free_context(&codec_ctx_);
if (format_ctx_) avformat_close_input(&format_ctx_);
avformat_network_deinit();
}
bool VCamDecoder::init(const char* rtmp_url, int width, int height, int fps) {
    // Start the socket server up front so it is ready before decoding begins
    server_.startServer(width, height);
rtmp_url_ = rtmp_url;
width_ = width;
height_ = height;
fps_ = fps;
    // Open the input stream
    if (avformat_open_input(&format_ctx_, rtmp_url_.c_str(), nullptr, nullptr) != 0) {
        LOGE("failed to open input stream: %s", rtmp_url_.c_str());
return false;
}
if (avformat_find_stream_info(format_ctx_, nullptr) < 0) {
LOGE("无法获取流信息");
return false;
}
    // Find the video stream
    for (unsigned int i = 0; i < format_ctx_->nb_streams; i++) {
if (format_ctx_->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
video_stream_index_ = i;
break;
}
}
if (video_stream_index_ == -1) {
LOGE("未找到视频流");
return false;
}
    // Get the decoder
AVCodecParameters* codec_par = format_ctx_->streams[video_stream_index_]->codecpar;
const AVCodec* codec = avcodec_find_decoder(codec_par->codec_id);
if (!codec) {
LOGE("找不到解码器");
return false;
}
codec_ctx_ = avcodec_alloc_context3(codec);
if (avcodec_parameters_to_context(codec_ctx_, codec_par) < 0) {
LOGE("无法复制编解码器参数");
return false;
}
if (avcodec_open2(codec_ctx_, codec, nullptr) < 0) {
LOGE("无法打开解码器");
return false;
}
    // Allocate frame and packet
frame_ = av_frame_alloc();
packet_ = av_packet_alloc();
if (!frame_ || !packet_) {
LOGE("内存分配失败");
return false;
}
LOGI("解码器初始化成功: %s", rtmp_url_.c_str());
return true;
}
// Earlier file-based handoff, kept for debugging; superseded by the socket path
bool VCamDecoder::writeYUVFrame(AVFrame* frame) {
const char* tmp_path = "/data/local/tmp/0.yuv";
const char* output_path = "/data/local/tmp/1.yuv";
std::ofstream out_file(tmp_path, std::ios::binary);
if (!out_file) {
LOGE("无法打开临时文件");
return false;
}
    // Write the YUV420P planes row by row (linesize may include padding)
for (int i = 0; i < height_; i++) {
out_file.write(reinterpret_cast<const char*>(frame->data[0] + i * frame->linesize[0]), width_);
}
for (int i = 0; i < height_ / 2; i++) {
out_file.write(reinterpret_cast<const char*>(frame->data[1] + i * frame->linesize[1]), width_ / 2);
}
for (int i = 0; i < height_ / 2; i++) {
out_file.write(reinterpret_cast<const char*>(frame->data[2] + i * frame->linesize[2]), width_ / 2);
}
out_file.close();
    // Atomic rename so a reader never sees a half-written file
if (rename(tmp_path, output_path) != 0) {
LOGE("重命名失败");
return false;
}
return true;
}
bool VCamDecoder::decodeFrame() {
int ret = av_read_frame(format_ctx_, packet_);
if (ret < 0) {
        if (ret == AVERROR_EOF) {
            LOGI("stream ended");
        } else {
            LOGE("failed to read frame: %d", ret);
}
return false;
}
if (packet_->stream_index != video_stream_index_) {
av_packet_unref(packet_);
return true;
}
ret = avcodec_send_packet(codec_ctx_, packet_);
if (ret < 0) {
LOGE("发送包失败");
av_packet_unref(packet_);
return false;
}
while (ret >= 0) {
ret = avcodec_receive_frame(codec_ctx_, frame_);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break;
} else if (ret < 0) {
LOGE("解码错误");
break;
}
        // Successfully decoded one frame
        // if (!writeYUVFrame(frame_)) {
        //     LOGE("failed to write the YUV frame");
        // }
        // Instead of writeYUVFrame: hand the frame to the socket server
        server_.setCurrentFrame(frame_);
av_frame_unref(frame_);
}
av_packet_unref(packet_);
return true;
}
void VCamDecoder::decodeLoop() {
    const int64_t frame_duration = 1000000 / fps_; // microseconds per frame
    while (running_) {
        auto start_time = std::chrono::high_resolution_clock::now();
        if (!decodeFrame()) {
            // Decode failed; back off briefly and retry
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
            continue;
        }
        auto end_time = std::chrono::high_resolution_clock::now();
        int64_t elapsed = std::chrono::duration_cast<std::chrono::microseconds>(end_time - start_time).count();
        LOGI("decode took %" PRId64 " us", elapsed);
        if (elapsed < frame_duration) {
            int64_t remaining = frame_duration - elapsed;
            usleep(remaining);
        } else {
            LOGI("decode exceeded the frame interval: %" PRId64 " us > %" PRId64 " us", elapsed, frame_duration);
        }
    }
}
void VCamDecoder::start() {
    if (running_) return;
    running_ = true;
    decode_thread_ = std::thread(&VCamDecoder::decodeLoop, this);
    LOGI("decoding started");
}
void VCamDecoder::stop() {
    running_ = false;
    if (decode_thread_.joinable()) {
        decode_thread_.join();
    }
    LOGI("decoding stopped");
}
```
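The post does not show how the APK brings the decoder and server up; a minimal JNI bridge might look like the sketch below (the Java class com.example.vcam.VCamBridge and both method names are assumptions for illustration, not part of the original project):
```cpp
#include <jni.h>
#include <memory>
#include "v_cam_decoder.h"

// Hypothetical entry points: Java calls VCamBridge.nativeStart(url, w, h, fps).
static std::unique_ptr<VCamDecoder> gDecoder;

extern "C" JNIEXPORT jboolean JNICALL
Java_com_example_vcam_VCamBridge_nativeStart(JNIEnv* env, jclass,
        jstring url, jint w, jint h, jint fps) {
    const char* c_url = env->GetStringUTFChars(url, nullptr);
    gDecoder = std::make_unique<VCamDecoder>();
    // init() also starts the abstract-socket server (see VCamDecoder::init above)
    bool ok = gDecoder->init(c_url, w, h, fps);
    env->ReleaseStringUTFChars(url, c_url);
    if (ok) gDecoder->start();  // spawn the decode loop thread
    return ok ? JNI_TRUE : JNI_FALSE;
}

extern "C" JNIEXPORT void JNICALL
Java_com_example_vcam_VCamBridge_nativeStop(JNIEnv*, jclass) {
    gDecoder.reset();  // destructor stops the decode thread and the socket server
}
```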
4. ROM-side core service changes
The core ROM change is in frameworks/av/services/camera/libcameraservice/device3/Camera3Stream.cpp: inside status_t Camera3Stream::returnBuffer we overwrite the returned buffer with our decoded YUV data.
```cpp
status_t Camera3Stream::returnBuffer(const camera_stream_buffer &buffer,
nsecs_t timestamp, nsecs_t readoutTimestamp, bool timestampIncreasing,
const std::vector<size_t>& surface_ids, uint64_t frameNumber, int32_t transform) {
ATRACE_HFR_CALL();
Mutex::Autolock l(mLock);
    // 1. Validate the buffer (original logic)
if (!isOutstandingBuffer(buffer)) {
ALOGE("%s: Stream %d: Returning an unknown buffer.", __FUNCTION__, mId);
return BAD_VALUE;
}
    // 2. Added: replace the YUV data only when the buffer status is OK
if (buffer.status == CAMERA_BUFFER_STATUS_OK && buffer.buffer != nullptr) {
GraphicBufferMapper &mapper = GraphicBufferMapper::get();
android_ycbcr ycbcr = {};
status_t lockStatus = mapper.lockYCbCr(
*buffer.buffer,
GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
Rect(getWidth(), getHeight()),
&ycbcr
);
if (lockStatus == OK) {
            // Replace the YUV payload with our decoded frame
gImageReplacer.replaceYUVBuffer(ycbcr, getWidth(), getHeight());
mapper.unlock(*buffer.buffer);
} else {
ALOGW("%s: Failed to lock buffer for YUV replacement: %s",
__FUNCTION__, strerror(-lockStatus));
}
}
    // 3. Continue with the original logic
removeOutstandingBuffer(buffer);
camera_stream_buffer b = buffer;
if (timestampIncreasing && timestamp != 0 && timestamp <= mLastTimestamp) {
ALOGE("%s: Stream %d: timestamp %" PRId64 " is not increasing. Prev timestamp %" PRId64,
__FUNCTION__, mId, timestamp, mLastTimestamp);
b.status = CAMERA_BUFFER_STATUS_ERROR;
}
mLastTimestamp = timestamp;
status_t res = returnBufferLocked(b, timestamp, readoutTimestamp, transform, surface_ids);
if (res == OK) {
fireBufferListenersLocked(b, /*acquired*/false, /*output*/true, timestamp, frameNumber);
}
mOutputBufferReturnedSignal.signal();
return res;
}
```
The replacement path first pulls one I420 frame from the APK over the abstract socket:
```cpp
int pullYUVFromAPK(std::vector<uint8_t>& y_data,
std::vector<uint8_t>& u_data,
std::vector<uint8_t>& v_data,
uint32_t& out_width, uint32_t& out_height) {
const char* SOCKET_NAME = "vcam_yuv_server";
int sock = socket(AF_UNIX, SOCK_STREAM, 0);
if (sock < 0) {
ALOGE("pullYUVFromAPK ❌ 创建 socket 失败: %s", strerror(errno));
return -1;
}
struct sockaddr_un addr{};
addr.sun_family = AF_UNIX;
    addr.sun_path[0] = '\0'; // abstract namespace
    strncpy(addr.sun_path + 1, SOCKET_NAME, sizeof(addr.sun_path) - 2);
    // Abstract socket address length: sun_path offset + leading NUL + name length
    socklen_t len = offsetof(struct sockaddr_un, sun_path) + 1 + strlen(SOCKET_NAME);
if (connect(sock, (struct sockaddr*)&addr, len) < 0) {
close(sock);
ALOGE("pullYUVFromAPK ❌ connect 失败: %s", strerror(errno));
return -1;
}
    // Send a 1-byte frame request (the server currently ignores its content)
    send(sock, "R", 1, 0);
    // recv() may return partial data on a stream socket; loop until complete
    auto recvAll = [&](void* buf, size_t n) -> bool {
        size_t total = 0;
        while (total < n) {
            ssize_t r = recv(sock, (char*)buf + total, n - total, 0);
            if (r <= 0) {
                ALOGE("pullYUVFromAPK ❌ recv failed: n=%zd, errno=%s", r, strerror(errno));
                return false;
            }
            total += r;
        }
        return true;
    };
    // Receive the header: width, height, magic
    int header[3];
    if (!recvAll(header, sizeof(header))) {
        close(sock);
        ALOGE("pullYUVFromAPK ❌ failed to receive header");
        return -1;
    }
    int width = header[0], height = header[1];
    if (header[2] != 0x12345678 || width <= 0 || height <= 0) {
        close(sock);
        ALOGE("pullYUVFromAPK ❌ invalid header: %dx%d, magic=0x%x", width, height, header[2]);
        return -1;
    }
    size_t y_size = (size_t)width * height;
    size_t uv_size = (size_t)(width / 2) * (height / 2);
    y_data.resize(y_size);
    u_data.resize(uv_size);
    v_data.resize(uv_size);
    bool success = recvAll(y_data.data(), y_size) &&
                   recvAll(u_data.data(), uv_size) &&
                   recvAll(v_data.data(), uv_size);
close(sock);
if (success) {
out_width = width;
out_height = height;
return 0;
} else {
ALOGE("pullYUVFromAPK ❌ 接收 YUV 数据不完整");
return -1;
}
}
void replaceYUVBuffer(const android_ycbcr &ycbcr, uint32_t dstWidth, uint32_t dstHeight) {
    auto startTime = std::chrono::high_resolution_clock::now();
    ALOGD("[YUV] replacing preview buffer -> target resolution: %ux%u", dstWidth, dstHeight);
    // === Instead of reading a file: pull the YUV data over the socket ===
    std::vector<uint8_t> srcY, srcU, srcV;
    uint32_t srcWidth = 0, srcHeight = 0;
    if (pullYUVFromAPK(srcY, srcU, srcV, srcWidth, srcHeight) != 0) {
        ALOGE("[ERROR] failed to pull YUV data from the APK");
        return;
    }
    ALOGD("[YUV] pulled source frame: %ux%u", srcWidth, srcHeight);
    if (srcY.empty() || srcU.empty() || srcV.empty()) {
        ALOGE("[ERROR] YUV data is empty");
        return;
    }
    // The NV12 path below assumes interleaved CbCr (chroma_step == 2)
    if (ycbcr.chroma_step != 2) {
        ALOGE("[ERROR] unexpected chroma_step=%zu, buffer is not NV12-like", ycbcr.chroma_step);
        return;
    }
    const uint8_t* pSrcY = srcY.data();
    const uint8_t* pSrcU = srcU.data();
    const uint8_t* pSrcV = srcV.data();
    // === The rest of the logic is unchanged from the file-based version ===
    uint8_t* dstY = static_cast<uint8_t*>(ycbcr.y);
    uint8_t* dstUV = static_cast<uint8_t*>(ycbcr.cb);
    int libyuvResult = 0;
if (srcWidth == dstWidth && srcHeight == dstHeight) {
ALOGD("【YUV】直接格式转换: I420 -> NV12");
libyuvResult = libyuv::I420ToNV12(
pSrcY, srcWidth,
pSrcU, srcWidth / 2,
pSrcV, srcWidth / 2,
            dstY, (int)ycbcr.ystride,
            dstUV, (int)ycbcr.cstride,
dstWidth, dstHeight
);
} else {
ALOGD("【YUV】分步处理: 缩放 -> 格式转换");
std::vector<uint8_t> tempY(dstWidth * dstHeight);
std::vector<uint8_t> tempU((dstWidth / 2) * (dstHeight / 2));
std::vector<uint8_t> tempV((dstWidth / 2) * (dstHeight / 2));
libyuvResult = libyuv::I420Scale(
pSrcY, srcWidth,
pSrcU, srcWidth / 2,
pSrcV, srcWidth / 2,
srcWidth, srcHeight,
tempY.data(), dstWidth,
tempU.data(), dstWidth / 2,
tempV.data(), dstWidth / 2,
dstWidth, dstHeight,
libyuv::kFilterBilinear
);
if (libyuvResult == 0) {
libyuvResult = libyuv::I420ToNV12(
tempY.data(), dstWidth,
tempU.data(), dstWidth / 2,
tempV.data(), dstWidth / 2,
                dstY, (int)ycbcr.ystride,
                dstUV, (int)ycbcr.cstride,
dstWidth, dstHeight
);
}
}
    if (libyuvResult != 0) {
        ALOGE("[ERROR] libyuv processing failed, error code: %d", libyuvResult);
        return;
    }
    ALOGD("[YUV] buffer replacement completed");
    auto endTime = std::chrono::high_resolution_clock::now();
    auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(endTime - startTime);
    ALOGD("[YUV] processing took %lld ms", (long long)duration.count());
}
```
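One loose end: returnBuffer above calls gImageReplacer.replaceYUVBuffer(...), while pullYUVFromAPK and replaceYUVBuffer are shown here as free functions. A thin wrapper in the same translation unit reconciles the two; the class shape below is an assumption, only the gImageReplacer name comes from the patch:
```cpp
// Hypothetical glue matching the gImageReplacer call site in returnBuffer.
class ImageReplacer {
public:
    // Pull one I420 frame from the APK and write it into the locked buffer,
    // scaling and converting to NV12 via libyuv as needed.
    void replaceYUVBuffer(const android_ycbcr& ycbcr,
                          uint32_t dstWidth, uint32_t dstHeight) {
        ::replaceYUVBuffer(ycbcr, dstWidth, dstHeight);  // the free function above
    }
};

// Single global instance referenced by Camera3Stream::returnBuffer.
ImageReplacer gImageReplacer;
```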
5. Results
As a test feed, push the desktop to an RTMP server with the following command:
```bash
ffmpeg -f x11grab -framerate 30 -video_size 1920x1080 -i $DISPLAY -vf "scale=1600:1200" -c:v libx264 -preset ultrafast -tune zerolatency -pix_fmt yuv420p -b:v 2500k -maxrate 2500k -bufsize 5000k -f flv "rtmp://192.168.1.241:1935/live/desktop_stream"
```
(Demo screen recording of the replaced camera preview omitted.)