一.整体架构
Unity WebGL应用 → RenderTexture → 纹理数据 → JavaScript → 视频编码 → 视频文件
1.核心组件关系

2.详细实现原理
2.1RenderTexture到字节数组的转换过程
cs
// Step 1: make the target RenderTexture the active one, so ReadPixels reads from it
RenderTexture.active = targetTexture;
// Step 2: render into the texture
camera.Render(); // optional, only needed when rendering via a camera
// Step 3: copy the GPU pixel data into a CPU-side Texture2D
texture.ReadPixels(new Rect(0, 0, width, height), 0, 0);
texture.Apply();
// Step 4: get the raw bytes (RGBA32 layout, 4 bytes per pixel)
byte[] rgbaData = texture.GetRawTextureData();
// Step 5: format conversion (RGBA32 → RGB, drops the alpha channel)
byte[] rgbData = ConvertRGBA32ToRGB(rgbaData);
2.2内存布局的变化
RGBA32格式(每个像素4个字节)
R1, G1, B1, A1, R2, G2, B2, A2, ...
转换为RGB格式(每个像素3个字节)
R1, G1, B1, R2, G2, B2, ...
3.跨语言调用流程
cs
// C# side calling into JavaScript ("__Internal" binds to a WebGL .jslib plugin)
[DllImport("__Internal")]
private static extern void CaptureFrame(byte[] data, int length);
// At the actual call site:
CaptureFrame(rgbData, rgbData.Length);
4.JavaScript端视频编码原理
javascript
// Expand a tightly packed RGB byte buffer into a canvas of the given size.
// Returns the canvas so callers can feed it to an encoder or capture stream.
function processFrameToVideo(rgbData, width, height) {
    // 1. Intermediate canvas acting as the pixel sink
    const canvas = document.createElement('canvas');
    canvas.width = width;
    canvas.height = height;
    const ctx = canvas.getContext('2d');

    // 2. Blank ImageData to receive the converted pixels
    const imageData = ctx.createImageData(width, height);
    const rgba = imageData.data;

    // 3. RGB (3 bytes/pixel) → RGBA (4 bytes/pixel), alpha forced opaque
    const pixelCount = width * height;
    for (let p = 0; p < pixelCount; p++) {
        const src = p * 3;
        const dst = p * 4;
        rgba[dst] = rgbData[src];         // R
        rgba[dst + 1] = rgbData[src + 1]; // G
        rgba[dst + 2] = rgbData[src + 2]; // B
        rgba[dst + 3] = 255;              // A (fully opaque)
    }

    // 4. Paint the converted frame onto the canvas
    ctx.putImageData(imageData, 0, 0);
    return canvas;
}
5.内存管理策略
cs
public class OptimizedRecorder : MonoBehaviour
{
    // Source RenderTexture to capture from. The original example referenced
    // `targetTexture` (and `width`/`height`) without declaring them, so it
    // would not compile; declaring the field here fixes that.
    public RenderTexture targetTexture;

    private Texture2D reusableTexture; // reused CPU-side copy of the frame
    private byte[] reusableBuffer;     // reused RGB output buffer

    // Pre-allocate the capture texture and RGB buffer so per-frame captures
    // do not allocate and trigger GC spikes.
    void InitializeBuffers(int width, int height)
    {
        if (reusableTexture == null ||
            reusableTexture.width != width ||
            reusableTexture.height != height)
        {
            reusableTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);
            reusableBuffer = new byte[width * height * 3]; // 3 bytes per pixel (RGB)
        }
    }

    // Capture one frame using the pre-allocated texture/buffer.
    void CaptureFrameOptimized()
    {
        // Preserve and restore the active RenderTexture so capturing does
        // not disturb other rendering code.
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = targetTexture;
        reusableTexture.ReadPixels(
            new Rect(0, 0, reusableTexture.width, reusableTexture.height), 0, 0);
        reusableTexture.Apply();
        RenderTexture.active = previous;

        // Zero-copy native view of the raw RGBA32 data (advanced usage).
        var nativeArray = reusableTexture.GetRawTextureData<byte>();
        // ... process data
    }
}
6.JavaScript异步处理策略
javascript
// JavaScript端使用Worker进行后台编码
// Spin up the background encoder worker and wire up its completion callback.
function createVideoWorker() {
    const worker = new Worker('video-encoder-worker.js');
    worker.onmessage = function (e) {
        if (e.data.type === 'encoded') {
            // Encoding finished: hand the resulting video Blob to the
            // download helper.
            triggerDownload(e.data.blob);
        }
    };
    return worker;
}
// 在主线程中发送帧数据
// Post one frame to the encoder worker. The underlying ArrayBuffer is
// transferred (zero-copy), so the caller must not reuse frameData afterwards.
function sendFrameToWorker(worker, frameData, width, height) {
    const message = {
        type: 'frame',
        data: frameData,
        width: width,
        height: height
    };
    worker.postMessage(message, [frameData.buffer]);
}
7.完整使用流程
7.1 Unity端代码
cs
using System;
using System.Collections;
using System.Runtime.InteropServices;
using UnityEngine;
public class MultiRenderTextureRecorder : MonoBehaviour
{
    [Header("录制器设置")]
    public string recorderId; // unique key identifying this recorder on the JS side
    public RenderTexture targetRenderTexture;
    public int frameRate = 30;
    public bool autoDownload = true;

    [Header("状态信息")]
    [SerializeField] private bool isRecording = false;
    [SerializeField] private float recordingStartTime;

    // Hard cap on a single recording, in seconds.
    private const int MAX_RECORDING_TIME = 300;

    private Texture2D captureTexture;
    private bool isInitialized = false;
    // Time.time at which the next frame should be captured. Used to throttle
    // capture to `frameRate`; the original captured on every Update, i.e. at
    // the display refresh rate regardless of the configured frame rate.
    private float nextCaptureTime;

    // JavaScript plugin bindings (implemented in WebGLRecorder.jslib).
    [DllImport("__Internal")]
    private static extern void InitializeRecorder(string recorderId, int width, int height, int frameRate);
    [DllImport("__Internal")]
    private static extern void StartRecording(string recorderId);
    [DllImport("__Internal")]
    private static extern void StopRecording(string recorderId);
    [DllImport("__Internal")]
    private static extern void AddVideoFrame(string recorderId, System.IntPtr data, int width, int height);
    [DllImport("__Internal")]
    private static extern void DownloadRecordedVideo(string recorderId);

    IEnumerator Start()
    {
        // Wait one frame so the WebGL environment is fully loaded before
        // touching the plugin.
        yield return new WaitForEndOfFrame();
        InitializeRecorder();
    }

    // Validate settings and create the JS-side recorder entry.
    void InitializeRecorder()
    {
        if (targetRenderTexture == null)
        {
            Debug.LogError($"录制器 {recorderId} 未指定RenderTexture!");
            return;
        }
        if (string.IsNullOrEmpty(recorderId))
        {
            Debug.LogError("录制器ID不能为空!");
            return;
        }
#if !UNITY_EDITOR && UNITY_WEBGL
        try
        {
            InitializeRecorder(recorderId,
                targetRenderTexture.width,
                targetRenderTexture.height,
                frameRate);
            isInitialized = true;
            Debug.Log($"成功初始化录制器: {recorderId}");
        }
        catch (Exception e)
        {
            Debug.LogError($"初始化录制器 {recorderId} 失败: {e.Message}");
        }
#else
        isInitialized = true; // treat as initialized when running in the editor
        Debug.Log($"录制器 {recorderId} 初始化完成 (编辑器模式)");
#endif
    }

    void Update()
    {
        if (!isInitialized || !isRecording) return;

        // Enforce the maximum recording duration.
        if (Time.time - recordingStartTime >= MAX_RECORDING_TIME)
        {
            StopRecording();
            return;
        }

        // Throttle capture to the configured frame rate instead of
        // capturing every rendered frame.
        if (Time.time >= nextCaptureTime)
        {
            CaptureAndSendFrame();
            nextCaptureTime += 1f / Mathf.Max(1, frameRate);
        }
    }

    // Begin recording. No-ops (with a log) if not initialized, already
    // recording, or no RenderTexture is assigned.
    public void StartRecording()
    {
        if (!isInitialized)
        {
            Debug.LogError($"录制器 {recorderId} 未正确初始化!");
            return;
        }
        if (isRecording)
        {
            Debug.LogWarning($"录制器 {recorderId} 已经在录制中");
            return;
        }
        if (targetRenderTexture == null)
        {
            Debug.LogError($"录制器 {recorderId} 未指定RenderTexture!");
            return;
        }
#if !UNITY_EDITOR && UNITY_WEBGL
        try
        {
            StartRecording(recorderId);
        }
        catch (Exception e)
        {
            Debug.LogError($"开始录制 {recorderId} 失败: {e.Message}");
            return;
        }
#endif
        isRecording = true;
        recordingStartTime = Time.time;
        nextCaptureTime = Time.time; // capture the first frame immediately
        Debug.Log($"开始录制: {recorderId}");
    }

    // Stop recording and (optionally) trigger a delayed download.
    public void StopRecording()
    {
        if (!isInitialized)
        {
            Debug.LogError($"录制器 {recorderId} 未正确初始化!");
            return;
        }
        if (!isRecording)
        {
            Debug.LogWarning($"录制器 {recorderId} 未在录制中");
            return;
        }
#if !UNITY_EDITOR && UNITY_WEBGL
        try
        {
            StopRecording(recorderId);
        }
        catch (Exception e)
        {
            Debug.LogError($"停止录制 {recorderId} 失败: {e.Message}");
            return;
        }
#endif
        isRecording = false;
        Debug.Log($"停止录制: {recorderId}");
        if (autoDownload)
        {
            // Delay so the MediaRecorder can flush its final data chunk
            // before the download is requested.
            StartCoroutine(DelayedDownload());
        }
    }

    // True while a recording is in progress.
    public bool IsRecording()
    {
        return isRecording;
    }

    // Elapsed recording time in seconds (0 when not recording).
    public float GetRecordingTime()
    {
        return isRecording ? Time.time - recordingStartTime : 0f;
    }

    // The unique id of this recorder.
    public string GetRecorderId()
    {
        return recorderId;
    }

    private IEnumerator DelayedDownload()
    {
        yield return new WaitForSeconds(1f);
        DownloadVideo();
    }

    // Ask the JS side to assemble and download the recorded video.
    public void DownloadVideo()
    {
        if (!isInitialized)
        {
            Debug.LogError($"录制器 {recorderId} 未正确初始化!");
            return;
        }
#if !UNITY_EDITOR && UNITY_WEBGL
        try
        {
            DownloadRecordedVideo(recorderId);
        }
        catch (Exception e)
        {
            Debug.LogError($"下载视频 {recorderId} 失败: {e.Message}");
        }
#else
        Debug.Log($"下载视频: {recorderId} (编辑器模式)");
#endif
    }

    // Read the current RenderTexture contents, flip them, and hand the raw
    // RGBA bytes to the JS plugin.
    private void CaptureAndSendFrame()
    {
        // (Re)create the CPU-side texture if missing, or if the
        // RenderTexture was resized since the last capture (the original
        // kept using the stale size).
        if (captureTexture == null ||
            captureTexture.width != targetRenderTexture.width ||
            captureTexture.height != targetRenderTexture.height)
        {
            if (captureTexture != null)
            {
                Destroy(captureTexture);
            }
            captureTexture = new Texture2D(
                targetRenderTexture.width,
                targetRenderTexture.height,
                TextureFormat.RGBA32,
                false
            );
        }

        // Read back the pixels, restoring the previously active
        // RenderTexture afterwards.
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = targetRenderTexture;
        captureTexture.ReadPixels(
            new Rect(0, 0, targetRenderTexture.width, targetRenderTexture.height), 0, 0);
        captureTexture.Apply();
        RenderTexture.active = previous;

        // Flip vertically so the browser canvas (top-down) shows the frame
        // the right way up.
        Color32[] pixels = captureTexture.GetPixels32();
        FlipTextureVertically(pixels, captureTexture.width, captureTexture.height);
        captureTexture.SetPixels32(pixels);
        captureTexture.Apply();

        byte[] frameData = captureTexture.GetRawTextureData();
#if !UNITY_EDITOR && UNITY_WEBGL
        // Pin the managed buffer and pass its address to JS. Free the handle
        // in finally so an exception in AddVideoFrame cannot leak the pin
        // (the original called Free() unconditionally after the call).
        GCHandle frameDataHandle = GCHandle.Alloc(frameData, GCHandleType.Pinned);
        try
        {
            AddVideoFrame(recorderId,
                frameDataHandle.AddrOfPinnedObject(),
                captureTexture.width,
                captureTexture.height);
        }
        finally
        {
            frameDataHandle.Free();
        }
#endif
    }

    // In-place vertical flip: swap row y with row (height-1-y).
    private void FlipTextureVertically(Color32[] pixels, int width, int height)
    {
        for (int y = 0; y < height / 2; y++)
        {
            for (int x = 0; x < width; x++)
            {
                int topIndex = y * width + x;
                int bottomIndex = (height - 1 - y) * width + x;
                Color32 temp = pixels[topIndex];
                pixels[topIndex] = pixels[bottomIndex];
                pixels[bottomIndex] = temp;
            }
        }
    }

    void OnDestroy()
    {
        if (isRecording)
        {
            StopRecording();
        }
        if (captureTexture != null)
        {
            Destroy(captureTexture);
        }
    }
}
7.2WebGLRecorder.jslib
javascript
mergeInto(LibraryManager.library, {
    // Create/overwrite the per-id recorder state entry.
    // NOTE: Pointer_stringify is deprecated and removed from current
    // Unity/Emscripten builds; UTF8ToString is the supported replacement.
    InitializeRecorder: function (recorderIdPtr, width, height, frameRate) {
        var recorderId = UTF8ToString(recorderIdPtr);
        if (typeof Module.recorders === 'undefined') {
            Module.recorders = {};
        }
        Module.recorders[recorderId] = {
            width: width,
            height: height,
            frameRate: frameRate,
            recordedChunks: [],
            canvas: null,
            context: null,
            mediaRecorder: null,
            // Actual container/codec is negotiated in StartRecording and
            // reused when building the download Blob.
            mimeType: 'video/webm',
            isInitialized: true
        };
        console.log("初始化录制器: " + recorderId + " - " + width + "x" + height + " @" + frameRate + "fps");
    },
    // Set up the canvas capture stream and start a MediaRecorder on it.
    StartRecording: function (recorderIdPtr) {
        var recorderId = UTF8ToString(recorderIdPtr);
        if (typeof Module.recorders === 'undefined' || !Module.recorders[recorderId]) {
            console.error("录制器未初始化: " + recorderId);
            return;
        }
        var recorder = Module.recorders[recorderId];
        try {
            // Canvas used as the frame sink for AddVideoFrame.
            recorder.canvas = document.createElement('canvas');
            recorder.canvas.width = recorder.width;
            recorder.canvas.height = recorder.height;
            recorder.context = recorder.canvas.getContext('2d');

            var stream = recorder.canvas.captureStream(recorder.frameRate);
            // Pick the first supported codec, best first. The original fell
            // through to 'video/mp4' without checking isTypeSupported, and
            // always labelled the final Blob 'video/webm'.
            var candidates = [
                'video/webm;codecs=vp9',
                'video/webm;codecs=vp8',
                'video/webm',
                'video/mp4'
            ];
            var mimeType = '';
            for (var i = 0; i < candidates.length; i++) {
                if (MediaRecorder.isTypeSupported(candidates[i])) {
                    mimeType = candidates[i];
                    break;
                }
            }
            var options = { videoBitsPerSecond: 2500000 };
            if (mimeType) {
                options.mimeType = mimeType;
                recorder.mimeType = mimeType;
            } else {
                // No candidate supported: let the browser choose its default.
                console.warn('VP8/VP9不支持,使用默认编码: ' + recorderId);
            }
            recorder.mediaRecorder = new MediaRecorder(stream, options);
            recorder.recordedChunks = [];
            recorder.mediaRecorder.ondataavailable = function (event) {
                if (event.data.size > 0) {
                    recorder.recordedChunks.push(event.data);
                }
            };
            recorder.mediaRecorder.onstop = function () {
                console.log("录制完成: " + recorderId + ", 数据块数量: " + recorder.recordedChunks.length);
            };
            // Emit a data chunk every second so memory use stays bounded.
            recorder.mediaRecorder.start(1000);
            console.log("开始录制视频: " + recorderId);
        } catch (error) {
            console.error("启动录制失败: " + recorderId, error);
        }
    },
    // Stop the MediaRecorder; remaining data arrives via ondataavailable.
    StopRecording: function (recorderIdPtr) {
        var recorderId = UTF8ToString(recorderIdPtr);
        if (typeof Module.recorders === 'undefined' || !Module.recorders[recorderId]) {
            console.error("录制器未初始化: " + recorderId);
            return;
        }
        var recorder = Module.recorders[recorderId];
        if (recorder.mediaRecorder && recorder.mediaRecorder.state !== 'inactive') {
            recorder.mediaRecorder.stop();
            console.log("停止录制: " + recorderId);
        }
    },
    // Copy one RGBA frame from the WASM heap onto the capture canvas.
    AddVideoFrame: function (recorderIdPtr, dataPtr, width, height) {
        var recorderId = UTF8ToString(recorderIdPtr);
        if (typeof Module.recorders === 'undefined' || !Module.recorders[recorderId]) {
            return;
        }
        var recorder = Module.recorders[recorderId];
        // Silently drop frames arriving before/after an active recording.
        if (!recorder.context || !recorder.mediaRecorder || recorder.mediaRecorder.state !== 'recording') {
            return;
        }
        try {
            // View over the WASM heap at dataPtr: width*height RGBA pixels.
            // HEAPU8 is always in scope inside a .jslib function.
            var data = new Uint8Array(HEAPU8.buffer, dataPtr, width * height * 4);
            // Copy into a fresh Uint8ClampedArray — the heap view can be
            // invalidated when the WASM memory grows.
            var imageData = new ImageData(new Uint8ClampedArray(data), width, height);
            recorder.context.putImageData(imageData, 0, 0);
        } catch (error) {
            console.error("处理视频帧失败: " + recorderId, error);
        }
    },
    // Assemble the recorded chunks into a Blob and trigger a file download.
    DownloadRecordedVideo: function (recorderIdPtr) {
        var recorderId = UTF8ToString(recorderIdPtr);
        if (typeof Module.recorders === 'undefined' || !Module.recorders[recorderId]) {
            console.error("录制器未初始化: " + recorderId);
            return;
        }
        var recorder = Module.recorders[recorderId];
        if (recorder.recordedChunks.length === 0) {
            console.warn("没有录制的数据可下载: " + recorderId);
            return;
        }
        try {
            // Use the mime type negotiated in StartRecording so the Blob's
            // container type and the file extension match the encoded data.
            var mimeType = recorder.mimeType || 'video/webm';
            var extension = mimeType.indexOf('mp4') !== -1 ? '.mp4' : '.webm';
            var blob = new Blob(recorder.recordedChunks, { type: mimeType });
            var url = URL.createObjectURL(blob);
            var a = document.createElement('a');
            a.style.display = 'none';
            a.href = url;
            var date = new Date();
            var filename = recorderId + '-recording-' + date.toISOString().replace(/[:.]/g, '-') + extension;
            a.download = filename;
            document.body.appendChild(a);
            a.click();
            // Clean up after the click has been processed by the browser.
            setTimeout(function () {
                document.body.removeChild(a);
                URL.revokeObjectURL(url);
                console.log("视频下载完成: " + recorderId);
            }, 100);
        } catch (error) {
            console.error("下载视频失败: " + recorderId, error);
        }
    }
});
将 WebGLRecorder.jslib 放入 Assets/Plugins/WebGL 目录,Unity 打包 WebGL 时会自动链接其中的插件函数。
8.总结
以上实现原理展示了从Unity的RenderTexture到最终视频文件的完整技术路径,涵盖了数据流、内存管理和性能优化的关键细节。打包运行可录制单个RenderTexture画面也可以支持同时录制多个RenderTexture画面。建议不超过5个RenderTexture画面,防止出现性能瓶颈。
8.1性能瓶颈解决方案(频繁纹理调用或跨语言调用)
1.降低录制分辨率
2.降低录制帧率
3.使用多线程Web Worker
8.2帧数据占用内存
1.流式处理,不保存所有帧
2.及时清理不再使用的数据
8.3浏览器兼容性
1.使用第三方库编码备用