OpenGL is a graphics API, not a standalone platform: it provides a set of functions for manipulating graphics and images. Building on the power of textures, this article implements camera preview on Android by rendering the video stream with an OpenGL texture in C++.
Project source on GitHub: https://github.com/wangyongyao1989/WyFFmpeg
I. Implementation Steps and Illustrated Preview:
II. Acquiring Camera Data:
The Android camera delivers the preview video stream as a sequence of image frames.
1. After opening the camera, create an ImageReader to receive the frame data of the video stream.
```java
/**
* Opens the camera.
*/
@SuppressLint({"WrongConstant", "MissingPermission"})
public void openCamera() {
if (checkSelfPermission(mContext, Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED) {
return;
}
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth()
, mPreviewSize.getHeight()
, ImageFormat.YUV_420_888, IMAGE_BUFFER_SIZE);
mImageReader.setOnImageAvailableListener(mVideoCapture
, mBackgroundHandler);
Log.i(TAG, "openCamera");
CameraManager manager = (CameraManager)
mContext.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500
, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting " +
"to lock camera opening.");
}
manager.openCamera(mCameraId, mStateCallback
, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "Cannot " +
"access the camera " + e);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while " +
"trying to lock camera opening.", e);
}
}
```
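Opening the camera by itself does not produce frames: the preview capture session must target the ImageReader's surface so that data reaches the OnImageAvailableListener. In this project that wiring lives in CameraDataHelper; the sketch below is only illustrative, and its names and structure are assumptions rather than the repo's exact code.

```java
// Illustrative sketch: route the preview stream into mImageReader's surface.
private void createPreviewSession(CameraDevice camera) throws CameraAccessException {
    Surface readerSurface = mImageReader.getSurface();
    CaptureRequest.Builder builder =
            camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    builder.addTarget(readerSurface);
    camera.createCaptureSession(Collections.singletonList(readerSurface),
            new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(CameraCaptureSession session) {
                    try {
                        // Repeating request: every preview frame lands in the ImageReader.
                        session.setRepeatingRequest(builder.build(), null, mBackgroundHandler);
                    } catch (CameraAccessException e) {
                        Log.e(TAG, "setRepeatingRequest failed", e);
                    }
                }

                @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                    Log.e(TAG, "Capture session configuration failed");
                }
            }, mBackgroundHandler);
}
```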
2. In the ImageReader.OnImageAvailableListener callback, read the latest Image from the ImageReader and convert it into a packed YUV_420_888 byte array.
```java
@Override
public void onImageAvailable(ImageReader imageReader) {
Image image = imageReader.acquireLatestImage();
if (image != null) {
if (mPreviewFrameHandler != null) {
mPreviewFrameHandler.onPreviewFrame(YUV_420_888_data(image), image.getWidth(), image.getHeight());
}
image.close();
}
}
private static byte[] YUV_420_888_data(Image image) {
final int imageWidth = image.getWidth();
final int imageHeight = image.getHeight();
final Image.Plane[] planes = image.getPlanes();
byte[] data = new byte[imageWidth * imageHeight *
ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
int offset = 0;
for (int plane = 0; plane < planes.length; ++plane) {
final ByteBuffer buffer = planes[plane].getBuffer();
final int rowStride = planes[plane].getRowStride();
// Experimentally, U and V planes have |pixelStride| = 2, which
// essentially means they are packed.
final int pixelStride = planes[plane].getPixelStride();
final int planeWidth = (plane == 0) ? imageWidth : imageWidth / 2;
final int planeHeight = (plane == 0) ? imageHeight : imageHeight / 2;
if (pixelStride == 1 && rowStride == planeWidth) {
// Copy whole plane from buffer into |data| at once.
buffer.get(data, offset, planeWidth * planeHeight);
offset += planeWidth * planeHeight;
} else {
// Copy pixels one by one respecting pixelStride and rowStride.
byte[] rowData = new byte[rowStride];
for (int row = 0; row < planeHeight - 1; ++row) {
buffer.get(rowData, 0, rowStride);
for (int col = 0; col < planeWidth; ++col) {
data[offset++] = rowData[col * pixelStride];
}
}
// Last row is special in some devices and may not contain the full
// |rowStride| bytes of data.
// See http://developer.android.com/reference/android/media/Image.Plane.html#getBuffer()
buffer.get(rowData, 0, Math.min(rowStride, buffer.remaining()));
for (int col = 0; col < planeWidth; ++col) {
data[offset++] = rowData[col * pixelStride];
}
}
}
return data;
}
```
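The returned array is a tightly packed I420 buffer: the full-resolution Y plane followed by the quarter-resolution U and V planes. As a quick illustrative sanity check (not part of the repo):

```java
// An I420 buffer holds 1.5 bytes per pixel: Y (w*h) + U (w*h/4) + V (w*h/4).
byte[] data = YUV_420_888_data(image);
int expected = image.getWidth() * image.getHeight() * 3 / 2;
if (data.length != expected) {
    Log.w(TAG, "Unexpected YUV buffer size: " + data.length + " vs " + expected);
}
```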
III. Setting Up the OpenGL Rendering Environment:
GLTextureCPlusVideoPlayerView extends GLSurfaceView and implements the GLSurfaceView.Renderer interface:
```java
package com.wangyongyao.glplay.view;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import com.wangyongyao.glplay.OpenGLPlayCallJni;
import com.wangyongyao.glplay.camerahelper.camerahelper.CameraDataHelper;
import com.wangyongyao.glplay.camerahelper.camerahelper.CameraDataListener;
import com.wangyongyao.glplay.utils.OpenGLPlayFileUtils;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
* author : wangyongyao https://github.com/wangyongyao1989
* Create Time : 2024/9/3 23:57
* Describe : MyyFFmpeg com.example.myyffmpeg.utils
*/
public class GLTextureCPlusVideoPlayerView extends GLSurfaceView
implements GLSurfaceView.Renderer, CameraDataListener {
private static String TAG = GLTextureCPlusVideoPlayerView.class.getSimpleName();
private OpenGLPlayCallJni mJniCall;
private Context mContext;
private int mWidth;
private int mHeight;
private CameraDataHelper mCameraHelper;
public GLTextureCPlusVideoPlayerView(Context context, OpenGLPlayCallJni jniCall) {
super(context);
mContext = context;
mJniCall = jniCall;
init();
}
public GLTextureCPlusVideoPlayerView(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
init();
}
private void init() {
getHolder().addCallback(this);
setEGLContextClientVersion(3);
setEGLConfigChooser(8, 8, 8, 8, 16, 0);
String fragPath = OpenGLPlayFileUtils.getModelFilePath(mContext
, "texture_video_play_frament.glsl");
String vertexPath = OpenGLPlayFileUtils.getModelFilePath(mContext
, "texture_video_play_vert.glsl");
String picSrc1 = OpenGLPlayFileUtils.getModelFilePath(mContext
, "wall.jpg");
mCameraHelper = new CameraDataHelper(getContext(), this);
mCameraHelper.startCamera();
mJniCall.glTextureVideoPlayCreate(0, vertexPath, fragPath);
setRenderer(this);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
private void stopCameraPreview() {
mCameraHelper.destroy();
}
public void onDrawFrame(GL10 gl) {
if (mJniCall != null) {
mJniCall.glTextureVideoPlayRender();
}
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
Log.e(TAG, "onSurfaceChanged width:" + width + ",height" + height);
if (mJniCall != null) {
mJniCall.glTextureVideoPlayInit(null, null, width, height);
}
mWidth = width;
mHeight = height;
mCameraHelper.initialize(width, height);
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
Log.e(TAG, "onSurfaceCreated:");
}
@Override
public void onPreviewFrame(byte[] yuvData, int width, int height) {
mJniCall.glTextureVideoPlayDraw(yuvData, width, height, 90);
requestRender();
}
public void destroyRender() {
mJniCall.glTextureVideoPlayDestroy();
stopCameraPreview();
}
}
```
Note that the render mode must be set to setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY), so that a texture frame is rendered only after onPreviewFrame() delivers camera data and calls requestRender().
```java
/**
* The renderer only renders
* when the surface is created, or when {@link #requestRender} is called.
*
* @see #getRenderMode()
* @see #setRenderMode(int)
* @see #requestRender()
*/
public final static int RENDERMODE_WHEN_DIRTY = 0;
```
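For completeness, here is a hedged sketch of how the view might be wired into an Activity; OpenGLPlayCallJni and destroyRender() come from the code above, while the Activity itself is illustrative:

```java
public class PlayActivity extends Activity {
    private GLTextureCPlusVideoPlayerView mPlayerView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // The JNI wrapper is created once and handed to the view.
        mPlayerView = new GLTextureCPlusVideoPlayerView(this, new OpenGLPlayCallJni());
        setContentView(mPlayerView);
    }

    @Override
    protected void onDestroy() {
        // Tear down the native renderer and stop the camera preview.
        mPlayerView.destroyRender();
        super.onDestroy();
    }
}
```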
IV. Passing Java Data to the C++ Layer via JNI:
1. Java-side implementation:
The wrapper methods are invoked in this order: glTextureVideoPlayCreate -> glTextureVideoPlayInit -> glTextureVideoPlayDraw -> glTextureVideoPlayRender.
```java
/*********************** OpenGL Texture video playback ********************/
public void glTextureVideoPlayCreate(int type, String vertexPath, String fragPath) {
native_texture_video_play_create(type, vertexPath, fragPath);
}
public void glTextureVideoPlayDestroy() {
native_texture_video_play_destroy();
}
public void glTextureVideoPlayInit(Surface surface, AssetManager assetManager
, int width, int height) {
native_texture_video_play_init(surface, assetManager, width, height);
}
public void glTextureVideoPlayRender() {
native_texture_video_play_render();
}
public void glTextureVideoPlayDraw(byte[] data, int width, int height, int rotation) {
native_texture_video_play_draw(data, width, height, rotation);
}
public void glTextureVideoPlaySetParameters(int params) {
native_texture_video_play_set_parameters(params);
}
public int glTextureVideoPlayGetParameters() {
return native_texture_video_play_get_parameters();
}
private native void native_texture_video_play_create(int type, String vertexPath
, String fragPath);
private native void native_texture_video_play_destroy();
private native void native_texture_video_play_init(Surface surface
, AssetManager assetManager
, int width, int height);
private native void native_texture_video_play_render();
private native void native_texture_video_play_draw(byte[] data, int width
, int height, int rotation);
private native void native_texture_video_play_set_parameters(int params);
private native int native_texture_video_play_get_parameters();
```
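One detail the snippet above omits: the shared library that implements these native methods must be loaded before any of them is called, typically in a static initializer of OpenGLPlayCallJni. A minimal sketch, where the library name is a hypothetical placeholder:

```java
static {
    // Hypothetical name; must match the module built from the C++ sources.
    System.loadLibrary("glplay");
}
```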
2. JNI-side implementation:
```cpp
/*********************** OpenGL Texture camera preview ********************/
extern "C"
JNIEXPORT void JNICALL
cpp_texture_video_play_creat(JNIEnv *env, jobject thiz, jint type,
jstring vertex,
jstring frag) {
const char *vertexPath = env->GetStringUTFChars(vertex, nullptr);
const char *fragPath = env->GetStringUTFChars(frag, nullptr);
if (textureVideoRender == nullptr)
textureVideoRender = new OpenglesTexureVideoRender();
textureVideoRender->setSharderPath(vertexPath, fragPath);
env->ReleaseStringUTFChars(vertex, vertexPath);
env->ReleaseStringUTFChars(frag, fragPath);
}
extern "C"
JNIEXPORT void JNICALL
cpp_texture_video_play_destroy(JNIEnv *env, jobject thiz) {
// Release the renderer created in cpp_texture_video_play_creat().
delete textureVideoRender;
textureVideoRender = nullptr;
}
extern "C"
JNIEXPORT void JNICALL
cpp_texture_video_play_init(JNIEnv *env, jobject thiz,
jobject surface,
jobject assetManager,
jint width,
jint height) {
if (textureVideoRender != nullptr) {
ANativeWindow *window = surface ? ANativeWindow_fromSurface(env, surface) : nullptr;
auto *aAssetManager = assetManager ? AAssetManager_fromJava(env, assetManager) : nullptr;
textureVideoRender->init(window, aAssetManager, (size_t) width, (size_t) height);
}
}
extern "C"
JNIEXPORT void JNICALL
cpp_texture_video_play_render(JNIEnv *env, jobject thiz) {
if (textureVideoRender != nullptr) {
textureVideoRender->render();
}
}
extern "C"
JNIEXPORT void JNICALL
cpp_texture_video_play_draw(JNIEnv *env, jobject obj, jbyteArray data, jint width, jint height,
jint rotation) {
jbyte *bufferPtr = env->GetByteArrayElements(data, nullptr);
jsize arrayLength = env->GetArrayLength(data);
if (textureVideoRender != nullptr) {
textureVideoRender->draw((uint8_t *) bufferPtr, (size_t) arrayLength, (size_t) width,
(size_t) height,
rotation);
}
env->ReleaseByteArrayElements(data, bufferPtr, 0);
}
extern "C"
JNIEXPORT void JNICALL
cpp_texture_video_play_setParameters(JNIEnv *env, jobject thiz, jint p) {
if (textureVideoRender != nullptr) {
textureVideoRender->setParameters((uint32_t) p);
}
}
extern "C"
JNIEXPORT jint JNICALL
cpp_texture_video_play_getParameters(JNIEnv *env, jobject thiz) {
if (textureVideoRender != nullptr) {
return textureVideoRender->getParameters(); // return the value instead of discarding it
}
return 0;
}
static const JNINativeMethod methods[] = {
/*********************** OpenGL Texture video playback ********************/
{"native_texture_video_play_create", "(I"
"Ljava/lang/String;"
"Ljava/lang/String;)V", (void *) cpp_texture_video_play_creat},
{"native_texture_video_play_destroy", "()V", (void *) cpp_texture_video_play_destroy},
{"native_texture_video_play_init", "(Landroid/view/Surface;"
"Landroid/content/res"
"/AssetManager;II)V", (void *) cpp_texture_video_play_init},
{"native_texture_video_play_render", "()V", (void *) cpp_texture_video_play_render},
{"native_texture_video_play_draw", "([BIII)V", (void *) cpp_texture_video_play_draw},
{"native_texture_video_play_set_parameters", "(I)V", (void *) cpp_texture_video_play_setParameters},
{"native_texture_video_play_get_parameters", "()I", (void *) cpp_texture_video_play_getParameters},
};
// Register the native methods dynamically in JNI_OnLoad.
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
LOGD("dynamic registration");
JNIEnv *env;
if ((vm)->GetEnv((void **) &env, JNI_VERSION_1_6) != JNI_OK) {
LOGD("dynamic registration: GetEnv failed");
return JNI_ERR;
}
// Look up the Java class that declares the native methods.
jclass clazz = env->FindClass(rtmp_class_name);
if (clazz == nullptr) {
return JNI_ERR;
}
// Register the native method table against the class.
jint regist_result = env->RegisterNatives(clazz, methods,
sizeof(methods) / sizeof(methods[0]));
if (regist_result) { // non-zero means failure
LOGE("dynamic registration failed, regist_result = %d", regist_result);
} else {
LOGI("dynamic registration succeeded, result = %d", regist_result);
}
return JNI_VERSION_1_6;
}
```
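JNI_OnLoad registers the method table against rtmp_class_name, which is not shown in the excerpt; it must hold the slash-separated, fully qualified name of the Java class that declares the native methods. Presumably something like the following (an assumption, adjust to the actual class):

```cpp
// Assumption: the class that declares the native_texture_video_play_* methods.
static const char *rtmp_class_name = "com/wangyongyao/glplay/OpenGLPlayCallJni";
```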
V. OpenGL Texture Rendering in C++:
The OpenGL code here is adapted from my GitHub project GitHub - wangyongyao1989/AndroidLearnOpenGL: OpenGL basics and applications.
If you are interested, you can also read my related OpenGL blog series: https://blog.csdn.net/wangyongyao1989/category_6943979.html?spm=1001.2014.3001.5482
1. Shader programs:
The shaders are GLSL source files; the directory that holds them is passed down to OpenGLShader.cpp in the C++ layer.
- texture_video_play_vert.glsl, the vertex shader:
```glsl
#version 320 es
out vec2 v_texcoord;
in vec4 position;
in vec2 texcoord;
void main() {
v_texcoord = texcoord;
gl_Position = position;
}
```
- texture_video_play_fragment.glsl, the fragment shader:
```glsl
#version 320 es
precision mediump float;
in vec2 v_texcoord;
uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
out vec4 FragColor; // user-defined output; the gl_ prefix is reserved in GLSL ES, so gl_FragColor cannot be redeclared here
void main() {
float y, u, v, r, g, b;
y = texture(s_textureY, v_texcoord).r;
u = texture(s_textureU, v_texcoord).r;
v = texture(s_textureV, v_texcoord).r;
u = u - 0.5;
v = v - 0.5;
r = y + 1.403 * v;
g = y - 0.344 * u - 0.714 * v;
b = y + 1.770 * u;
FragColor = vec4(r, g, b, 1.0);
}
```
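For reference, the shader implements approximately the BT.601 full-range YUV-to-RGB conversion:

$$
\begin{aligned}
R &= Y + 1.403\,(V - 0.5) \\
G &= Y - 0.344\,(U - 0.5) - 0.714\,(V - 0.5) \\
B &= Y + 1.770\,(U - 0.5)
\end{aligned}
$$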
2. Compiling, linking, and using the shader program in OpenGLShader.cpp:
```cpp
//
// Created by MMM on 2024/8/8.
//
#include "OpenGLShader.h"
GLuint
OpenGLShader::createProgram() {
vertexShader = loadShader(GL_VERTEX_SHADER, gVertexShaderCode);
LOGI("=====gVertexShaderCode :%s", gVertexShaderCode);
LOGI("======gFragmentShaderCode :%s", gFragmentShaderCode);
if (!vertexShader) {
checkGlError("loadShader GL_VERTEX_SHADER");
return 0;
}
fraShader = loadShader(GL_FRAGMENT_SHADER, gFragmentShaderCode);
if (!fraShader) {
checkGlError("loadShader GL_FRAGMENT_SHADER");
return 0;
}
shaderId = glCreateProgram(); // create a program object
if (shaderId) {
glAttachShader(shaderId, vertexShader); // attach the vertex shader to the program object
checkGlError("glAttachShader");
glAttachShader(shaderId, fraShader);
checkGlError("glAttachShader");
glLinkProgram(shaderId); // link the program object
GLint linkStatus = GL_FALSE;
glGetProgramiv(shaderId, GL_LINK_STATUS, &linkStatus); // check whether linking succeeded
if (linkStatus != GL_TRUE) {
GLint bufLength = 0;
glGetProgramiv(shaderId, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength) {
char *buf = (char *) malloc(bufLength);
if (buf) {
glGetProgramInfoLog(shaderId, bufLength, NULL, buf);
LOGE("Could not link shaderId:\n%s\n", buf);
free(buf);
}
}
glDeleteProgram(shaderId); // delete the program on link failure
shaderId = 0;
}
}
return shaderId;
}
/**
* Load and compile a shader.
* @param shaderType GL_VERTEX_SHADER or GL_FRAGMENT_SHADER
* @param pSource null-terminated GLSL source string
* @return the shader handle, or 0 on failure
*/
GLuint OpenGLShader::loadShader(GLenum shaderType, const char *pSource) {
GLuint shader = glCreateShader(shaderType); // create the shader object
if (shader) {
glShaderSource(shader, 1, &pSource, NULL); // attach the GLSL source to the shader object
glCompileShader(shader); // compile the shader
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen) {
char *buf = (char *) malloc(infoLen);
if (buf) {
glGetShaderInfoLog(shader, infoLen, NULL, buf);
LOGE("Could not compile shader %d:\n%s\n",
shaderType, buf);
free(buf);
}
glDeleteShader(shader); // delete the shader object on compile failure
shader = 0;
}
}
}
return shader;
}
bool OpenGLShader::getSharderPath(const char *vertexPath, const char *fragmentPath) {
ifstream vShaderFile;
ifstream fShaderFile;
// ensure ifstream objects can throw exceptions:
vShaderFile.exceptions(ifstream::failbit | ifstream::badbit);
fShaderFile.exceptions(ifstream::failbit | ifstream::badbit);
try {
// open files
vShaderFile.open(vertexPath);
fShaderFile.open(fragmentPath);
stringstream vShaderStream, fShaderStream;
// read file's buffer contents into streams
vShaderStream << vShaderFile.rdbuf();
fShaderStream << fShaderFile.rdbuf();
// close file handlers
vShaderFile.close();
fShaderFile.close();
// convert stream into string
vertexCode = vShaderStream.str();
fragmentCode = fShaderStream.str();
}
catch (ifstream::failure &e) {
LOGE("Could not getSharderPath error :%s", e.what());
return false;
}
gVertexShaderCode = vertexCode.c_str();
gFragmentShaderCode = fragmentCode.c_str();
return true;
}
void OpenGLShader::printGLString(const char *name, GLenum s) {
const char *v = (const char *) glGetString(s);
LOGI("OpenGL %s = %s\n", name, v);
}
void OpenGLShader::checkGlError(const char *op) {
for (GLint error = glGetError(); error; error = glGetError()) {
LOGI("after %s() glError (0x%x)\n", op, error);
}
}
OpenGLShader::~OpenGLShader() {
if (vertexShader) {
glDeleteShader(vertexShader);
}
if (fraShader) {
glDeleteShader(fraShader);
}
vertexCode.clear();
fragmentCode.clear();
gVertexShaderCode = nullptr;
gFragmentShaderCode = nullptr;
}
OpenGLShader::OpenGLShader() {
}
```
3. YUV texture rendering in OpenGLTextureVideoRender.cpp:
The overall flow is: createProgram() -> createTextures() -> draw() -> render().
- createProgram() creates the program and looks up the vertex-position and texture-coordinate attributes, as well as the three sampler uniforms, declared in the shaders:
```cpp
int
OpenglesTexureVideoRender::createProgram() {
m_program = lightColorShader->createProgram();
m_vertexShader = lightColorShader->vertexShader;
m_pixelShader = lightColorShader->fraShader;
LOGI("OpenglesTexureVideoRender createProgram m_program:%d", m_program);
if (!m_program) {
LOGE("Could not create program.");
return 0;
}
// Look up the attribute and uniform locations
m_vertexPos = (GLuint) glGetAttribLocation(m_program, "position");
m_textureYLoc = glGetUniformLocation(m_program, "s_textureY");
m_textureULoc = glGetUniformLocation(m_program, "s_textureU");
m_textureVLoc = glGetUniformLocation(m_program, "s_textureV");
m_textureLoc = (GLuint) glGetAttribLocation(m_program, "texcoord");
return m_program;
}
```
- createTextures() creates one texture for each of the Y, U, and V planes:
```cpp
bool OpenglesTexureVideoRender::createTextures() {
auto widthY = (GLsizei) m_width;
auto heightY = (GLsizei) m_height;
glActiveTexture(GL_TEXTURE0);
glGenTextures(1, &m_textureIdY);
glBindTexture(GL_TEXTURE_2D, m_textureIdY);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthY, heightY, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
nullptr);
if (!m_textureIdY) {
// check_gl_error("Create Y texture");
return false;
}
GLsizei widthU = (GLsizei) m_width / 2;
GLsizei heightU = (GLsizei) m_height / 2;
glActiveTexture(GL_TEXTURE1);
glGenTextures(1, &m_textureIdU);
glBindTexture(GL_TEXTURE_2D, m_textureIdU);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthU, heightU, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
nullptr);
if (!m_textureIdU) {
// check_gl_error("Create U texture");
return false;
}
GLsizei widthV = (GLsizei) m_width / 2;
GLsizei heightV = (GLsizei) m_height / 2;
glActiveTexture(GL_TEXTURE2);
glGenTextures(1, &m_textureIdV);
glBindTexture(GL_TEXTURE_2D, m_textureIdV);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthV, heightV, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
nullptr);
if (!m_textureIdV) {
// check_gl_error("Create V texture");
return false;
}
return true;
}
```
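A side note: GL_LUMINANCE is a legacy unsized format that OpenGL ES 3.x still accepts for compatibility. A sketch of an alternative using the sized single-channel format GL_R8 (the fragment shader already samples only the .r channel, so it works unchanged):

```cpp
// ES 3.x alternative to GL_LUMINANCE: a sized single-channel texture.
// For plane widths that are not multiples of 4, also call
// glPixelStorei(GL_UNPACK_ALIGNMENT, 1) before uploading.
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, widthY, heightY, 0,
             GL_RED, GL_UNSIGNED_BYTE, nullptr);
```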
- draw() splits the packed I420 buffer into per-plane pointers (the full Y plane first, then U, then V, matching the layout produced by YUV_420_888_data() on the Java side):
```cpp
void OpenglesTexureVideoRender::draw(uint8_t *buffer, size_t length
, size_t width, size_t height,
float rotation) {
m_length = length;
m_rotation = rotation;
video_frame frame{};
frame.width = width;
frame.height = height;
frame.stride_y = width;
frame.stride_uv = width / 2;
frame.y = buffer;
frame.u = buffer + width * height;
frame.v = buffer + width * height * 5 / 4;
updateFrame(frame);
}
void OpenglesTexureVideoRender::updateFrame(const video_frame &frame) {
m_sizeY = frame.width * frame.height;
m_sizeU = frame.width * frame.height / 4;
m_sizeV = frame.width * frame.height / 4;
if (m_pDataY == nullptr || m_width != frame.width || m_height != frame.height) {
m_pDataY = std::make_unique<uint8_t[]>(m_sizeY + m_sizeU + m_sizeV);
m_pDataU = m_pDataY.get() + m_sizeY;
m_pDataV = m_pDataU + m_sizeU;
isProgramChanged = true;
}
m_width = frame.width;
m_height = frame.height;
if (m_width == frame.stride_y) {
memcpy(m_pDataY.get(), frame.y, m_sizeY);
} else {
uint8_t *pSrcY = frame.y;
uint8_t *pDstY = m_pDataY.get();
for (int h = 0; h < m_height; h++) {
memcpy(pDstY, pSrcY, m_width);
pSrcY += frame.stride_y;
pDstY += m_width;
}
}
if (m_width / 2 == frame.stride_uv) {
memcpy(m_pDataU, frame.u, m_sizeU);
memcpy(m_pDataV, frame.v, m_sizeV);
} else {
uint8_t *pSrcU = frame.u;
uint8_t *pSrcV = frame.v;
uint8_t *pDstU = m_pDataU;
uint8_t *pDstV = m_pDataV;
for (int h = 0; h < m_height / 2; h++) {
memcpy(pDstU, pSrcU, m_width / 2);
memcpy(pDstV, pSrcV, m_width / 2);
pDstU += m_width / 2;
pDstV += m_width / 2;
pSrcU += frame.stride_uv;
pSrcV += frame.stride_uv;
}
}
isDirty = true;
}
```
- render() re-uploads the three YUV textures whenever new frame data has arrived, then draws the quad:
```cpp
void OpenglesTexureVideoRender::render() {
// LOGI("OpenglesTexureVideoRender render");
glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // set the clear color before clearing
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if (!updateTextures() || !useProgram()) return;
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
bool OpenglesTexureVideoRender::updateTextures() {
if (!m_textureIdY
&& !m_textureIdU
&& !m_textureIdV
&& !createTextures()) return false;
// LOGI("OpenglesTexureVideoRender updateTextures");
if (isDirty) {
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_textureIdY);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width, (GLsizei) m_height, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataY.get());
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, m_textureIdU);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width / 2, (GLsizei) m_height / 2,
0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataU);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, m_textureIdV);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width / 2, (GLsizei) m_height / 2,
0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataV);
isDirty = false;
return true;
}
return false;
}
```
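render() also calls useProgram(), which the excerpt above does not show. Below is a minimal sketch of what it plausibly does, based on the locations obtained in createProgram() and the GL_TRIANGLE_STRIP draw in render(); the details are assumptions, not the repo's exact code:

```cpp
// Sketch: bind the program, point the YUV samplers at texture units 0..2,
// and feed a fullscreen quad as client-side vertex arrays.
bool OpenglesTexureVideoRender::useProgram() {
    if (!m_program && !(m_program = createProgram())) return false;
    glUseProgram(m_program);

    // Fullscreen quad in GL_TRIANGLE_STRIP order; texcoords flip vertically
    // because camera rows arrive top-first.
    static const GLfloat vertices[] = {
            -1.0f, -1.0f,
             1.0f, -1.0f,
            -1.0f,  1.0f,
             1.0f,  1.0f,
    };
    static const GLfloat texcoords[] = {
            0.0f, 1.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f,
    };
    glVertexAttribPointer(m_vertexPos, 2, GL_FLOAT, GL_FALSE, 0, vertices);
    glEnableVertexAttribArray(m_vertexPos);
    glVertexAttribPointer(m_textureLoc, 2, GL_FLOAT, GL_FALSE, 0, texcoords);
    glEnableVertexAttribArray(m_textureLoc);

    // Each sampler uniform selects the texture unit bound in updateTextures().
    glUniform1i(m_textureYLoc, 0);
    glUniform1i(m_textureULoc, 1);
    glUniform1i(m_textureVLoc, 2);
    return true;
}
```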