FFmpeg解码视频数据OpenGL Texture播放

系列文章:

FFmpeg解码音频数据AudioTrack/OpenSL播放

FFmpeg解码视频数据ANativeWindow播放

FFmpeg解码视频数据OpenGL Texture播放


上一篇介绍了FFmpeg解码视频数据后,用ANativeWindow进行视频的播放。以此为基础拓展到OpenGL Texture纹理进行视频的播放。

FFmpeg解码视频数据OpenGL Texture播放

一、GL C++层运行环境:

在前面所写的博客 OpenGL 自定义SurfaceView Texture C++预览Camera视频 中已经分析了GLSurfaceView的源码,感兴趣的可以查阅。

GLSurfaceView源码分析:

在setRenderer()方法着手分析,在这个方法中创建了EGLConfigChooser、EGLContextFactory、EGLWindowSurfaceFactory、GLThread。

EGLConfigChooser: 选择一个具有指定r、g、b、a尺寸以及至少指定深度和模板(stencil)尺寸的配置;

EGLContextFactory:创建GLContext的环境,这里的GLContext就是OpenGL运行所需的上下文。

EGLWindowSurfaceFactory:创建EGLSurface,将EGLDisplay与用于显示的NativeWindow关联起来。

GLThread: GLThread其核心就是维护一个子线程,在子线程中运行着与OpenGL相关的操作,其中GLContext一定要在这个线程中保持唯一。线程采用弱引用的方式来维护着对GLSurfaceView的持有。

C++层面的GL运行环境:

EglCore.cpp:

维护EGLDisplay、EGLConfig、EGLContext的创建以及上下文切换的操作类,代码如下:

cpp 复制代码
//  Author : wangyongyao https://github.com/wangyongyao1989
// Created by MMM on 2024/10/11.
// 来源于google开源项目https://github.com/google/grafika 中的EglCore.java类的改造。


#include "EglCore.h"
#include <assert.h>

/** Default constructor: initializes EGL with no shared context and no flags. */
EglCore::EglCore() {
    init(NULL, 0);
}


/** Destructor: tears down the display/context via release(). */
EglCore::~EglCore() {
    release();
}

/**
 * Constructor.
 * @param sharedContext an existing EGLContext to share objects with, or NULL
 * @param flags bitmask of FLAG_* options (e.g. FLAG_TRY_GLES3, FLAG_RECORDABLE)
 */
EglCore::EglCore(EGLContext sharedContext, int flags) {
    init(sharedContext, flags);
}

/**
 * 初始化
 * @param sharedContext
 * @param flags
 * @return
 */
/**
 * Initializes the EGL display, chooses a config and creates a GLES context.
 * Tries a GLES3 context first when FLAG_TRY_GLES3 is set, then falls back
 * to GLES2.
 *
 * @param sharedContext an existing context to share objects with, or NULL/EGL_NO_CONTEXT
 * @param flags bitmask of FLAG_* options
 * @return true on success, false if the display or a context could not be created
 */
bool EglCore::init(EGLContext sharedContext, int flags) {
    assert(mEGLDisplay == EGL_NO_DISPLAY);
    if (mEGLDisplay != EGL_NO_DISPLAY) {
        LOGE("EGL already set up");
        return false;
    }
    if (sharedContext == NULL) {
        sharedContext = EGL_NO_CONTEXT;
    }

    mEGLDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    assert(mEGLDisplay != EGL_NO_DISPLAY);
    if (mEGLDisplay == EGL_NO_DISPLAY) {
        LOGE("unable to get EGL14 display.\n");
        return false;
    }

    if (!eglInitialize(mEGLDisplay, 0, 0)) {
        mEGLDisplay = EGL_NO_DISPLAY;
        LOGE("unable to initialize EGL14");
        return false;
    }

    // Try a GLES3 context first when requested.
    if ((flags & FLAG_TRY_GLES3) != 0) {
        EGLConfig config = getConfig(flags, 3);
        if (config != NULL) {
            int attrib3_list[] = {
                    EGL_CONTEXT_CLIENT_VERSION, 3,
                    EGL_NONE
            };
            EGLContext context = eglCreateContext(mEGLDisplay, config,
                                                  sharedContext, attrib3_list);
            checkEglError("eglCreateContext");
            // BUGFIX: checkEglError() consumes the error flag via eglGetError(),
            // so a second eglGetError() here always returned EGL_SUCCESS even
            // when context creation failed. Test the returned handle instead.
            if (context != EGL_NO_CONTEXT) {
                mEGLConfig = config;
                mEGLContext = context;
                mGlVersion = 3;
            }
        }
    }
    // Fall back to GLES2 if no GLES3 context was created.
    if (mEGLContext == EGL_NO_CONTEXT) {
        EGLConfig config = getConfig(flags, 2);
        assert(config != NULL);
        int attrib2_list[] = {
                EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL_NONE
        };
        EGLContext context = eglCreateContext(mEGLDisplay, config,
                                              sharedContext, attrib2_list);
        checkEglError("eglCreateContext");
        if (context != EGL_NO_CONTEXT) {
            mEGLConfig = config;
            mEGLContext = context;
            mGlVersion = 2;
        }
    }

    // BUGFIX: previously a total failure fell through and returned true while
    // mEGLContext was still EGL_NO_CONTEXT.
    if (mEGLContext == EGL_NO_CONTEXT) {
        LOGE("unable to create any EGL context (GLES3 or GLES2)");
        return false;
    }

    // Resolve the eglPresentationTimeANDROID extension entry point (optional).
    eglPresentationTimeANDROID = (EGL_PRESENTATION_TIME_ANDROIDPROC)
            eglGetProcAddress("eglPresentationTimeANDROID");
    if (!eglPresentationTimeANDROID) {
        LOGE("eglPresentationTimeANDROID is not available!");
    }

    int values[1] = {0};
    eglQueryContext(mEGLDisplay, mEGLContext, EGL_CONTEXT_CLIENT_VERSION, values);
    LOGD("EGLContext created, client version %d", values[0]);

    return true;
}


/**
 * 获取合适的EGLConfig
 * @param flags
 * @param version
 * @return
 */
/**
 * Chooses an RGBA8888 EGLConfig renderable with the requested GLES version.
 * The two slots before the final EGL_NONE are placeholders that get patched to
 * EGL_RECORDABLE_ANDROID when FLAG_RECORDABLE is requested.
 *
 * @param flags bitmask of FLAG_* options
 * @param version 2 or 3 (>=3 also advertises EGL_OPENGL_ES3_BIT_KHR)
 * @return a matching config, or NULL if none was found
 */
EGLConfig EglCore::getConfig(int flags, int version) {
    int renderableType = EGL_OPENGL_ES2_BIT;
    if (version >= 3) {
        renderableType |= EGL_OPENGL_ES3_BIT_KHR;
    }
    int attribList[] = {
            EGL_RED_SIZE, 8,
            EGL_GREEN_SIZE, 8,
            EGL_BLUE_SIZE, 8,
            EGL_ALPHA_SIZE, 8,
            //EGL_DEPTH_SIZE, 16,
            //EGL_STENCIL_SIZE, 8,
            EGL_RENDERABLE_TYPE, renderableType,
            EGL_NONE, 0,      // placeholder for recordable [@-3]
            EGL_NONE
    };
    int length = sizeof(attribList) / sizeof(attribList[0]);
    if ((flags & FLAG_RECORDABLE) != 0) {
        attribList[length - 3] = EGL_RECORDABLE_ANDROID;
        attribList[length - 2] = 1;
    }
    EGLConfig configs = NULL;
    int numConfigs = 0;
    // BUGFIX: eglChooseConfig can return EGL_TRUE with zero matching configs,
    // in which case `configs` is left untouched; treat that as a failure too.
    if (!eglChooseConfig(mEGLDisplay, attribList, &configs, 1, &numConfigs)
        || numConfigs < 1) {
        LOGW("unable to find RGB8888 / %d  EGLConfig", version);
        return NULL;
    }
    return configs;
}

/**
 * 释放资源
 */
/**
 * Releases all EGL state: unbinds the current context, destroys the context,
 * releases per-thread EGL state and terminates the display connection.
 * Safe to call more than once (second call is a no-op).
 */
void EglCore::release() {
    if (mEGLDisplay != EGL_NO_DISPLAY) {
        // Unbind before destroying; a context that is still current on some
        // thread is only marked for deletion by eglDestroyContext.
        eglMakeCurrent(mEGLDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        eglDestroyContext(mEGLDisplay, mEGLContext);
        eglReleaseThread();
        eglTerminate(mEGLDisplay);
    }

    mEGLDisplay = EGL_NO_DISPLAY;
    mEGLContext = EGL_NO_CONTEXT;
    mEGLConfig = NULL;
}

/**
 * Returns the EGLContext owned by this core (EGL_NO_CONTEXT if init failed).
 * @return the context handle
 */
EGLContext EglCore::getEGLContext() {
    return mEGLContext;
}

/**
 * Destroys an EGLSurface previously created through this core.
 * @param eglSurface surface to destroy
 */
void EglCore::releaseSurface(EGLSurface eglSurface) {
    eglDestroySurface(mEGLDisplay, eglSurface);
}

/**
 * 创建EGLSurface
 * @param surface
 * @return
 */
/**
 * Creates an on-screen EGLSurface for the given native window. Takes a
 * reference on the window (ANativeWindow_acquire); the matching release
 * happens when the owning WindowSurface is released.
 *
 * @param surface the ANativeWindow to render into (must not be NULL)
 * @return the new surface, or NULL on failure
 */
EGLSurface EglCore::createWindowSurface(ANativeWindow *surface) {
    assert(surface != NULL);
    if (surface == NULL) {
        LOGE("ANativeWindow is NULL!");
        return NULL;
    }
    int surfaceAttribs[] = {
            EGL_NONE
    };

    ANativeWindow_acquire(surface);
    ANativeWindow_setBuffersGeometry(surface, 0, 0, AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM);

    LOGD("eglCreateWindowSurface start");
    EGLSurface eglSurface = eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface, surfaceAttribs);
    checkEglError("eglCreateWindowSurface");
    assert(eglSurface != NULL);
    if (eglSurface == NULL) {
        // BUGFIX: drop the window reference acquired above so a failed surface
        // creation does not leak an ANativeWindow reference.
        ANativeWindow_release(surface);
        LOGE("EGLSurface is NULL!");
        return NULL;
    }
    return eglSurface;
}

/**
 * 创建离屏渲染的EGLSurface
 * @param width
 * @param height
 * @return
 */
/**
 * Creates an off-screen (pbuffer) EGLSurface of the requested size.
 * @param width  surface width in pixels
 * @param height surface height in pixels
 * @return the new surface, or NULL on failure
 */
EGLSurface EglCore::createOffscreenSurface(int width, int height) {
    int pbufferAttribs[] = {
            EGL_WIDTH, width,
            EGL_HEIGHT, height,
            EGL_NONE
    };
    EGLSurface result = eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, pbufferAttribs);
    assert(result != NULL);
    if (result == NULL) {
        LOGE("Surface was null");
        return NULL;
    }
    return result;
}

/**
 * 切换到当前的上下文
 * @param eglSurface
 */
/**
 * Makes our context current on the calling thread, using the given surface
 * for both drawing and reading.
 * @param eglSurface surface to bind
 */
void EglCore::makeCurrent(EGLSurface eglSurface) {
    if (mEGLDisplay == EGL_NO_DISPLAY) {
        // Not fatal here: eglMakeCurrent below will simply fail and log.
        LOGD("Note: makeCurrent w/o display.\n");
    }
    if (!eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
        // TODO: throw/propagate instead of only logging
        LOGD("Note: eglMakeCurrent error.\n");
    }
}

/**
 * Makes our context current with separate draw and read surfaces
 * (e.g. for reading back from one surface while rendering to another).
 * @param drawSurface surface to draw into
 * @param readSurface surface to read from
 */
void EglCore::makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
    if (mEGLDisplay == EGL_NO_DISPLAY) {
        LOGD("Note: makeCurrent w/o display.\n");
    }
    if (!eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
        // TODO: throw/propagate instead of silently ignoring the failure
    }
}

/**
 * Unbinds any context/surface from the calling thread.
 */
void EglCore::makeNothingCurrent() {
    if (!eglMakeCurrent(mEGLDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT)) {
        // TODO: throw/propagate instead of silently ignoring the failure
    }
}

/**
 * 交换显示
 * @param eglSurface
 * @return
 */
/**
 * Presents the back buffer of the given surface.
 * @param eglSurface surface to present
 * @return true on success
 */
bool EglCore::swapBuffers(EGLSurface eglSurface) {
    return eglSwapBuffers(mEGLDisplay, eglSurface);
}

/**
 * Sets the presentation timestamp (pts) for the next frame on this surface.
 * @param eglSurface target surface
 * @param nsecs timestamp in nanoseconds
 */
void EglCore::setPresentationTime(EGLSurface eglSurface, long nsecs) {
    // BUGFIX: init() only logs a warning when the eglPresentationTimeANDROID
    // extension cannot be resolved, so the function pointer may be null here;
    // calling through it unguarded would crash.
    if (eglPresentationTimeANDROID) {
        eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
    }
}

/**
 * Checks whether our context and the given surface are current on the
 * calling thread.
 * @param eglSurface surface to test against the thread's current draw surface
 * @return true if both match
 */
bool EglCore::isCurrent(EGLSurface eglSurface) {
    return mEGLContext == eglGetCurrentContext() &&
           eglSurface == eglGetCurrentSurface(EGL_DRAW);
}

/**
 * 查询surface
 * @param eglSurface
 * @param what
 * @return
 */
/**
 * Queries a surface attribute (e.g. EGL_WIDTH / EGL_HEIGHT).
 * @param eglSurface surface to query
 * @param what attribute token
 * @return the queried value, or 0 if the query fails
 */
int EglCore::querySurface(EGLSurface eglSurface, int what) {
    // BUGFIX: initialize so a failed query cannot return stack garbage.
    int value = 0;
    // BUGFIX: eglQuerySurface takes the EGLDisplay as its first argument;
    // the original code passed mEGLContext here.
    eglQuerySurface(mEGLDisplay, eglSurface, what, &value);
    return value;
}

/**
 * Queries an EGL string (e.g. EGL_VENDOR, EGL_VERSION, EGL_EXTENSIONS).
 * @param what string token
 * @return the string owned by EGL (do not free)
 */
const char* EglCore::queryString(int what) {
    return eglQueryString(mEGLDisplay, what);
}

/**
 * 获取GLES版本号
 * @return
 */
int EglCore::getGlVersion() {
    return mGlVersion;
}

/**
 * 检查是否出错
 * @param msg
 */
void EglCore::checkEglError(const char *msg) {
    int error;
    if ((error = eglGetError()) != EGL_SUCCESS) {
        // TODO 抛出异常
        LOGE("%s: EGL error: %x", msg, error);
    }
}

EglSurfaceBase.cpp:

维护着Surface的创建显示、EGLContext的切换、交换到前台显示、释放EGLSurface具体操作,代码如下:

cpp 复制代码
//  Author : wangyongyao https://github.com/wangyongyao1989
// Created by MMM on 2024/10/11.
// 来源于google开源项目https://github.com/google/grafika 中的 EglSurfaceBase.java 类的改造。


#include "EglSurfaceBase.h"

#include <assert.h>
#include <GLES2/gl2.h>


/** Constructor: stores the shared EglCore; no surface exists yet. */
EglSurfaceBase::EglSurfaceBase(EglCore *eglCore) : mEglCore(eglCore) {
    mEglSurface = EGL_NO_SURFACE;
}

/**
 * Creates the on-screen surface for the given native window.
 * A surface may only be created once per instance.
 * @param nativeWindow window to render into
 */
void EglSurfaceBase::createWindowSurface(ANativeWindow *nativeWindow) {
    assert(mEglSurface == EGL_NO_SURFACE);
    if (mEglSurface != EGL_NO_SURFACE) {
        LOGI("surface already created\n");
        return;
    }
    mEglSurface = mEglCore->createWindowSurface(nativeWindow);
}

/**
 * 创建离屏surface
 * @param width
 * @param height
 */
/**
 * Creates an off-screen (pbuffer) surface and records its dimensions.
 * A surface may only be created once per instance.
 * @param width  surface width in pixels
 * @param height surface height in pixels
 */
void EglSurfaceBase::createOffscreenSurface(int width, int height) {
    assert(mEglSurface == EGL_NO_SURFACE);
    if (mEglSurface == EGL_NO_SURFACE) {
        mEglSurface = mEglCore->createOffscreenSurface(width, height);
        mWidth = width;
        mHeight = height;
    } else {
        LOGE("surface already created\n");
    }
}

/**
 * Returns the surface width, querying EGL when the cached value is unset
 * (window surfaces never record a size locally).
 */
int EglSurfaceBase::getWidth() {
    return (mWidth < 0) ? mEglCore->querySurface(mEglSurface, EGL_WIDTH)
                        : mWidth;
}

/**
 * Returns the surface height, querying EGL when the cached value is unset.
 */
int EglSurfaceBase::getHeight() {
    return (mHeight < 0) ? mEglCore->querySurface(mEglSurface, EGL_HEIGHT)
                         : mHeight;
}

/**
 * 释放EGLSurface
 */
/**
 * Destroys the EGLSurface and resets the cached dimensions so the instance
 * can create a new surface later.
 */
void EglSurfaceBase::releaseEglSurface() {
    mEglCore->releaseSurface(mEglSurface);
    mEglSurface = EGL_NO_SURFACE;
    mWidth = mHeight = -1;
}

/**
 * Makes our surface and the shared context current on the calling thread.
 */
void EglSurfaceBase::makeCurrent() {
    mEglCore->makeCurrent(mEglSurface);
}

/**
 * Makes our EGL context and surface current for drawing, using the supplied
 * surface for reading.
 * NOTE(review): the parameter is passed by value, copying the EglSurfaceBase
 * object; only its mEglSurface handle is used, so behavior is correct, but a
 * const reference in the header would avoid the copy — confirm against the
 * declaration before changing.
 */
void EglSurfaceBase::makeCurrentReadFrom(EglSurfaceBase readSurface) {
    mEglCore->makeCurrent(mEglSurface,readSurface.mEglSurface);
}

/**
 * 交换到前台显示
 * @return
 */
/**
 * Presents the back buffer of our surface.
 * @return true on success; a failure is logged as a warning
 */
bool EglSurfaceBase::swapBuffers() {
    const bool ok = mEglCore->swapBuffers(mEglSurface);
    if (!ok) {
        LOGD("WARNING: swapBuffers() failed");
    }
    return ok;
}

/**
 * Forwards the presentation timestamp (pts) for the next frame.
 * @param nsecs timestamp in nanoseconds
 */
void EglSurfaceBase::setPresentationTime(long nsecs) {
    mEglCore->setPresentationTime(mEglSurface, nsecs);
}

/**
 * 获取当前像素
 * @return
 */
char* EglSurfaceBase::getCurrentFrame() {
    char *pixels = NULL;
    glReadPixels(0, 0, getWidth(), getHeight(), GL_RGBA, GL_UNSIGNED_BYTE, pixels);
    return pixels;
}

WindowSurface.cpp:

对EglSurfaceBase.cpp的封装,只提供创建Surface、销毁Surface、重建Surface三个方法对外使用,代码如下:

cpp 复制代码
//  Author : wangyongyao https://github.com/wangyongyao1989
// Created by MMM on 2024/10/11.
// 来源于google开源项目 https://github.com/google/grafika WindowSurface.java类的改造。


#include "WindowSurface.h"
#include <assert.h>

/**
 * Wraps a native window in an on-screen EGLSurface.
 * @param eglCore shared EGL state
 * @param window window to render into
 * @param releaseSurface whether release() should also release the window
 */
WindowSurface::WindowSurface(EglCore *eglCore, ANativeWindow *window, bool releaseSurface)
        : EglSurfaceBase(eglCore) {
    mSurface = window;
    createWindowSurface(mSurface);
    mReleaseSurface = releaseSurface;
}

/**
 * Convenience constructor.
 * NOTE(review): mReleaseSurface is not assigned here — confirm the header
 * gives it an in-class default, otherwise it is read uninitialized later.
 */
WindowSurface::WindowSurface(EglCore *eglCore, ANativeWindow *window)
        : EglSurfaceBase(eglCore) {
    createWindowSurface(window);
    mSurface = window;
}

/**
 * Destroys the EGLSurface and drops our reference to the native window.
 */
void WindowSurface::release() {
    releaseEglSurface();
    if (mSurface != NULL) {
        ANativeWindow_release(mSurface);
        mSurface = NULL;
    }

}

/**
 * Recreates the EGLSurface against a (possibly new) EglCore after the old
 * core was torn down. The native window must still be alive.
 * @param eglCore the new EGL state to attach to
 */
void WindowSurface::recreate(EglCore *eglCore) {
    assert(mSurface != NULL);
    if (mSurface == NULL) {
        LOGE("not yet implemented ANativeWindow");
        return;
    }
    mEglCore = eglCore;
    createWindowSurface(mSurface);
}

二、GL Render渲染环境:

关于OpenGL相关的基础有兴趣可查阅我的相关博客:LearnOpenGL之入门基础

Looper:

维护一个类handler的消息机制,实现线程间的消息交互。

Looper.h代码如下:

cpp 复制代码
//  Author : wangyongyao https://github.com/wangyongyao1989
// Created by MMM on 2024/10/14.
//

#ifndef MYYFFMPEG_LOOPER_H
#define MYYFFMPEG_LOOPER_H

#include <pthread.h>
#include <sys/types.h>
#include <semaphore.h>
#include "LogUtils.h"

// A single node in Looper's singly-linked message queue.
struct LooperMessage {
    int what;            // message id (e.g. a MSG_* constant)
    int arg1;            // first integer payload
    int arg2;            // second integer payload
    void *obj;           // optional opaque payload; ownership stays with the poster
    LooperMessage *next; // intrusive next pointer (NULL at tail)
    bool quit;           // true => worker thread exits its loop
};


/**
 * A minimal Handler/Looper-style message queue: messages posted from any
 * thread are processed on a dedicated pthread worker via handleMessage().
 * Subclasses override handleMessage() to run work on the worker thread.
 */
class Looper {

public:
    Looper();

    // Non-copyable: owns a thread and two semaphores.
    Looper &operator=(const Looper &) = delete;

    Looper(Looper &) = delete;

    virtual ~Looper();

    // Post a message to the worker thread. flush=true drops all messages
    // still pending in the queue before enqueueing this one.
    void postMessage(int what, bool flush = false);
    void postMessage(int what, void *obj, bool flush = false);
    void postMessage(int what, int arg1, int arg2, bool flush = false);
    void postMessage(int what, int arg1, int arg2, void *obj, bool flush = false);

    // Posts a quit message and joins the worker thread (blocking).
    void quit();

    // Called on the worker thread for each dequeued message; base impl is a no-op.
    virtual void handleMessage(LooperMessage *msg);

private:
    // Appends (or, with flush, replaces the queue with) msg under the write lock.
    void addMessage(LooperMessage *msg, bool flush);

    // pthread entry point; forwards to loop().
    static void *trampoline(void *p);

    void loop(void);

    LooperMessage *head;      // queue head; protected by headWriteProtect
    pthread_t worker;         // worker thread handle
    sem_t headWriteProtect;   // binary semaphore guarding the queue
    sem_t headDataAvailable;  // counts queued messages
    bool running;             // true while the worker thread is alive

};

#endif //MYYFFMPEG_LOOPER_H

Looper.cpp代码如下:

cpp 复制代码
#include "Looper.h"
#include <jni.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <semaphore.h>


struct LooperMessage;
typedef struct LooperMessage LooperMessage;

void *Looper::trampoline(void *p) {
    ((Looper *) p)->loop();
    return NULL;
}

/**
 * Starts the worker thread and initializes the queue semaphores:
 * headWriteProtect is a binary lock over the list, headDataAvailable counts
 * queued messages.
 */
Looper::Looper() {
    head = NULL;

    sem_init(&headDataAvailable, 0, 0);
    sem_init(&headWriteProtect, 0, 1);

    // BUGFIX: set the flag before the worker starts so there is no window in
    // which the thread is already alive while `running` still reads false.
    running = true;

    pthread_attr_t attr;
    pthread_attr_init(&attr);
    int rc = pthread_create(&worker, &attr, trampoline, this);
    // BUGFIX: the attr object was never destroyed.
    pthread_attr_destroy(&attr);
    if (rc != 0) {
        // BUGFIX: the pthread_create return value was ignored.
        LOGE("Looper: pthread_create failed (%d)", rc);
        running = false;
    }
}

/**
 * Destructor: if the worker is still alive, logs a warning and performs a
 * blocking quit() (pending messages are not processed).
 */
Looper::~Looper() {
    if (running) {
        LOGE("Looper deleted while still running. Some messages will not be processed");
        quit();
    }
}

void Looper::postMessage(int what, bool flush) {
    postMessage(what, 0, 0, NULL, flush);
}

void Looper::postMessage(int what, void *obj, bool flush) {
    postMessage(what, 0, 0, obj, flush);
}

void Looper::postMessage(int what, int arg1, int arg2, bool flush) {
    postMessage(what, arg1, arg2, NULL, flush);
}

void Looper::postMessage(int what, int arg1, int arg2, void *obj, bool flush) {
    LooperMessage *msg = new LooperMessage();
    msg->what = what;
    msg->obj = obj;
    msg->arg1 = arg1;
    msg->arg2 = arg2;
    msg->next = NULL;
    msg->quit = false;
    addMessage(msg, flush);
}

/**
 * Appends msg to the queue tail under the write lock; with flush=true, frees
 * every pending message first so msg becomes the only entry.
 * NOTE(review): flushed messages' `obj` payloads are not freed here —
 * ownership must remain with the poster; confirm call sites agree.
 */
void Looper::addMessage(LooperMessage *msg, bool flush) {
    sem_wait(&headWriteProtect);
    LooperMessage *h = head;

    if (flush) {
        // Drop everything currently queued. headDataAvailable stays elevated;
        // loop() tolerates that by re-checking head for NULL.
        while (h) {
            LooperMessage *next = h->next;
            delete h;
            h = next;
        }
        h = NULL;
    }
    if (h) {
        // Walk to the tail and link the new message.
        while (h->next) {
            h = h->next;
        }
        h->next = msg;
    } else {
        // Empty (or just flushed) queue: msg becomes the new head.
        head = msg;
    }
//    LOGE("Looper::addMessage msg->what=%d", msg->what);
    sem_post(&headWriteProtect);
    sem_post(&headDataAvailable);   // signal the worker that data is ready
}

/**
 * Worker-thread body: blocks until a message is available, pops the head
 * under the write lock, and dispatches it to handleMessage(). A message with
 * quit=true terminates the loop (and the thread).
 */
void Looper::loop() {
    while (true) {
        // wait for available message
        sem_wait(&headDataAvailable);

        // get next available message
        sem_wait(&headWriteProtect);
        LooperMessage *msg = head;
        if (msg == NULL) {
            // Possible after a flush: the data semaphore counted messages that
            // were deleted. Just drop the spurious wakeup.
            LOGE("Looper::loop() no msg");
            sem_post(&headWriteProtect);
            continue;
        }
        head = msg->next;
        sem_post(&headWriteProtect);

        if (msg->quit) {
            LOGE("Looper::loop() quitting");
            delete msg;
            return;
        }
//        LOGE("Looper::loop() processing msg.what=%d", msg->what);
        handleMessage(msg);
        delete msg;
    }
}

/**
 * Posts a quit message (processed after everything already queued), joins the
 * worker thread, then destroys the semaphores. Blocking.
 */
void Looper::quit() {
    LOGE("Looper::quit()");
    LooperMessage *msg = new LooperMessage();
    msg->what = 0;
    // BUGFIX: arg1/arg2 were left uninitialized.
    msg->arg1 = 0;
    msg->arg2 = 0;
    msg->obj = NULL;
    msg->next = NULL;
    msg->quit = true;
    addMessage(msg, false);
    void *retval;
    pthread_join(worker, &retval);
    // Mark stopped before tearing down the semaphores so the destructor
    // cannot re-enter quit() against destroyed primitives.
    running = false;
    sem_destroy(&headDataAvailable);
    sem_destroy(&headWriteProtect);
}

/** Default message handler: a no-op; subclasses override to do real work. */
void Looper::handleMessage(LooperMessage *msg) {
//    LOGE("Looper::handleMessage [what, obj]=[%d, %p]", msg->what, msg->obj);
}

线程间消息传递:

在渲染的过程中把其他的线程中的操作过程及参数通过Looper消息的方式传递到类似于GLThread子线程(GL运行环境)中。代码如下:

cpp 复制代码
/**
 * Dispatches lifecycle/draw messages posted from other threads onto the GL
 * worker thread, mirroring GLSurfaceView's surfaceCreated/Changed/Destroyed
 * plus per-frame draw callbacks.
 */
void EGLSurfaceViewVideoRender::handleMessage(LooperMessage *msg) {
    Looper::handleMessage(msg);
    switch (msg->what) {
        case MSG_SurfaceCreated:
            LOGE("EGLSurfaceViewVideoRender::handleMessage MSG_SurfaceCreated");
            OnSurfaceCreated();
            break;
        case MSG_SurfaceChanged:
            LOGE("EGLSurfaceViewVideoRender::handleMessage MSG_SurfaceChanged");
            // arg1/arg2 carry the new surface width/height.
            OnSurfaceChanged(msg->arg1, msg->arg2);
            break;
        case MSG_DrawFrame:
            OnDrawFrame();
            break;
        case MSG_SurfaceDestroyed:
            LOGE("EGLSurfaceViewVideoRender::handleMessage MSG_SurfaceDestroyed");
            OnSurfaceDestroyed();
            break;
        default:
            break;
    }
}

OpenGL着色器的编译、程序对象及链接:

顶点着色器代码:

cpp 复制代码
#version 320 es

// Pass-through vertex shader: forwards the texture coordinate to the
// fragment stage and emits the position unchanged (clip-space quad).
out vec2 v_texcoord;

in vec4 position;
in vec2 texcoord;

void main() {
    v_texcoord = texcoord;
    gl_Position =  position;
}

片元着色器代码:

cpp 复制代码
#version 320 es

// Samples the three YUV420 planes (one 8-bit texture each) and converts to
// RGB. The constants 1.403 / 0.344 / 0.714 / 1.770 look like BT.601-style
// full-range coefficients — confirm against the decoder's color space.
precision mediump float;

in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;

// https://stackoverflow.com/questions/26695253/when-switching-to-glsl-300-met-the-following-error
//The predefined variable gl_FragColor does not exist anymore in GLSL ES 3.00.
//out vec4 gl_FragColor;
out vec4 FragColor;

void main() {
     float y, u, v, r, g, b;
     y = texture(s_textureY, v_texcoord).r;
     u = texture(s_textureU, v_texcoord).r;
     v = texture(s_textureV, v_texcoord).r;
     // Re-center chroma from [0,1] to [-0.5, 0.5] before the matrix multiply.
     u = u - 0.5;
     v = v - 0.5;
     r = y + 1.403 * v;
     g = y - 0.344 * u - 0.714 * v;
     b = y + 1.770 * u;
     FragColor = vec4(r, g, b, 1.0f);
//    gl_FragColor = vec4(r, g, b, 1.0);

}

GL的着色器加载、程序编译:

OpenGLShader为着色器加载、程序编译提供必要的方法。

OpenGLShader.h代码:
cpp 复制代码
//
// Created by MMM on 2024/8/8.
//

#ifndef ANDROIDLEARNOPENGL_OPENGLSHADER_H
#define ANDROIDLEARNOPENGL_OPENGLSHADER_H

#include <GLES3/gl3.h>              //导入的包为GL3对应的顶点程序和片元程序为:#version 320 es
#include <GLES3/gl3ext.h>
#include <fstream>
#include <sstream>
#include "iostream"
#include "LogUtils.h"
//glm库
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>


using namespace std;


class OpenGLShader {
private:
    //顶点着色器代码
    const char *gVertexShaderCode = nullptr;
    //片段着色器代码
    const char *gFragmentShaderCode = nullptr;

    string vertexCode;
    string fragmentCode;


    GLuint loadShader(GLenum shaderType, const char *pSource);


    void printGLString(const char *name, GLenum s);

    void checkGlError(const char *op);

public:
    GLuint shaderId;
    GLuint vertexShader;  //顶点着色器
    GLuint fraShader;     //片段着色器
    OpenGLShader();

    ~OpenGLShader();

    bool getSharderPath(const char *vertexPath, const char *fragmentPath);

    bool getSharderStringPath(string vertexPath, string fragmentPath);

    GLuint createProgram();

    void use() const {
        glUseProgram(shaderId);
    }

    // utility uniform functions
    // ------------------------------------------------------------------------
    void setBool(const string &name, bool value) const {
        glUniform1i(glGetUniformLocation(shaderId, name.c_str()), (int) value);
    }

    // ------------------------------------------------------------------------
    void setInt(const string &name, int value) const {
        glUniform1i(glGetUniformLocation(shaderId, name.c_str()), value);
    }

    // ------------------------------------------------------------------------
    void setFloat(const string &name, float value) const {
        glUniform1f(glGetUniformLocation(shaderId, name.c_str()), value);
    }

    // ------------------------------------------------------------------------
    void setVec2(const string &name, const glm::vec2 &value) const {
        glUniform2fv(glGetUniformLocation(shaderId, name.c_str()), 1, &value[0]);
    }

    void setVec2(const string &name, float x, float y) const {
        glUniform2f(glGetUniformLocation(shaderId, name.c_str()), x, y);
    }

    // ------------------------------------------------------------------------
    void setVec3(const string &name, const glm::vec3 &value) const {
        glUniform3fv(glGetUniformLocation(shaderId, name.c_str()), 1, &value[0]);
    }

    void setVec3(const string &name, float x, float y, float z) const {
        glUniform3f(glGetUniformLocation(shaderId, name.c_str()), x, y, z);
    }

    // ------------------------------------------------------------------------
    void setVec4(const string &name, const glm::vec4 &value) const {
        glUniform4fv(glGetUniformLocation(shaderId, name.c_str()), 1, &value[0]);
    }

    void setVec4(const string &name, float x, float y, float z, float w) const {
        glUniform4f(glGetUniformLocation(shaderId, name.c_str()), x, y, z, w);
    }

    // ------------------------------------------------------------------------
    void setMat2(const string &name, const glm::mat2 &mat) const {
        glUniformMatrix2fv(glGetUniformLocation(shaderId, name.c_str()), 1, GL_FALSE, &mat[0][0]);
    }

    // ------------------------------------------------------------------------
    void setMat3(const string &name, const glm::mat3 &mat) const {
        glUniformMatrix3fv(glGetUniformLocation(shaderId, name.c_str()), 1, GL_FALSE, &mat[0][0]);
    }

    // ------------------------------------------------------------------------
    void setMat4(const string &name, const glm::mat4 &mat) const {
        glUniformMatrix4fv(glGetUniformLocation(shaderId, name.c_str()), 1, GL_FALSE, &mat[0][0]);
    }

};


#endif //ANDROIDLEARNOPENGL_OPENGLSHADER_H
OpenGLShader.cpp代码:
cpp 复制代码
//
// Created by MMM on 2024/8/8.
//

#include "OpenGLShader.h"

/**
 * Compiles both shader stages, links them into a program object, and returns
 * the program id (also stored in shaderId). Returns 0 on any failure; link
 * errors are logged with the driver's info log.
 */
GLuint
OpenGLShader::createProgram() {
    vertexShader = loadShader(GL_VERTEX_SHADER, gVertexShaderCode);
    LOGI("=====gVertexShaderCode :%s", gVertexShaderCode);
    LOGI("======gFragmentShaderCode :%s", gFragmentShaderCode);
    if (!vertexShader) {
        checkGlError("loadShader GL_VERTEX_SHADER");
        return 0;
    }

    fraShader = loadShader(GL_FRAGMENT_SHADER, gFragmentShaderCode);

    if (!fraShader) {
        checkGlError("loadShader GL_FRAGMENT_SHADER");
        return 0;
    }

    shaderId = glCreateProgram();      // create the program object
    if (shaderId) {
        glAttachShader(shaderId, vertexShader);        // attach the vertex shader
        checkGlError("glAttachShader");
        glAttachShader(shaderId, fraShader);
        checkGlError("glAttachShader");
        glLinkProgram(shaderId);   // link the program object
        GLint linkStatus = GL_FALSE;
        glGetProgramiv(shaderId, GL_LINK_STATUS, &linkStatus);  // check whether linking failed
        if (linkStatus != GL_TRUE) {
            GLint bufLength = 0;
            glGetProgramiv(shaderId, GL_INFO_LOG_LENGTH, &bufLength);
            if (bufLength) {
                char *buf = (char *) malloc(bufLength);
                if (buf) {
                    glGetProgramInfoLog(shaderId, bufLength, NULL, buf);
                    LOGE("Could not link shaderId:\n%s\n", buf);
                    free(buf);
                }
            }
            glDeleteProgram(shaderId);     // discard the failed program
            shaderId = 0;
        }
    }
    return shaderId;
}

/**
 * 加载着色器
 * @param shaderType
 * @param pSource
 * @return
 */
/**
 * Compiles a single shader stage from source.
 * @param shaderType GL_VERTEX_SHADER or GL_FRAGMENT_SHADER
 * @param pSource NUL-terminated GLSL source
 * @return the compiled shader handle, or 0 on failure
 */
GLuint OpenGLShader::loadShader(GLenum shaderType, const char *pSource) {
    GLuint shader = glCreateShader(shaderType);     // create the shader object
    if (shader) {
        glShaderSource(shader, 1, &pSource, NULL);  // attach the source to the object
        glCompileShader(shader);                    // compile it
        GLint compiled = 0;
        glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
        if (!compiled) {
            GLint infoLen = 0;
            glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
            if (infoLen) {
                char *buf = (char *) malloc(infoLen);
                if (buf) {
                    glGetShaderInfoLog(shader, infoLen, NULL, buf);
                    LOGE("Could not compile shader %d:\n%s\n",
                         shaderType, buf);
                    free(buf);
                }
            }
            // BUGFIX: delete the shader on ANY compile failure. Previously the
            // delete lived inside `if (infoLen)`, so a failed compile with an
            // empty info log returned a nonzero-but-unusable shader handle.
            glDeleteShader(shader);
            shader = 0;
        }
    }
    return shader;
}

/**
 * Reads the vertex and fragment shader sources from the given file paths into
 * vertexCode/fragmentCode and points gVertexShaderCode/gFragmentShaderCode at
 * them (the pointers stay valid only as long as those members do).
 * @return true on success, false if either file could not be read
 */
bool OpenGLShader::getSharderPath(const char *vertexPath, const char *fragmentPath) {
    ifstream vShaderFile;
    ifstream fShaderFile;
    // ensure ifstream objects can throw exceptions:
    vShaderFile.exceptions(ifstream::failbit | ifstream::badbit);
    fShaderFile.exceptions(ifstream::failbit | ifstream::badbit);
    try {
        // open files
        vShaderFile.open(vertexPath);
        fShaderFile.open(fragmentPath);
        stringstream vShaderStream, fShaderStream;
        // read file's buffer contents into streams
        vShaderStream << vShaderFile.rdbuf();
        fShaderStream << fShaderFile.rdbuf();
        // close file handlers
        vShaderFile.close();
        fShaderFile.close();
        // convert stream into string
        vertexCode = vShaderStream.str();
        fragmentCode = fShaderStream.str();
    }
    catch (ifstream::failure &e) {
        LOGE("Could not getSharderPath error :%s", e.what());
        return false;
    }
    // Cache raw pointers into the member strings for the GL compile step.
    gVertexShaderCode = vertexCode.c_str();
    gFragmentShaderCode = fragmentCode.c_str();

    return true;
}

/**
 * std::string overload of getSharderPath().
 * Previously this duplicated the whole file-reading routine (and declared two
 * unused const char* locals); it now delegates so there is a single code path.
 * @return true on success, false if either file could not be read
 */
bool OpenGLShader::getSharderStringPath(string vertexPath, string fragmentPath) {
    return getSharderPath(vertexPath.c_str(), fragmentPath.c_str());
}

void OpenGLShader::printGLString(const char *name, GLenum s) {
    const char *v = (const char *) glGetString(s);
    LOGI("OpenGL %s = %s\n", name, v);
}

void OpenGLShader::checkGlError(const char *op) {
    for (GLint error = glGetError(); error; error = glGetError()) {
        LOGI("after %s() glError (0x%x)\n", op, error);
    }
}

/**
 * Destructor: deletes any compiled shader objects and clears the cached
 * sources. Note: the linked program (shaderId) is not deleted here —
 * NOTE(review): confirm whether ownership of the program lies with the caller.
 */
OpenGLShader::~OpenGLShader() {
    if (vertexShader) {
        glDeleteShader(vertexShader);
    }
    if (fraShader) {
        glDeleteShader(fraShader);
    }
    vertexCode.clear();
    fragmentCode.clear();

    // The raw pointers referenced the cleared strings; null them out.
    gVertexShaderCode = nullptr;
    gFragmentShaderCode = nullptr;
}

/** Default constructor: members are initialized in the class declaration. */
OpenGLShader::OpenGLShader() {

}

OpenGL程序创建及Uniform参数的获取:

cpp 复制代码
/**
 * Builds the GL program from the loaded vertex/fragment shaders and caches
 * the attribute/uniform locations used during rendering.
 * @return the program id, or 0 on failure
 */
int
EGLSurfaceViewVideoRender::createProgram() {

    m_program = openGlShader->createProgram();
    m_vertexShader = openGlShader->vertexShader;
    m_pixelShader = openGlShader->fraShader;
    LOGI("EGLSurfaceViewVideoRender createProgram m_program:%d", m_program);

    if (!m_program) {
        LOGE("Could not create program.");
        return 0;
    }

    //Get Uniform Variables Location
    // position/texcoord are vertex attributes; s_textureY/U/V and texSize are
    // uniforms. A location of -1 (attribute/uniform not active) is tolerated
    // by the GL calls that consume these.
    m_vertexPos = (GLuint) glGetAttribLocation(m_program, "position");
    m_textureYLoc = glGetUniformLocation(m_program, "s_textureY");
    m_textureULoc = glGetUniformLocation(m_program, "s_textureU");
    m_textureVLoc = glGetUniformLocation(m_program, "s_textureV");
    m_textureLoc = (GLuint) glGetAttribLocation(m_program, "texcoord");
    m_textureSize = glGetUniformLocation(m_program, "texSize");

    return m_program;
}

纹理Texture的附着:

cpp 复制代码
/**
 * Lazily creates the program, then (once per program change) binds it and
 * wires up the vertex attributes and sampler units.
 * @return the program id, or 0 on failure
 */
GLuint EGLSurfaceViewVideoRender::useProgram() {
    if (!m_program && !createProgram()) {
        LOGE("Could not use program.");
        return 0;
    }

    if (isProgramChanged) {
        glUseProgram(m_program);
        // EGLVerticek / EGLTextureCoord are presumably file-scope vertex and
        // texcoord arrays defined elsewhere in this translation unit.
        glVertexAttribPointer(m_vertexPos, 2, GL_FLOAT, GL_FALSE, 0, EGLVerticek);
        glEnableVertexAttribArray(m_vertexPos);

        // Bind the Y/U/V samplers to texture units 0/1/2 (matches the
        // glActiveTexture units used when creating/updating the textures).
        glUniform1i(m_textureYLoc, 0);
        glUniform1i(m_textureULoc, 1);
        glUniform1i(m_textureVLoc, 2);
        glVertexAttribPointer(m_textureLoc, 2, GL_FLOAT, GL_FALSE, 0, EGLTextureCoord);
        glEnableVertexAttribArray(m_textureLoc);

        // texSize is optional in the shader; only set it when it is active.
        if (m_textureSize >= 0) {
            GLfloat size[2];
            size[0] = m_width;
            size[1] = m_height;
            glUniform2fv(m_textureSize, 1, &size[0]);
        }

        isProgramChanged = false;
    }

    return m_program;
}

纹理Texture创建:

cpp 复制代码
bool EGLSurfaceViewVideoRender::createTextures() {
    auto widthY = (GLsizei) m_width;
    auto heightY = (GLsizei) m_height;

    glActiveTexture(GL_TEXTURE0);
    glGenTextures(1, &m_textureIdY);
    glBindTexture(GL_TEXTURE_2D, m_textureIdY);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthY, heightY, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 nullptr);

    if (!m_textureIdY) {
        LOGE("OpenGL Error Create Y texture");
        return false;
    }

    GLsizei widthU = (GLsizei) m_width / 2;
    GLsizei heightU = (GLsizei) m_height / 2;

    glActiveTexture(GL_TEXTURE1);
    glGenTextures(1, &m_textureIdU);
    glBindTexture(GL_TEXTURE_2D, m_textureIdU);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthU, heightU, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 nullptr);

    if (!m_textureIdU) {
        LOGE("OpenGL Error Create U texture");
        return false;
    }

    GLsizei widthV = (GLsizei) m_width / 2;
    GLsizei heightV = (GLsizei) m_height / 2;

    glActiveTexture(GL_TEXTURE2);
    glGenTextures(1, &m_textureIdV);
    glBindTexture(GL_TEXTURE_2D, m_textureIdV);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthV, heightV, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 nullptr);

    if (!m_textureIdV) {
        LOGE("OpenGL Error Create V texture");
        return false;
    }

    return true;
}

纹理Texture数据更新:

cpp 复制代码
bool EGLSurfaceViewVideoRender::updateTextures() {
    // Re-uploads the staged Y/U/V planes to their textures when updateFrame()
    // has marked new data (isDirty). Returns true only when an upload actually
    // happened, so OnDrawFrame() can skip redundant draws.
    //
    // Fix: the original guard used '&&', which only bailed out when ALL three
    // texture ids were 0; with a partially created set it would upload into
    // texture object 0. Any missing plane must abort.
    if (!m_textureIdY || !m_textureIdU || !m_textureIdV) return false;

    if (isDirty) {
        // Y plane: full resolution.
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, m_textureIdY);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width, (GLsizei) m_height, 0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataY.get());

        // U plane: half width / half height (YUV420P).
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, m_textureIdU);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width / 2, (GLsizei) m_height / 2,
                     0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataU);

        // V plane: half width / half height (YUV420P).
        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, m_textureIdV);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width / 2, (GLsizei) m_height / 2,
                     0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataV);

        isDirty = false;

        return true;
    }

    return false;
}

GL渲染环境的集成:

上面已经介绍了渲染环境所需的必要流程,现在把整个渲染的过程封装在一个类中。该类提供了OpenGL的运行环境的创建及维护其内部状态,供外给出surfaceCreated、surfaceChanged、render、updateFrame、release、draw、setSharderStringPath方法使用。

EGLSurfaceViewVideoRender.h代码如下:
cpp 复制代码
//  Author : wangyongyao https://github.com/wangyongyao1989
// Created by MMM on 2024/9/6.
//

// Fix: the original guard closed (#endif) immediately after the #define, so
// everything below it was left unguarded against multiple inclusion.
// #pragma once protects the whole header.
#pragma once

#include <cstdint>
#include <memory>
#include <android/native_window.h>
#include <android/asset_manager.h>
#include "OpenGLShader.h"
#include <EGL/egl.h>
#include <GLES3/gl3.h>
#include "EglCore.h"
#include "WindowSurface.h"
#include "Looper.h"

// Messages posted to the Looper render thread (dispatched in handleMessage()).
enum {
    MSG_SurfaceCreated,    // create EglCore + WindowSurface for m_ANWindow
    MSG_SurfaceChanged,    // viewport size changed (arg1 = width, arg2 = height)
    MSG_DrawFrame,         // upload textures (if dirty) and draw one frame
    MSG_SurfaceDestroyed,  // tear down GL objects and quit the looper
};


// One decoded YUV420P video frame handed to the renderer. The plane pointers
// are borrowed, not owned; updateFrame() copies them into internal storage.
struct egl_surface_video_frame {
    size_t width;      // luma width in pixels
    size_t height;     // luma height in pixels
    size_t stride_y;   // bytes per row of the Y plane
    size_t stride_uv;  // bytes per row of each chroma plane
    uint8_t *y;        // Y plane (not owned)
    uint8_t *u;        // U plane (not owned)
    uint8_t *v;        // V plane (not owned)
};

// Vertices for a full screen quad (triangle strip order).
static const float EGLVerticek[8] = {
        -1.f, 1.f,
        -1.f, -1.f,
        1.f, 1.f,
        1.f, -1.f
};

// Texture coordinates for mapping entire texture.
static const float EGLTextureCoord[8] = {
        0, 0,
        0, 1,
        1, 0,
        1, 1
};

// Encoder defaults. NOTE(review): VIDEO_WIDTH/VIDEO_HEIGHT are also used by
// OnSurfaceChanged() for the aspect-ratio math instead of the actual stream
// size — confirm that is intended.
static const size_t BIT_RATE = 4000000;   // 4Mbps
static const size_t VIDEO_WIDTH = 1280;
static const size_t VIDEO_HEIGHT = 720;

// Renders YUV420P frames as three luminance textures on a Looper-owned GL
// thread. Public methods post messages; all GL work runs in handleMessage().
class EGLSurfaceViewVideoRender : public Looper {

public:
    EGLSurfaceViewVideoRender();

    ~EGLSurfaceViewVideoRender();

    // Stores the native window and posts MSG_SurfaceCreated.
    // NOTE(review): assetManager is accepted but not stored here — confirm.
    void surfaceCreated(ANativeWindow *window, AAssetManager *assetManager);

    // Posts MSG_SurfaceChanged with the new viewport size.
    void surfaceChanged(size_t width, size_t height);

    // Posts MSG_DrawFrame to render one frame.
    void render();

    // Copies the YUV planes of 'frame' into the internal staging buffer
    // (runs on the caller's thread; marks isDirty for the GL thread).
    void updateFrame(const egl_surface_video_frame &frame);

    // Posts MSG_SurfaceDestroyed to tear down GL state and quit the looper.
    void release();

    // Wraps a tightly packed YUV420P buffer into a frame and forwards it to
    // updateFrame().
    void draw(uint8_t *buffer, size_t length, size_t width, size_t height, float rotation);

    void setParameters(uint32_t params);

    uint32_t getParameters();

    bool setSharderPath(const char *vertexPath, const char *fragmentPath);

    bool setSharderStringPath(string vertexPath, string fragmentPath);

    void startEncoder(const char *recordPath);

    void stopEncoder();

private:

    // Looper callback: dispatches MSG_* to the On* handlers below
    // (all run on the looper thread that owns the GL context).
    void handleMessage(LooperMessage *msg);

    void OnSurfaceCreated();
    void OnSurfaceChanged(int w, int h);
    void OnDrawFrame();
    void OnSurfaceDestroyed();

    bool CreateFrameBufferObj();

    // Creates the three Y/U/V luminance textures sized from m_width/m_height.
    bool createTextures();

    // Uploads staged plane data when isDirty is set.
    bool updateTextures();

    void deleteTextures();

    int createProgram();

    GLuint useProgram();

    void printGLString(const char *name, GLenum s);

    void checkGlError(const char *op);


    // Set when program/vertex state must be (re)bound on the next useProgram().
    bool isProgramChanged = true;

    void delete_program(GLuint &program);

    GLuint m_program = 0;

    GLuint m_vertexShader = 0;
    GLuint m_pixelShader = 0;

    // Single allocation holding Y+U+V planes contiguously; m_pDataU/m_pDataV
    // below are interior pointers into it (not separately owned).
    std::unique_ptr<uint8_t[]> m_pDataY = nullptr;

    uint8_t *m_pDataU = nullptr;  // interior pointer into m_pDataY — do not free
    uint8_t *m_pDataV = nullptr;  // interior pointer into m_pDataY — do not free

    __unused  size_t m_length = 0;
    size_t m_sizeY = 0;
    size_t m_sizeU = 0;
    size_t m_sizeV = 0;

    GLuint m_textureIdY = 0;
    GLuint m_textureIdU = 0;
    GLuint m_textureIdV = 0;

    GLuint m_vertexPos = 0;
    GLuint m_textureLoc = 0;
    GLint m_textureYLoc = 0;
    GLint m_textureULoc = 0;
    GLint m_textureVLoc = 0;
    GLint m_textureSize = 0;

    size_t m_width = 0;         // current video frame width
    size_t m_height = 0;        // current video frame height
    size_t m_backingWidth = 0;  // surface (viewport) width
    size_t m_backingHeight = 0; // surface (viewport) height
    uint32_t m_params = 0;
    float m_rotation = 0;

    // True when updateFrame() staged data not yet uploaded to textures.
    bool isDirty;

    OpenGLShader *openGlShader = nullptr;

    EGLDisplay display = nullptr;
    EGLSurface winsurface = nullptr;

    EglCore *m_EglCore = nullptr;
    WindowSurface *m_WindowSurface = nullptr;

    ANativeWindow *m_ANWindow = nullptr;


    // Letterbox offsets computed in OnSurfaceChanged().
    size_t offX;
    size_t offY;
    size_t off_right;
    size_t off_bottom;

};
EGLSurfaceViewVideoRender.cpp代码如下:
cpp 复制代码
//  Author : wangyongyao https://github.com/wangyongyao1989
// Created by MMM on 2024/10/13.
//

#include "EGLSurfaceViewVideoRender.h"
#include "OpenGLShader.h"


// Stores the native window and hands surface setup off to the looper thread.
// NOTE(review): assetManager is accepted but unused here — confirm intended.
void EGLSurfaceViewVideoRender::surfaceCreated(ANativeWindow *window, AAssetManager *assetManager) {
    m_ANWindow = window;
    postMessage(MSG_SurfaceCreated, false);
}

// Posts the new viewport size to the looper thread (handled in OnSurfaceChanged).
void EGLSurfaceViewVideoRender::surfaceChanged(size_t width, size_t height) {
    postMessage(MSG_SurfaceChanged, width, height);
}

// Schedules one frame to be drawn on the looper thread (see OnDrawFrame).
void EGLSurfaceViewVideoRender::render() {
    postMessage(MSG_DrawFrame, false);
}

// Schedules GL/EGL teardown on the looper thread (see OnSurfaceDestroyed).
void EGLSurfaceViewVideoRender::release() {
    postMessage(MSG_SurfaceDestroyed, false);
}

// Stages one YUV420P frame into the internal contiguous buffer (Y plane
// followed by U then V), stripping any row padding from the source strides.
// Runs on the caller's (decode) thread; the GL thread consumes it later via
// updateTextures() when isDirty is set.
void EGLSurfaceViewVideoRender::updateFrame(const egl_surface_video_frame &frame) {
    m_sizeY = frame.width * frame.height;
    m_sizeU = frame.width * frame.height / 4;
    m_sizeV = frame.width * frame.height / 4;

    // (Re)allocate the staging buffer on first use or on resolution change.
    // U and V are interior pointers into the single Y allocation.
    if (m_pDataY == nullptr || m_width != frame.width || m_height != frame.height) {
        m_pDataY = std::make_unique<uint8_t[]>(m_sizeY + m_sizeU + m_sizeV);
        m_pDataU = m_pDataY.get() + m_sizeY;
        m_pDataV = m_pDataU + m_sizeU;
        isProgramChanged = true;
    }

    m_width = frame.width;
    m_height = frame.height;

    // Y plane: fast path when rows are tightly packed; otherwise copy row by
    // row to drop the stride padding.
    if (m_width == frame.stride_y) {
        memcpy(m_pDataY.get(), frame.y, m_sizeY);
    } else {
        uint8_t *pSrcY = frame.y;
        uint8_t *pDstY = m_pDataY.get();

        for (int h = 0; h < m_height; h++) {
            memcpy(pDstY, pSrcY, m_width);

            pSrcY += frame.stride_y;
            pDstY += m_width;
        }
    }

    // U/V planes: half resolution in both dimensions (YUV420P).
    if (m_width / 2 == frame.stride_uv) {
        memcpy(m_pDataU, frame.u, m_sizeU);
        memcpy(m_pDataV, frame.v, m_sizeV);
    } else {
        uint8_t *pSrcU = frame.u;
        uint8_t *pSrcV = frame.v;
        uint8_t *pDstU = m_pDataU;
        uint8_t *pDstV = m_pDataV;

        for (int h = 0; h < m_height / 2; h++) {
            memcpy(pDstU, pSrcU, m_width / 2);
            memcpy(pDstV, pSrcV, m_width / 2);

            pDstU += m_width / 2;
            pDstV += m_width / 2;

            pSrcU += frame.stride_uv;
            pSrcV += frame.stride_uv;
        }
    }

    // Mark for upload on the next updateTextures() call.
    isDirty = true;
}

// Wraps a tightly packed YUV420P buffer (Y, then U, then V) into an
// egl_surface_video_frame and stages it via updateFrame().
// NOTE(review): the 'rotation' parameter is not applied anywhere in this
// function (the frame struct carries no rotation) — confirm whether rotation
// support is still planned.
void
EGLSurfaceViewVideoRender::draw(uint8_t *buffer, size_t length, size_t width, size_t height,
                                float rotation) {
    egl_surface_video_frame frame{};
    frame.width = width;
    frame.height = height;
    frame.stride_y = width;          // packed: no padding
    frame.stride_uv = width / 2;     // packed chroma rows
    frame.y = buffer;
    frame.u = buffer + width * height;            // U follows Y
    frame.v = buffer + width * height * 5 / 4;    // V follows U (quarter size)

    updateFrame(frame);
}

// Stores an opaque parameter word for later retrieval via getParameters().
void EGLSurfaceViewVideoRender::setParameters(uint32_t params) {
    m_params = params;
}

// Returns the parameter word previously stored by setParameters().
uint32_t EGLSurfaceViewVideoRender::getParameters() {
    return m_params;
}

bool EGLSurfaceViewVideoRender::createTextures() {
    auto widthY = (GLsizei) m_width;
    auto heightY = (GLsizei) m_height;

    glActiveTexture(GL_TEXTURE0);
    glGenTextures(1, &m_textureIdY);
    glBindTexture(GL_TEXTURE_2D, m_textureIdY);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthY, heightY, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 nullptr);

    if (!m_textureIdY) {
        LOGE("OpenGL Error Create Y texture");
        return false;
    }

    GLsizei widthU = (GLsizei) m_width / 2;
    GLsizei heightU = (GLsizei) m_height / 2;

    glActiveTexture(GL_TEXTURE1);
    glGenTextures(1, &m_textureIdU);
    glBindTexture(GL_TEXTURE_2D, m_textureIdU);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthU, heightU, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 nullptr);

    if (!m_textureIdU) {
        LOGE("OpenGL Error Create U texture");
        return false;
    }

    GLsizei widthV = (GLsizei) m_width / 2;
    GLsizei heightV = (GLsizei) m_height / 2;

    glActiveTexture(GL_TEXTURE2);
    glGenTextures(1, &m_textureIdV);
    glBindTexture(GL_TEXTURE_2D, m_textureIdV);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthV, heightV, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 nullptr);

    if (!m_textureIdV) {
        LOGE("OpenGL Error Create V texture");
        return false;
    }

    return true;
}

bool EGLSurfaceViewVideoRender::updateTextures() {
    // Re-uploads the staged Y/U/V planes to their textures when updateFrame()
    // has marked new data (isDirty). Returns true only when an upload actually
    // happened, so OnDrawFrame() can skip redundant draws.
    //
    // Fix: the original guard used '&&', which only bailed out when ALL three
    // texture ids were 0; with a partially created set it would upload into
    // texture object 0. Any missing plane must abort.
    if (!m_textureIdY || !m_textureIdU || !m_textureIdV) return false;

    if (isDirty) {
        // Y plane: full resolution.
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, m_textureIdY);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width, (GLsizei) m_height, 0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataY.get());

        // U plane: half width / half height (YUV420P).
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, m_textureIdU);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width / 2, (GLsizei) m_height / 2,
                     0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataU);

        // V plane: half width / half height (YUV420P).
        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, m_textureIdV);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width / 2, (GLsizei) m_height / 2,
                     0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataV);

        isDirty = false;

        return true;
    }

    return false;
}

// Builds the GL program via the shader wrapper and caches the attribute and
// uniform locations used by useProgram(). Returns the program id, or 0 on
// failure.
int
EGLSurfaceViewVideoRender::createProgram() {

    m_program = openGlShader->createProgram();
    m_vertexShader = openGlShader->vertexShader;
    m_pixelShader = openGlShader->fraShader;
    LOGI("EGLSurfaceViewVideoRender createProgram m_program:%d", m_program);

    if (!m_program) {
        LOGE("Could not create program.");
        return 0;
    }

    //Get Uniform Variables Location
    m_vertexPos = (GLuint) glGetAttribLocation(m_program, "position");
    m_textureYLoc = glGetUniformLocation(m_program, "s_textureY");
    m_textureULoc = glGetUniformLocation(m_program, "s_textureU");
    m_textureVLoc = glGetUniformLocation(m_program, "s_textureV");
    m_textureLoc = (GLuint) glGetAttribLocation(m_program, "texcoord");
    m_textureSize = glGetUniformLocation(m_program, "texSize");

    return m_program;
}

// Activates the program, binding vertex/texcoord client arrays and the three
// sampler uniforms the first time (or after isProgramChanged is set, e.g. on
// a resolution change). Returns the program id, or 0 on failure.
GLuint EGLSurfaceViewVideoRender::useProgram() {
    if (!m_program && !createProgram()) {
        LOGE("Could not use program.");
        return 0;
    }

    if (isProgramChanged) {
        glUseProgram(m_program);
        // Full-screen quad positions (client-side array).
        glVertexAttribPointer(m_vertexPos, 2, GL_FLOAT, GL_FALSE, 0, EGLVerticek);
        glEnableVertexAttribArray(m_vertexPos);

        // Bind samplers to texture units 0/1/2 (Y/U/V).
        glUniform1i(m_textureYLoc, 0);
        glUniform1i(m_textureULoc, 1);
        glUniform1i(m_textureVLoc, 2);
        glVertexAttribPointer(m_textureLoc, 2, GL_FLOAT, GL_FALSE, 0, EGLTextureCoord);
        glEnableVertexAttribArray(m_textureLoc);

        // Optional texSize uniform (location is -1 when the shader omits it).
        if (m_textureSize >= 0) {
            GLfloat size[2];
            size[0] = m_width;
            size[1] = m_height;
            glUniform2fv(m_textureSize, 1, &size[0]);
        }

        isProgramChanged = false;
    }

    return m_program;
}

// Forwards the shader source file paths to the shader wrapper.
// Fix: the function is declared bool but returned 0 (false) even on success;
// report success instead so callers that check the result are not misled.
bool
EGLSurfaceViewVideoRender::setSharderPath(const char *vertexPath, const char *fragmentPath) {
    openGlShader->getSharderPath(vertexPath, fragmentPath);
    return true;
}

// Forwards in-memory shader sources to the shader wrapper.
// Fix: the function is declared bool but returned 0 (false) even on success;
// report success instead so callers that check the result are not misled.
bool EGLSurfaceViewVideoRender::setSharderStringPath(string vertexPath, string fragmentPath) {
    openGlShader->getSharderStringPath(vertexPath, fragmentPath);
    return true;
}

// Constructor: allocates the shader wrapper (owned; released in the destructor).
EGLSurfaceViewVideoRender::EGLSurfaceViewVideoRender() {
    openGlShader = new OpenGLShader();
}

// Destructor: releases GL objects, the staging buffer and the EGL wrappers,
// then stops the looper thread.
// NOTE(review): GL/EGL deletions here run on whichever thread destroys the
// object, which may not be the looper thread that owns the context — the
// MSG_SurfaceDestroyed path (OnSurfaceDestroyed) is the safe teardown route.
EGLSurfaceViewVideoRender::~EGLSurfaceViewVideoRender() {
    deleteTextures();
    delete_program(m_program);
    m_vertexShader = 0;
    m_pixelShader = 0;

    // m_pDataY owns the single allocation backing all three planes; resetting
    // the unique_ptr frees it.
    if (m_pDataY) {
        m_pDataY = nullptr;
    }
    // Fix: m_pDataU / m_pDataV are interior pointers into the m_pDataY buffer
    // (set in updateFrame()); the original 'delete' on them was an invalid
    // free of memory already owned by the unique_ptr. Just drop the aliases.
    m_pDataU = nullptr;
    m_pDataV = nullptr;

    if (openGlShader) {
        delete openGlShader;
        openGlShader = nullptr;
    }

    if (display) {
        display = nullptr;
    }

    if (winsurface) {
        winsurface = nullptr;
    }

    if (m_EglCore) {
        delete m_EglCore;
        m_EglCore = nullptr;
    }

    if (m_WindowSurface) {
        delete m_WindowSurface;
        m_WindowSurface = nullptr;
    }
    quit();
}

// Unbinds and deletes the given GL program, zeroing the caller's handle.
void EGLSurfaceViewVideoRender::delete_program(GLuint &program) {
    if (program) {
        glUseProgram(0);
        glDeleteProgram(program);
        program = 0;
    }
}

void EGLSurfaceViewVideoRender::deleteTextures() {
    if (m_textureIdY) {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, 0);
        glDeleteTextures(1, &m_textureIdY);

        m_textureIdY = 0;
    }

    if (m_textureIdU) {
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, 0);
        glDeleteTextures(1, &m_textureIdU);

        m_textureIdU = 0;
    }

    if (m_textureIdV) {
        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, 0);
        glDeleteTextures(1, &m_textureIdV);

        m_textureIdV = 0;
    }
}


// Looper-thread dispatcher: routes posted MSG_* messages to the matching
// On* handler. All GL/EGL work happens through here, keeping the context on
// one thread.
void EGLSurfaceViewVideoRender::handleMessage(LooperMessage *msg) {
    Looper::handleMessage(msg);
    switch (msg->what) {
        case MSG_SurfaceCreated: {
            LOGE("EGLSurfaceViewVideoRender::handleMessage MSG_SurfaceCreated");
            OnSurfaceCreated();
        }
            break;
        case MSG_SurfaceChanged:
            LOGE("EGLSurfaceViewVideoRender::handleMessage MSG_SurfaceChanged");
            // arg1 = width, arg2 = height (posted by surfaceChanged()).
            OnSurfaceChanged(msg->arg1, msg->arg2);
            break;
        case MSG_DrawFrame:
//            LOGE("EGLSurfaceViewVideoRender::handleMessage MSG_DrawFrame");
            OnDrawFrame();
            break;
        case MSG_SurfaceDestroyed:
            LOGE("EGLSurfaceViewVideoRender::handleMessage MSG_SurfaceDestroyed");
            OnSurfaceDestroyed();
            break;
        default:
            break;
    }
}

// Looper-thread handler: creates the EGL core and a window surface for the
// stored native window, then makes the context current on this thread.
void EGLSurfaceViewVideoRender::OnSurfaceCreated() {
    m_EglCore = new EglCore(eglGetCurrentContext(), FLAG_RECORDABLE);
    if (!m_EglCore) {
        LOGE("new EglCore failed!");
        return;
    }

    LOGE("OnSurfaceCreated m_ANWindow:%p", m_ANWindow);

    m_WindowSurface = new WindowSurface(m_EglCore, m_ANWindow);
    // Fix: the original re-checked m_EglCore here (copy/paste slip); the
    // object just created is m_WindowSurface.
    if (!m_WindowSurface) {
        LOGE("new WindowSurface failed!");
        return;
    }
    m_WindowSurface->makeCurrent();
}

// Looper-thread handler: records the new surface size, computes letterbox
// offsets against the fixed VIDEO_WIDTH/VIDEO_HEIGHT reference, then (re)binds
// the program and creates the plane textures.
// NOTE(review): the aspect math uses the VIDEO_* constants rather than the
// actual stream dimensions — confirm that is intended.
void EGLSurfaceViewVideoRender::OnSurfaceChanged(int w, int h) {
    m_backingWidth = w;
    m_backingHeight = h;
    // Fix: the members are size_t; passing them to a printf-style logger with
    // %d is a format mismatch (UB on LP64). Cast explicitly to int.
    LOGE("OnSurfaceChanged m_backingWidth:%d,m_backingHeight:%d", (int) m_backingWidth,
         (int) m_backingHeight);
    float windowAspect = (float) m_backingHeight / (float) m_backingWidth;
    size_t outWidth, outHeight;
    if (VIDEO_HEIGHT > VIDEO_WIDTH * windowAspect) {
        // limited by narrow width; reduce height
        outWidth = VIDEO_WIDTH;
        outHeight = (int) (VIDEO_WIDTH * windowAspect);
    } else {
        // limited by short height; restrict width
        outHeight = VIDEO_HEIGHT;
        outWidth = (int) (VIDEO_HEIGHT / windowAspect);
    }
    LOGE(" outWidth:%d,outHeight:%d", (int) outWidth, (int) outHeight);

    offX = (VIDEO_WIDTH - outWidth) / 2;
    offY = (VIDEO_HEIGHT - outHeight) / 2;
    off_right = offX + outWidth;
    off_bottom = offY + outHeight;
    //Adjusting window 1920x1104 to +14,+0 1252x720
    LOGE("Adjusting window offX:%d,offY:%d,off_right:%d,off_bottom:%d", (int) offX, (int) offY,
         (int) off_right, (int) off_bottom);
    useProgram();
    createTextures();
}

// Looper-thread handler: clears, uploads any dirty textures and draws the
// full-screen quad, then presents via the window surface.
void EGLSurfaceViewVideoRender::OnDrawFrame() {
    // Fix: glClearColor must be set BEFORE glClear — the original order meant
    // the chosen clear colour only took effect on the NEXT frame's clear.
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    if (!updateTextures() || !useProgram()) return;

    //窗口显示 — draw the full-screen quad (4 vertices, triangle strip).
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    LOGE("OnDrawFrame thread:%ld", pthread_self());

    //切换到m_WindowSurface — ensure the window surface is current, then present.
    m_WindowSurface->makeCurrent();
    m_WindowSurface->swapBuffers();

}

// Looper-thread handler: releases CPU-side buffers, the shader wrapper and
// the EGL objects, then stops the looper. Runs on the GL thread, so deleting
// the EGL wrappers here is safe.
void EGLSurfaceViewVideoRender::OnSurfaceDestroyed() {
    m_vertexShader = 0;
    m_pixelShader = 0;

    // Resetting the unique_ptr frees the single Y+U+V allocation.
    if (m_pDataY) {
        m_pDataY = nullptr;
    }

    // U/V are interior pointers into the buffer above — only the aliases are
    // cleared here (correctly, no delete).
    if (m_pDataU) {
        m_pDataU = nullptr;
    }

    if (m_pDataV) {
        m_pDataV = nullptr;
    }

    if (openGlShader) {
        delete openGlShader;
        openGlShader = nullptr;
    }

    if (display) {
        display = nullptr;
    }

    if (winsurface) {
        winsurface = nullptr;
    }

    if (m_EglCore) {
        delete m_EglCore;
        m_EglCore = nullptr;
    }

    if (m_WindowSurface) {
        delete m_WindowSurface;
        m_WindowSurface = nullptr;
    }

    quit();

}

void EGLSurfaceViewVideoRender::printGLString(const char *name, GLenum s) {
    // Logs one GL connection string (e.g. GL_VERSION, GL_RENDERER).
    const auto *value = reinterpret_cast<const char *>(glGetString(s));
    LOGI("OpenGL %s = %s\n", name, value);
}

void EGLSurfaceViewVideoRender::checkGlError(const char *op) {
    for (GLint error = glGetError(); error; error = glGetError()) {
        LOGI("after %s() glError (0x%x)\n", op, error);
    }
}

三、FFmpeg解码视频数据OpenGL Texture播放:

承接上一篇文章 FFmpeg解码视频数据ANativeWindow播放 中的FFmpeg解码及渲染分开两个线程的思路,把OpenGL Texture的初始化及对获取到队列中的AVFrame进行渲染的过程进行平替。

initEGLRender()替换原有的initANativeWindow(),sendFrameDataToEGL()替换原有的sendFrameDataToANativeWindow(),不用改变其他的流程来实现。

FFmpeg初始化:

代码如下:

cpp 复制代码
// Opens the media file, locates the first video stream and opens its decoder.
// On success, caches the video dimensions/duration and reports them to Java.
// Returns false (with contexts cleaned up) on any failure.
bool FFGLPlayer::initFFmpeg(const std::string &filePath) {
    if (avformat_open_input(&mFormatContext, filePath.c_str(), nullptr, nullptr) != 0) {
        LOGE("Could not open file: %s", filePath.c_str());
        return false;
    }

    if (avformat_find_stream_info(mFormatContext, nullptr) < 0) {
        LOGE("Could not find stream information");
        avformat_close_input(&mFormatContext);
        return false;
    }

    // Pick the first video stream.
    mVideoStreamIndex = -1;
    for (unsigned int i = 0; i < mFormatContext->nb_streams; i++) {
        if (mFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            mVideoStreamIndex = i;
            break;
        }
    }

    if (mVideoStreamIndex == -1) {
        LOGE("Could not find video stream");
        avformat_close_input(&mFormatContext);
        return false;
    }

    AVCodecParameters *codecParams = mFormatContext->streams[mVideoStreamIndex]->codecpar;
    const AVCodec *codec = avcodec_find_decoder(codecParams->codec_id);
    if (!codec) {
        LOGE("Unsupported codec");
        avformat_close_input(&mFormatContext);
        return false;
    }

    mCodecContext = avcodec_alloc_context3(codec);
    if (!mCodecContext) {
        LOGE("Could not allocate codec context");
        avformat_close_input(&mFormatContext);
        return false;
    }

    if (avcodec_parameters_to_context(mCodecContext, codecParams) < 0) {
        LOGE("Could not copy codec parameters");
        cleanupFFmpeg();
        return false;
    }

    if (avcodec_open2(mCodecContext, codec, nullptr) < 0) {
        LOGE("Could not open codec");
        cleanupFFmpeg();
        return false;
    }

    mWidth = mCodecContext->width;
    mHeight = mCodecContext->height;
    // NOTE(review): sample_fmt is an AUDIO field; reading it from a video
    // codec context looks like a leftover from the audio player — confirm.
    mSampleFormat = mCodecContext->sample_fmt;
    mDuration = mFormatContext->duration;

    LOGI("GLPlay FFmpeg initialized width: %d, height: %d, duration: %lld",
         mWidth, mHeight, mDuration);

    playMediaInfo = "FFmpeg initialized, width:" + std::to_string(mWidth) +
                    ", height:" + std::to_string(mHeight) +
                    ", duration:" + std::to_string(mDuration) + "\n";
    PostStatusMessage(playMediaInfo.c_str());

    return true;
}

GL环境的初始化:

代码如下:

cpp 复制代码
// Binds the Java Surface to a native window and boots the GL renderer:
// posts surface creation, supplies shader sources, then posts the video
// dimensions (note: the video size, not the view size) as the surface size.
bool FFGLPlayer::initEGLRender(const string &fragPath, const string &vertexPath) {
    mNativeWindow = ANativeWindow_fromSurface(mEnv, androidSurface);
    if (!mNativeWindow) {
        LOGE("Couldn't get native window from surface");
        return false;
    }
    eglsurfaceViewRender->surfaceCreated(mNativeWindow, nullptr);
    eglsurfaceViewRender->setSharderStringPath(vertexPath, fragPath);
    eglsurfaceViewRender->surfaceChanged(mWidth, mHeight);
    return true;
}

解码线程:

代码如下:

cpp 复制代码
// Decoder loop: reads packets from the demuxer, decodes video frames and
// pushes ref-counted copies into videoFrameQueue for the render thread.
// Applies back-pressure when the queue reaches maxVideoFrames.
void FFGLPlayer::decodeThread() {
    AVPacket packet;
    AVFrame *frame = av_frame_alloc();
    int ret;

    if (!frame) {
        LOGE("Could not allocate frame");
        return;
    }

    LOGI("Decode thread started");
    PostStatusMessage("Decode thread started");

    while (!mStopRequested && mIsPlaying) {
        pthread_mutex_lock(&mDecodeMutex);
        // 当队列达到最大时,解码等待。(Block while the frame queue is full.)
        while (videoFrameQueue.size() >= maxVideoFrames && !mStopRequested && mIsPlaying) {
            LOGD("Waiting for buffer slot, queued: %zu", videoFrameQueue.size());
            playMediaInfo =
                    "Waiting for buffer slot, queued:" + to_string(videoFrameQueue.size()) + " \n";
            PostStatusMessage(playMediaInfo.c_str());
            pthread_cond_wait(&mBufferMaxCond, &mDecodeMutex);
        }

        if (mStopRequested || !mIsPlaying) {
            pthread_mutex_unlock(&mDecodeMutex);
            break;
        }

        ret = av_read_frame(mFormatContext, &packet);
        if (ret < 0) {
            pthread_mutex_unlock(&mDecodeMutex);

            if (ret == AVERROR_EOF) {
                LOGI("End of file reached");
                break;
            } else {
                LOGE("Error reading frame: %d", ret);
                usleep(10000);
                continue;
            }
        }

        if (packet.stream_index == mVideoStreamIndex) {
            ret = avcodec_send_packet(mCodecContext, &packet);
            if (ret < 0) {
                LOGE("Error sending packet to decoder: %d", ret);
                av_packet_unref(&packet);
                pthread_mutex_unlock(&mDecodeMutex);
                continue;
            }

            while (avcodec_receive_frame(mCodecContext, frame) == 0) {
                AVFrame *frameCopy = av_frame_alloc();
                if (!frameCopy) {
                    LOGE("Could not allocate frame copy");
                    continue;
                }
                if (av_frame_ref(frameCopy, frame) >= 0) {
                    // Ownership of frameCopy passes to the queue; the render
                    // thread must av_frame_free it after display.
                    videoFrameQueue.push(frameCopy);
                    pthread_cond_signal(&mRenderCond);
                } else {
                    // Fix: the original also unlocked mDecodeMutex here, and
                    // then unlocked it AGAIN at the bottom of the outer loop —
                    // a double unlock (undefined behaviour). Only release the
                    // failed copy; the single unlock below still runs.
                    av_frame_free(&frameCopy);
                }
            }
        }

        av_packet_unref(&packet);
        pthread_mutex_unlock(&mDecodeMutex);
    }

    av_frame_free(&frame);
    LOGI("Decode thread finished");
}

渲染线程:

代码如下:

cpp 复制代码
void FFGLPlayer::renderVideoThread() {
    LOGI("Render thread started");
    PostStatusMessage("Render thread started \n");

    AVRational timeBase = mFormatContext->streams[mVideoStreamIndex]->time_base;
    int64_t lastPts = AV_NOPTS_VALUE;

    while (!mStopRequested && mIsPlaying) {
        pthread_mutex_lock(&mRenderMutex);

        while (videoFrameQueue.empty() && !mStopRequested && mIsPlaying) {
            pthread_cond_wait(&mRenderCond, &mRenderMutex);
        }

        if (mStopRequested || !mIsPlaying) {
            pthread_mutex_unlock(&mRenderMutex);
            break;
        }

        if (!videoFrameQueue.empty()) {
            std::shared_ptr<AVFrame *> framePtr = videoFrameQueue.pop();
            AVFrame *frame = *framePtr;
            pthread_mutex_unlock(&mRenderMutex);

            // 基于时间戳的帧率控制
            if (lastPts != AV_NOPTS_VALUE && frame->pts != AV_NOPTS_VALUE) {
                int64_t ptsDiff = frame->pts - lastPts;
                double timeDiff = av_q2d(timeBase) * ptsDiff * 1000000; // 转换为微秒
                if (timeDiff > 0 && timeDiff < 1000000) { // 合理的帧间隔
                    usleep(static_cast<useconds_t>(timeDiff));
                }
            }
            lastPts = frame->pts;
            sendFrameDataToEGL(frame);
            // 通知解码线程
            if (videoFrameQueue.size() < maxVideoFrames / 2) {
                pthread_cond_signal(&mBufferMaxCond);
            }
        } else {
            pthread_mutex_unlock(&mRenderMutex);
        }
    }

    LOGI("Render thread finished");
}

sendFrameDataToEGL方法:

这里要做一个数据转换把AVFrame转换成 EGLSurfaceViewVideoRender渲染时draw()方法所需要的uint8_t *buffer数据。

数据转换过程如下:

cpp 复制代码
// Packs a (possibly padded) YUV420P AVFrame into one contiguous buffer:
// Y plane, then U, then V, each with its row padding stripped. The buffer is
// allocated with av_malloc; the caller owns it. Returns 0 on success, -1 on
// a wrong pixel format or allocation failure.
int FFGLPlayer::yuv420p_frame_to_buffer(AVFrame *frame, uint8_t **buffer, int *length) {
    if (!frame || frame->format != AV_PIX_FMT_YUV420P) return -1;

    const int width = frame->width;
    const int height = frame->height;
    const int lumaSize = width * height;
    const int chromaSize = lumaSize / 4;

    *length = lumaSize + 2 * chromaSize;
    *buffer = (uint8_t *) av_malloc(*length);
    if (!*buffer) return -1;

    uint8_t *out = *buffer;
    // Copies 'rows' rows of 'rowBytes' each, skipping the source stride.
    auto copyPlane = [&out](const uint8_t *src, int stride, int rowBytes, int rows) {
        for (int r = 0; r < rows; ++r) {
            memcpy(out, src + r * stride, rowBytes);
            out += rowBytes;
        }
    };

    copyPlane(frame->data[0], frame->linesize[0], width, height);          // Y
    copyPlane(frame->data[1], frame->linesize[1], width / 2, height / 2);  // U
    copyPlane(frame->data[2], frame->linesize[2], width / 2, height / 2);  // V

    return 0;
}

sendFrameDataToEGL()方法代码如下:

cpp 复制代码
// Converts the decoded frame into a tightly packed YUV420P buffer, hands it
// to the GL renderer and schedules a redraw. Returns 0 on success, -1 on
// failure.
int FFGLPlayer::sendFrameDataToEGL(AVFrame *frame) {
    if (!mNativeWindow || !frame) {
        return -1;
    }
    LOGI("sendFrameDataToEGL");

    uint8_t *buffer = nullptr;
    int length = 0;
    // Fix: the conversion result was ignored and the av_malloc'ed buffer was
    // never released — a memory leak on every rendered frame.
    if (yuv420p_frame_to_buffer(frame, &buffer, &length) != 0) {
        return -1;
    }
    // draw() copies the planes into the renderer's own staging buffer
    // synchronously (see updateFrame), so the temporary can be freed now.
    eglsurfaceViewRender->draw(buffer, length, mWidth, mHeight, 90);
    eglsurfaceViewRender->render();
    av_free(buffer);
    return 0;
}

FFGLPlayer完整代码:

FFGLPlayer.h代码:

cpp 复制代码
//  Author : wangyongyao https://github.com/wangyongyao1989
// Created by MMM on 2025/12/6.
//

#ifndef FFMPEGPRACTICE_FFGLPLAYER_H
#define FFMPEGPRACTICE_FFGLPLAYER_H

#include <pthread.h>
#include <atomic>
#include <string>
#include "ThreadSafeQueue.h"
#include "BasicCommon.h"
#include "jni.h"
#include "android/native_window.h"
#include "android/native_window_jni.h"
#include "EGLSurfaceViewVideoRender.h"

// FFmpeg video player that decodes on one thread and renders decoded frames
// through EGLSurfaceViewVideoRender (OpenGL textures) on another.
class FFGLPlayer {
public:
    FFGLPlayer(JNIEnv *env, jobject thiz);

    ~FFGLPlayer();

    // 初始化播放器 — opens the media, the decoder and the GL renderer.
    bool
    init(const string &filePath, const string &fragPath, const string &vertexPath, jobject surface);

    // 开始播放 — spawns the decode and render threads.
    bool start();

    // 暂停播放
    bool pause();

    // 停止播放
    void stop();

    // 获取播放状态
    bool isPlaying() const { return mIsPlaying; }

    bool isInitialized() const { return mInitialized; }


private:
    string playMediaInfo;

    JavaVM *mJavaVm = nullptr;
    jobject mJavaObj = nullptr;      // global ref to the Java peer
    JNIEnv *mEnv = nullptr;          // JNIEnv captured at construction (thread-bound)

    int mWidth;                      // video width from the codec context
    int mHeight;                     // video height from the codec context
    uint8_t *mOutbuffer;
    jobject androidSurface = NULL;   // global ref to the Java Surface (see init)

    //  NativeWindow;
    ANativeWindow *mNativeWindow = nullptr;

    // EGLRender — owned; created in the constructor.
    EGLSurfaceViewVideoRender *eglsurfaceViewRender;

    // FFmpeg 相关
    AVFormatContext *mFormatContext;
    AVCodecContext *mCodecContext;
    int mVideoStreamIndex;

    int64_t mDuration;
    AVSampleFormat mSampleFormat;

    // 播放状态
    std::atomic<bool> mIsPlaying;
    std::atomic<bool> mInitialized;
    std::atomic<bool> mStopRequested;

    // 视频帧队列 — frames are owned by the queue until the render thread
    // consumes them.
    ThreadSafeQueue<AVFrame *> videoFrameQueue;

    int maxVideoFrames = 50;         // back-pressure threshold for the decoder

    // 线程同步
    pthread_t mDecodeThread;
    pthread_t mRenderThread;
    pthread_mutex_t mDecodeMutex;
    pthread_cond_t mBufferMaxCond;   // decoder waits on this when queue is full
    pthread_cond_t mRenderCond;      // renderer waits on this when queue is empty
    pthread_mutex_t mRenderMutex;

    // 私有方法
    bool initFFmpeg(const std::string &filePath);

    bool initEGLRender(const string &fragPath, const string &vertexPath);

    static void *decodeThreadWrapper(void *context);

    void decodeThread();

    static void *renderVideoFrame(void *context);

    void renderVideoThread();

    int sendFrameDataToEGL(AVFrame *frame);

    int yuv420p_frame_to_buffer(AVFrame *frame, uint8_t **buffer, int *length);

    void cleanup();

    void cleanupFFmpeg();

    JNIEnv *GetJNIEnv(bool *isAttach);

    void PostStatusMessage(const char *msg);
};


#endif //FFMPEGPRACTICE_FFGLPLAYER_H

FFGLPlayer.cpp代码:

cpp 复制代码
//  Author : wangyongyao https://github.com/wangyongyao1989
// Created by MMM on 2025/12/6.
//

#include "FFGLPlayer.h"
#include <unistd.h>

// Constructor: captures the JavaVM and a global ref to the Java peer,
// allocates the GL renderer and initializes the thread primitives.
FFGLPlayer::FFGLPlayer(JNIEnv *env, jobject thiz)
        : mEnv(nullptr), mJavaObj(nullptr), mFormatContext(nullptr),
          mCodecContext(nullptr), mVideoStreamIndex(-1), mDuration(0),
          mSampleFormat(AV_SAMPLE_FMT_NONE), mWidth(0), mHeight(0),
          mIsPlaying(false), mInitialized(false), mStopRequested(false),
          mNativeWindow(nullptr),
          mOutbuffer(nullptr), mDecodeThread(0), mRenderThread(0) {

    // NOTE(review): mEnv is only valid on this (construction) thread.
    mEnv = env;
    env->GetJavaVM(&mJavaVm);
    mJavaObj = env->NewGlobalRef(thiz);

    eglsurfaceViewRender = new EGLSurfaceViewVideoRender();

    pthread_mutex_init(&mDecodeMutex, nullptr);
    pthread_mutex_init(&mRenderMutex, nullptr);
    pthread_cond_init(&mBufferMaxCond, nullptr);
    pthread_cond_init(&mRenderCond, nullptr);
}

// Destructor: stops playback, frees FFmpeg/JNI/native-window resources and
// the GL renderer.
FFGLPlayer::~FFGLPlayer() {
    stop();
    cleanup();

    pthread_mutex_destroy(&mDecodeMutex);
    pthread_mutex_destroy(&mRenderMutex);
    pthread_cond_destroy(&mBufferMaxCond);
    pthread_cond_destroy(&mRenderCond);

    // Fix: androidSurface is created with NewGlobalRef in init(), so it must
    // be released with DeleteGlobalRef — the original DeleteLocalRef leaked
    // the global reference.
    if (androidSurface) {
        mEnv->DeleteGlobalRef(androidSurface);
        androidSurface = NULL;
    }
    if (mNativeWindow) {
        ANativeWindow_release(mNativeWindow);
        mNativeWindow = nullptr;
    }

    if (mOutbuffer) {
        av_free(mOutbuffer);
        mOutbuffer = nullptr;
    }

    // Fix: the renderer was only nulled, leaking the object allocated in the
    // constructor.
    if (eglsurfaceViewRender) {
        delete eglsurfaceViewRender;
        eglsurfaceViewRender = nullptr;
    }

    // NOTE(review): mEnv was captured at construction and is only valid on
    // that thread — confirm the destructor runs on the same thread.
    mEnv->DeleteGlobalRef(mJavaObj);
}

// Initializes the whole pipeline: pins the Java Surface as a global ref,
// opens the media/decoder (initFFmpeg), then boots the GL renderer
// (initEGLRender). Idempotent: returns true immediately if already done.
bool FFGLPlayer::init(const string &filePath, const string &fragPath, const string &vertexPath,
                      jobject surface) {
    if (mInitialized) {
        LOGI("Already initialized");
        return true;
    }

    // Global ref: the surface must outlive this JNI call.
    androidSurface = mEnv->NewGlobalRef(surface);

    if (!initFFmpeg(filePath)) {
        LOGE("Failed to initialize FFmpeg");
        PostStatusMessage("Failed to initialize FFmpeg");
        return false;
    }

    if (!initEGLRender(fragPath, vertexPath)) {
        LOGE("Failed to initialize initEGLRender");
        PostStatusMessage("Failed to initialize initEGLRender");
        cleanupFFmpeg();
        return false;
    }

    mInitialized = true;
    LOGI("FFGLPlayer initialized successfully");
    PostStatusMessage("FFGLPlayer initialized successfully");
    return true;
}

/**
 * Opens the input file, locates the first video stream, and opens a decoder
 * for it. On success fills mWidth/mHeight/mDuration and reports the media
 * info to the Java layer.
 *
 * @param filePath path of the media file
 * @return true on success; on any failure all partially-opened FFmpeg state
 *         is released before returning false.
 */
bool FFGLPlayer::initFFmpeg(const std::string &filePath) {
    if (avformat_open_input(&mFormatContext, filePath.c_str(), nullptr, nullptr) != 0) {
        LOGE("Could not open file: %s", filePath.c_str());
        return false;
    }

    if (avformat_find_stream_info(mFormatContext, nullptr) < 0) {
        LOGE("Could not find stream information");
        avformat_close_input(&mFormatContext);
        return false;
    }

    // Pick the first video stream; audio streams are ignored by this player.
    mVideoStreamIndex = -1;
    for (unsigned int i = 0; i < mFormatContext->nb_streams; i++) {
        if (mFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            mVideoStreamIndex = i;
            break;
        }
    }

    if (mVideoStreamIndex == -1) {
        LOGE("Could not find video stream");
        avformat_close_input(&mFormatContext);
        return false;
    }

    AVCodecParameters *codecParams = mFormatContext->streams[mVideoStreamIndex]->codecpar;
    const AVCodec *codec = avcodec_find_decoder(codecParams->codec_id);
    if (!codec) {
        LOGE("Unsupported codec");
        avformat_close_input(&mFormatContext);
        return false;
    }

    mCodecContext = avcodec_alloc_context3(codec);
    if (!mCodecContext) {
        LOGE("Could not allocate codec context");
        avformat_close_input(&mFormatContext);
        return false;
    }

    if (avcodec_parameters_to_context(mCodecContext, codecParams) < 0) {
        LOGE("Could not copy codec parameters");
        cleanupFFmpeg();
        return false;
    }

    if (avcodec_open2(mCodecContext, codec, nullptr) < 0) {
        LOGE("Could not open codec");
        cleanupFFmpeg();
        return false;
    }

    mWidth = mCodecContext->width;
    mHeight = mCodecContext->height;
    // NOTE(review): sample_fmt is an *audio* codec field; on a video decoder
    // context it carries no meaningful value — confirm mSampleFormat is
    // actually needed here.
    mSampleFormat = mCodecContext->sample_fmt;
    mDuration = mFormatContext->duration;   // in AV_TIME_BASE units (microseconds)

    LOGI("GLPlay FFmpeg initialized width: %d, height: %d, duration: %lld",
         mWidth, mHeight, mDuration);

    playMediaInfo = "FFmpeg initialized, width:" + std::to_string(mWidth) +
                    ", height:" + std::to_string(mHeight) +
                    ", duration:" + std::to_string(mDuration) + "\n";
    PostStatusMessage(playMediaInfo.c_str());

    return true;
}

bool FFGLPlayer::initEGLRender(const string &fragPath, const string &vertexPath) {
    mNativeWindow = ANativeWindow_fromSurface(mEnv, androidSurface);
    if (!mNativeWindow) {
        LOGE("Couldn't get native window from surface");
        return false;
    }
    eglsurfaceViewRender->surfaceCreated(mNativeWindow, nullptr);
    eglsurfaceViewRender->setSharderStringPath(vertexPath, fragPath);
    eglsurfaceViewRender->surfaceChanged(mWidth, mHeight);
    return true;
}


/**
 * Starts playback by spawning the decode and render threads.
 * Safe to call again while already playing (returns true, no-op).
 *
 * @return true when both threads are running; false on any failure.
 */
bool FFGLPlayer::start() {
    if (!mInitialized) {
        LOGE("Player not initialized");
        PostStatusMessage("Player not initialized \n");
        return false;
    }

    pthread_mutex_lock(&mDecodeMutex);

    if (mIsPlaying) {
        // Bug fix: the original returned here while still holding
        // mDecodeMutex, deadlocking every later lock attempt.
        pthread_mutex_unlock(&mDecodeMutex);
        return true;
    }

    mStopRequested = false;
    mIsPlaying = true;
    videoFrameQueue.clear();

    if (pthread_create(&mDecodeThread, nullptr, decodeThreadWrapper, this) != 0) {
        LOGE("Failed to create decode thread");
        PostStatusMessage("Failed to create decode thread");
        mIsPlaying = false;
        // Bug fix: unlock before returning on the failure path.
        pthread_mutex_unlock(&mDecodeMutex);
        return false;
    }

    if (pthread_create(&mRenderThread, nullptr, renderVideoFrame, this) != 0) {
        LOGE("Failed to create render thread");
        PostStatusMessage("Failed to create render thread");
        mIsPlaying = false;
        mStopRequested = true;
        // Bug fix: release mDecodeMutex BEFORE joining — the decode thread
        // locks it, so joining while holding it could deadlock.
        pthread_mutex_unlock(&mDecodeMutex);
        pthread_join(mDecodeThread, nullptr);
        mDecodeThread = 0;
        return false;
    }

    pthread_mutex_unlock(&mDecodeMutex);

    LOGI("Playback started");
    PostStatusMessage("Playback started");
    return true;
}

void FFGLPlayer::stop() {
    if (!mInitialized) {
        return;
    }

    mStopRequested = true;
    mIsPlaying = false;

    // 通知所有等待的线程
    pthread_cond_broadcast(&mBufferMaxCond);
    pthread_cond_broadcast(&mRenderCond);

    // 等待解码线程结束
    if (mDecodeThread) {
        pthread_join(mDecodeThread, nullptr);
        mDecodeThread = 0;
    }


    // 等待解码线程结束
    if (mRenderThread) {
        pthread_join(mRenderThread, nullptr);
        mRenderThread = 0;
    }

    videoFrameQueue.clear();

    LOGI("Playback stopped");
    PostStatusMessage("Playback stopped");
}

/**
 * pthread entry point: forwards to the instance's decodeThread().
 * @param context the FFGLPlayer instance passed to pthread_create().
 */
void *FFGLPlayer::decodeThreadWrapper(void *context) {
    auto *self = static_cast<FFGLPlayer *>(context);
    self->decodeThread();
    return nullptr;
}

/**
 * Decode loop (runs on its own thread): reads packets, decodes video frames,
 * and pushes ref-counted frame copies into videoFrameQueue. Blocks on
 * mBufferMaxCond while the queue is full; exits on stop request or EOF.
 */
void FFGLPlayer::decodeThread() {
    AVPacket packet;
    AVFrame *frame = av_frame_alloc();
    int ret;

    if (!frame) {
        LOGE("Could not allocate frame");
        return;
    }

    LOGI("Decode thread started");
    PostStatusMessage("Decode thread started");

    while (!mStopRequested && mIsPlaying) {
        pthread_mutex_lock(&mDecodeMutex);
        // Back-pressure: wait while the queue is at capacity.
        while (videoFrameQueue.size() >= maxVideoFrames && !mStopRequested && mIsPlaying) {
            LOGD("Waiting for buffer slot, queued: %zu", videoFrameQueue.size());
            playMediaInfo =
                    "Waiting for buffer slot, queued:" + to_string(videoFrameQueue.size()) + " \n";
            PostStatusMessage(playMediaInfo.c_str());
            pthread_cond_wait(&mBufferMaxCond, &mDecodeMutex);
        }

        if (mStopRequested || !mIsPlaying) {
            pthread_mutex_unlock(&mDecodeMutex);
            break;
        }

        ret = av_read_frame(mFormatContext, &packet);
        if (ret < 0) {
            pthread_mutex_unlock(&mDecodeMutex);

            if (ret == AVERROR_EOF) {
                LOGI("End of file reached");
                break;
            } else {
                LOGE("Error reading frame: %d", ret);
                usleep(10000);  // brief back-off before retrying
                continue;
            }
        }

        if (packet.stream_index == mVideoStreamIndex) {
            ret = avcodec_send_packet(mCodecContext, &packet);
            if (ret < 0) {
                LOGE("Error sending packet to decoder: %d", ret);
                av_packet_unref(&packet);
                pthread_mutex_unlock(&mDecodeMutex);
                continue;
            }

            // One packet can yield multiple frames; drain the decoder.
            while (avcodec_receive_frame(mCodecContext, frame) == 0) {
                AVFrame *frameCopy = av_frame_alloc();
                if (!frameCopy) {
                    LOGE("Could not allocate frame copy");
                    continue;
                }
                if (av_frame_ref(frameCopy, frame) >= 0) {
                    videoFrameQueue.push(frameCopy);
                    // NOTE(review): mRenderCond is waited on under
                    // mRenderMutex, not mDecodeMutex — signaling here is
                    // legal but verify the queue itself is thread-safe.
                    pthread_cond_signal(&mRenderCond);
                } else {
                    // Bug fix: the original also unlocked mDecodeMutex here,
                    // which combined with the unlock at the bottom of the
                    // loop produced a double unlock (undefined behavior).
                    av_frame_free(&frameCopy);
                }
            }
        }

        av_packet_unref(&packet);
        pthread_mutex_unlock(&mDecodeMutex);
    }

    av_frame_free(&frame);
    LOGI("Decode thread finished");
}

/**
 * pthread entry point: forwards to the instance's renderVideoThread().
 * @param context the FFGLPlayer instance passed to pthread_create().
 */
void *FFGLPlayer::renderVideoFrame(void *context) {
    auto *self = static_cast<FFGLPlayer *>(context);
    self->renderVideoThread();
    return nullptr;
}

/**
 * Render loop (runs on its own thread): pops decoded frames, paces them by
 * their PTS difference, and hands the pixels to the EGL renderer.
 *
 * NOTE(review): the producer (decodeThread) pushes under mDecodeMutex while
 * this consumer pops under mRenderMutex — the queue is only safe if
 * videoFrameQueue is internally synchronized; confirm its implementation.
 * NOTE(review): the popped AVFrame appears to be owned by the queue's
 * shared_ptr wrapper; verify its deleter calls av_frame_free, otherwise
 * every frame leaks.
 */
void FFGLPlayer::renderVideoThread() {
    LOGI("Render thread started");
    PostStatusMessage("Render thread started \n");

    AVRational timeBase = mFormatContext->streams[mVideoStreamIndex]->time_base;
    int64_t lastPts = AV_NOPTS_VALUE;

    while (!mStopRequested && mIsPlaying) {
        pthread_mutex_lock(&mRenderMutex);

        // Block until the decoder has produced at least one frame.
        while (videoFrameQueue.empty() && !mStopRequested && mIsPlaying) {
            pthread_cond_wait(&mRenderCond, &mRenderMutex);
        }

        if (mStopRequested || !mIsPlaying) {
            pthread_mutex_unlock(&mRenderMutex);
            break;
        }

        if (!videoFrameQueue.empty()) {
            std::shared_ptr<AVFrame *> framePtr = videoFrameQueue.pop();
            AVFrame *frame = *framePtr;
            pthread_mutex_unlock(&mRenderMutex);

            // PTS-based frame pacing: sleep for the gap to the previous frame.
            if (lastPts != AV_NOPTS_VALUE && frame->pts != AV_NOPTS_VALUE) {
                int64_t ptsDiff = frame->pts - lastPts;
                double timeDiff = av_q2d(timeBase) * ptsDiff * 1000000; // convert to microseconds
                if (timeDiff > 0 && timeDiff < 1000000) { // sanity-bounded frame interval (< 1s)
                    usleep(static_cast<useconds_t>(timeDiff));
                }
            }
            lastPts = frame->pts;
            sendFrameDataToEGL(frame);
            // Wake the decoder once the queue has drained below half capacity.
            // NOTE(review): size() is read here without holding a lock.
            if (videoFrameQueue.size() < maxVideoFrames / 2) {
                pthread_cond_signal(&mBufferMaxCond);
            }
        } else {
            pthread_mutex_unlock(&mRenderMutex);
        }
    }

    LOGI("Render thread finished");
}

/**
 * Packs one decoded YUV420P frame into a contiguous buffer and draws it via
 * the EGL renderer.
 *
 * @param frame decoded video frame (must be AV_PIX_FMT_YUV420P)
 * @return 0 on success; -1 if no window/frame or the frame could not be packed.
 */
int FFGLPlayer::sendFrameDataToEGL(AVFrame *frame) {
    if (!mNativeWindow || !frame) {
        return -1;
    }
    LOGI("sendFrameDataToEGL");

    uint8_t *buffer = nullptr;
    int length = 0;
    // Bug fix: the return value was ignored; on failure (e.g. a non-YUV420P
    // frame) `buffer` stayed uninitialized and was handed to the renderer.
    if (yuv420p_frame_to_buffer(frame, &buffer, &length) != 0) {
        LOGE("sendFrameDataToEGL: failed to pack frame (format %d)", frame->format);
        return -1;
    }
    eglsurfaceViewRender->draw(buffer, length, mWidth, mHeight, 90);
    eglsurfaceViewRender->render();
    // Bug fix: the av_malloc'd buffer was never released, leaking one full
    // frame per rendered frame.
    // NOTE(review): assumes draw()/render() consume the pixels synchronously;
    // confirm the renderer does not retain this pointer.
    av_free(buffer);
    return 0;
}


/**
 * Releases decoder state and resets the playback flags so the player can be
 * re-initialized.
 */
void FFGLPlayer::cleanup() {
    cleanupFFmpeg();
    mInitialized = false;
    mIsPlaying = false;
}

/**
 * Frees the codec context and closes the input, resetting the stream index.
 * Safe to call repeatedly; all pointers are nulled by the FFmpeg free calls.
 */
void FFGLPlayer::cleanupFFmpeg() {
    if (mCodecContext) {
        // avcodec_free_context() closes the codec and sets the pointer to
        // nullptr itself; the previous avcodec_close() call is deprecated
        // and redundant, as was the manual nulling afterwards.
        avcodec_free_context(&mCodecContext);
    }

    if (mFormatContext) {
        // avformat_close_input() also nulls the pointer.
        avformat_close_input(&mFormatContext);
    }

    mVideoStreamIndex = -1;
}

/**
 * Returns a JNIEnv valid for the calling thread, attaching the thread to
 * the JVM when needed.
 *
 * @param isAttach [out] set to true when this call attached the thread (the
 *                 caller must later DetachCurrentThread), false otherwise.
 * @return the thread's JNIEnv, or nullptr on failure.
 */
JNIEnv *FFGLPlayer::GetJNIEnv(bool *isAttach) {
    if (mJavaVm == nullptr) {
        LOGE("GetJNIEnv mJavaVm == nullptr");
        return nullptr;
    }

    JNIEnv *env = nullptr;
    const int state = mJavaVm->GetEnv(reinterpret_cast<void **>(&env), JNI_VERSION_1_6);

    if (state == JNI_OK) {
        // Thread already attached; nothing to undo later.
        *isAttach = false;
        return env;
    }

    if (state != JNI_EDETACHED) {
        LOGE("Failed to get JNIEnv");
        return nullptr;
    }

    // Detached worker thread: attach it and flag for later detach.
    if (mJavaVm->AttachCurrentThread(&env, nullptr) != JNI_OK) {
        LOGE("Failed to attach current thread");
        return nullptr;
    }
    *isAttach = true;
    return env;
}

void FFGLPlayer::PostStatusMessage(const char *msg) {
    bool isAttach = false;
    JNIEnv *env = GetJNIEnv(&isAttach);
    if (!env) {
        return;
    }

    jmethodID mid = env->GetMethodID(env->GetObjectClass(mJavaObj),
                                     "CppStatusCallback", "(Ljava/lang/String;)V");
    if (mid) {
        jstring jMsg = env->NewStringUTF(msg);
        env->CallVoidMethod(mJavaObj, mid, jMsg);
        env->DeleteLocalRef(jMsg);
    }

    if (isAttach) {
        mJavaVm->DetachCurrentThread();
    }
}


/**
 * Packs a (possibly row-padded) YUV420P frame into one tightly packed
 * buffer laid out as Y plane, then U, then V.
 *
 * NOTE(review): the size math assumes even width/height; odd dimensions
 * would truncate the chroma planes — confirm inputs are always even.
 *
 * @param frame  source frame; must be AV_PIX_FMT_YUV420P
 * @param buffer [out] receives an av_malloc'd buffer (caller frees with av_free)
 * @param length [out] receives the buffer size in bytes
 * @return 0 on success, -1 on wrong format or allocation failure.
 */
int FFGLPlayer::yuv420p_frame_to_buffer(AVFrame *frame, uint8_t **buffer, int *length) {
    if (frame == nullptr || frame->format != AV_PIX_FMT_YUV420P) {
        return -1;
    }
    const int w = frame->width;
    const int h = frame->height;
    const int ySize = w * h;
    const int uvSize = ySize / 4;
    *length = ySize + uvSize * 2;
    *buffer = static_cast<uint8_t *>(av_malloc(*length));
    if (*buffer == nullptr) {
        return -1;
    }

    uint8_t *out = *buffer;
    // Copy a plane row by row, stripping the per-row padding (linesize).
    auto copyPlane = [&out](const uint8_t *src, int stride, int rowBytes, int rows) {
        for (int r = 0; r < rows; ++r) {
            memcpy(out, src + r * stride, rowBytes);
            out += rowBytes;
        }
    };

    copyPlane(frame->data[0], frame->linesize[0], w, h);          // Y plane
    copyPlane(frame->data[1], frame->linesize[1], w / 2, h / 2);  // U plane
    copyPlane(frame->data[2], frame->linesize[2], w / 2, h / 2);  // V plane

    return 0;
}

四、效果展示:


五、源码地址:

以上的代码放在本人的GitHub项目

https://github.com/wangyongyao1989/FFmpegPractices

https://github.com/wangyongyao1989/WyFFmpeg

https://github.com/wangyongyao1989/AndroidLearnOpenGL

相关推荐
花启莫你是不是傻4 小时前
鸿蒙下FFmpeg编译流程梳理
华为·ffmpeg·harmonyos
_Cherry|5 小时前
Unity中使用AVPRO播放视频
unity·c#·音视频
wuk9985 小时前
基于帧差分法的视频运动检测MATLAB实现
开发语言·matlab·音视频
Yutengii18 小时前
b站视频下载到电脑本地的方法有哪些
音视频
summerkissyou198720 小时前
Android13-Audio-AudioTrack-播放流程
android·音视频
千里马-horse20 小时前
FFmpeg 工具文档
ffmpeg
Black蜡笔小新21 小时前
安防监控/录像存储EasyCVR视频汇聚平台无法启动的原因排查
音视频
xingqing87y1 天前
祝寿视频怎么制作:4步制作创意祝寿视频
音视频
qq_256247051 天前
Spring Boot + NATS 实战:如何让 IM 系统处理图片/视频像处理文本一样快?
spring boot·后端·音视频