背景
实现直播功能时,需要把带绿幕的摄像头内容和视频,渲染成透明效果。已经基于ffmpeg的功能实现了一版,但是在稳定性、关键帧处理、音视频同步方面还有一些问题。因此,基于ijkplayer再做一个demo,做效果对比,顺便学习下优秀开源库的方案。
实现思路
ijkplayer支持mediacodec(硬解)直接对接到渲染,也支持使用surfaceview+gles(软解)的方式,所以在这里,我强制使用gles的渲染方式,让我们能够在ijkplayer的渲染流程里面,做自定义的滤镜处理。
实现步骤
为方便调试,这里使用了一个开源的可调试的ijkplayer版本A4ijkplayer,用来做C代码的调试。下面的代码只是部分关键逻辑,真实代码可以参考文章底部贴出来的代码链接。
C层修改
JNI接口
ijkplayer_jni.c
#if CUSTOM_GL_FILTER
/* JNI method-table entry: maps Java `_setGLFilter(boolean)` onto the native toggle below. */
{ "_setGLFilter", "(Z)V", (void *) IjkMediaPlayer_native_setGLFilter },
#endif
增加属性,传递数据
ijksdl_vout.h
struct SDL_Vout {
SDL_mutex *mutex;                 /* serializes overlay creation/display on this vout */
SDL_Class *opaque_class;
SDL_Vout_Opaque *opaque;
SDL_VoutOverlay *(*create_overlay)(int width, int height, int frame_format, SDL_Vout *vout);
void (*free_l)(SDL_Vout *vout);
int (*display_overlay)(SDL_Vout *vout, SDL_VoutOverlay *overlay);
Uint32 overlay_format;
#if CUSTOM_GL_FILTER
/* Custom GL filter hooks. Set by ijkmp_android_set_filter() and copied onto
 * each overlay in func_create_overlay() so the GLES renderer can call back
 * into the Java layer from the render thread. */
int has_filter;                                   /* non-zero when a Java-side filter is installed */
void *mp;                                         /* back-pointer to the owning IjkMediaPlayer */
void (* func_onCreated)(void *mp);                /* filter GL resources are ready */
void (* func_onSizeChanged)(void *mp, int width, int height);
int (* func_onDrawFrame)(void *mp,int textureId); /* returns texture drawn by Java, or <0 on error */
void (* func_onTexcoords)(void *mp, float *texcoords); /* 8 floats: 4 (s,t) pairs */
void (* func_onVertices)(void *mp, float *vertices);   /* 8 floats: 4 (x,y) pairs */
void (* func_onRelease)(void *mp);
#endif
};
internal.h
/* --- custom GL filter state added to IJK_GLES2_Renderer (internal.h) --- */
int has_filter;                                    /* non-zero when a Java filter is installed */
void *mp;                                          /* owning IjkMediaPlayer, handed back to every callback */
void (* func_onCreated)(void *mp);                 /* invoked once after the FBO is created */
void (* func_onSizeChanged)(void *mp, int width, int height);
int (* func_onDrawFrame)(void *mp, int textureId); /* Java draws; return value drives the fallback draw */
void (* func_onTexcoords)(void *mp, float *texcoords);
void (* func_onVertices)(void *mp, float *vertices);
void (* func_onRelease)(void *mp);
int view_width;                                    /* display surface size, set from IJK_EGL_prepareRenderer */
int view_height;
GLuint frame_buffers[1];                           /* offscreen FBO the video frame is rendered into */
GLuint frame_textures[1];                          /* color attachment of that FBO */
GLfloat texcoords_test[8];                         /* fixed full-quad texcoords used by the filter build */
GLfloat vertices_test[8];                          /* fixed full-quad vertices used by the filter build */
IjkMediaPlayer.c
/* Cached jclass/jmethodID handles for tv.danmaku.ijk.media.player.IjkMediaPlayer.
 * The six method_onFilter* IDs are resolved in J4A_loadClass below and used by
 * the __catchAll wrappers to call the static Java filter callbacks. */
typedef struct J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer {
jclass id;
jfieldID field_mNativeMediaPlayer;
jfieldID field_mNativeMediaDataSource;
jfieldID field_mNativeAndroidIO;
jmethodID method_postEventFromNative;
jmethodID method_onSelectCodec;
jmethodID method_onNativeInvoke;
#if CUSTOM_GL_FILTER
jmethodID method_onFilterCreated;
jmethodID method_onFilterSizeChanged;
jmethodID method_onFilterDrawFrame;
jmethodID method_onFilterTexcoords;
jmethodID method_onFilterVertices;
jmethodID method_onFilterRelease;
#endif
} J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer;
// Resolve the six static filter-callback method IDs. This code is appended to
// the body of the function below; `goto fail` targets its existing cleanup label.
int J4A_loadClass__J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer(JNIEnv *env)
// onFilterCreated: (Object weakThiz) -> void
class_id = class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id;
name = "onFilterCreated";
sign = "(Ljava/lang/Object;)V";
class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterCreated = J4A_GetStaticMethodID__catchAll(env, class_id, name, sign);
if (class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterCreated == NULL)
goto fail;
// onFilterSizeChanged: (Object weakThiz, int width, int height) -> void
class_id = class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id;
name = "onFilterSizeChanged";
sign = "(Ljava/lang/Object;II)V";
class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterSizeChanged = J4A_GetStaticMethodID__catchAll(env, class_id, name, sign);
if (class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterSizeChanged == NULL)
goto fail;
// onFilterDrawFrame: (Object weakThiz, int textureId) -> int
class_id = class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id;
name = "onFilterDrawFrame";
sign = "(Ljava/lang/Object;I)I";
class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterDrawFrame = J4A_GetStaticMethodID__catchAll(env, class_id, name, sign);
if (class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterDrawFrame == NULL)
goto fail;
// onFilterTexcoords: (Object weakThiz, float[] texcoords) -> void
class_id = class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id;
name = "onFilterTexcoords";
sign = "(Ljava/lang/Object;[F)V";
class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterTexcoords = J4A_GetStaticMethodID__catchAll(env, class_id, name, sign);
if (class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterTexcoords == NULL)
goto fail;
// onFilterVertices: (Object weakThiz, float[] vertices) -> void
class_id = class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id;
name = "onFilterVertices";
sign = "(Ljava/lang/Object;[F)V";
class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterVertices = J4A_GetStaticMethodID__catchAll(env, class_id, name, sign);
if (class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterVertices == NULL)
goto fail;
// onFilterRelease: (Object weakThiz) -> void
class_id = class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id;
name = "onFilterRelease";
sign = "(Ljava/lang/Object;)V";
class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterRelease = J4A_GetStaticMethodID__catchAll(env, class_id, name, sign);
if (class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterRelease == NULL)
goto fail;
// 增加C调用JAVA的方法
/* Forward the "filter created" event to the static Java callback.
 * Any exception thrown by the Java side is checked and swallowed. */
void J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onFilterCreated__catchAll(JNIEnv *env, jobject weakThiz)
{
    (*env)->CallStaticVoidMethod(env,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterCreated,
                                 weakThiz);
    (void) J4A_ExceptionCheck__catchAll(env);
}
/* Forward a size change to the static Java callback (onFilterSizeChanged).
 * Any exception thrown by the Java side is checked and swallowed. */
void J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onSizeChanged__catchAll(JNIEnv *env, jobject weakThiz, jint width, jint height)
{
    (*env)->CallStaticVoidMethod(env,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterSizeChanged,
                                 weakThiz, width, height);
    (void) J4A_ExceptionCheck__catchAll(env);
}
/* Ask the Java filter to draw the given texture.
 * Returns the texture id the Java side reports having drawn, or -1 if the
 * Java callback threw an exception. */
int J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onDrawFrame__catchAll(JNIEnv *env, jobject weakThiz, jint textureId)
{
    /* BUGFIX: CallStaticIntMethod returns a jint; it was previously stored
     * in a jlong and implicitly truncated on return. */
    jint ret_value = (*env)->CallStaticIntMethod(env,
                                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id,
                                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterDrawFrame,
                                                 weakThiz, textureId);
    if (J4A_ExceptionCheck__catchAll(env)) {
        return -1;
    }
    return ret_value;
}
/* Hand the current texture coordinates (float[8]) to the static Java callback.
 * Any exception thrown by the Java side is checked and swallowed. */
void J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onTexcoords__catchAll(JNIEnv *env, jobject weakThiz, jfloatArray texcoords)
{
    (*env)->CallStaticVoidMethod(env,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterTexcoords,
                                 weakThiz, texcoords);
    (void) J4A_ExceptionCheck__catchAll(env);
}
/* Hand the current vertex coordinates (float[8]) to the static Java callback.
 * Any exception thrown by the Java side is checked and swallowed. */
void J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onVertices__catchAll(JNIEnv *env, jobject weakThiz, jfloatArray vertices)
{
    (*env)->CallStaticVoidMethod(env,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterVertices,
                                 weakThiz, vertices);
    (void) J4A_ExceptionCheck__catchAll(env);
}
/* Notify the Java filter that native GL resources are being released.
 * Any exception thrown by the Java side is checked and swallowed. */
void J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onRelease__catchAll(JNIEnv *env, jobject weakThiz)
{
    (*env)->CallStaticVoidMethod(env,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id,
                                 class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onFilterRelease,
                                 weakThiz);
    (void) J4A_ExceptionCheck__catchAll(env);
}
事件回调
C
/* Render-thread trampoline: attach a JNIEnv and forward "created" to Java. */
void onCreated(void *mp){
    JNIEnv *env = NULL;
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s: onCreated failed\n", __func__);
        return;
    }
    IjkMediaPlayer *player = (IjkMediaPlayer *) mp;
    jobject weak_this = (jobject) player->weak_thiz;
    J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onFilterCreated__catchAll(env, weak_this);
}
/* Render-thread trampoline: attach a JNIEnv and forward the view size to Java. */
void onSizeChanged(void *mp, int width, int height){
    JNIEnv *env = NULL;
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s: onSizeChanged failed\n", __func__);
        return;
    }
    IjkMediaPlayer *player = (IjkMediaPlayer *) mp;
    jobject weak_this = (jobject) player->weak_thiz;
    J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onSizeChanged__catchAll(env, weak_this, width, height);
}
/* Render-thread trampoline: attach a JNIEnv and ask the Java filter to draw
 * textureId. Returns the Java result, or -1 if no env could be attached. */
int onDrawFrame(void *mp, int textureId){
    JNIEnv *env = NULL;
    IjkMediaPlayer *player = (IjkMediaPlayer*)mp;
    jobject weak_this = (jobject) player->weak_thiz;
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        /* BUGFIX: log said "onSizeChanged failed" (copy/paste) */
        ALOGE("%s: onDrawFrame failed\n", __func__);
        return -1;
    }
    return J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onDrawFrame__catchAll(env, weak_this, textureId);
}
/* Render-thread trampoline: copy 8 texcoord floats into a Java float[] and
 * forward them to the Java filter. */
void onTexcoords(void *mp, float *texcoords){
    JNIEnv *env = NULL;
    IjkMediaPlayer *player = (IjkMediaPlayer*)mp;
    jobject weak_this = (jobject) player->weak_thiz;
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        /* BUGFIX: log said "onSizeChanged failed" (copy/paste) */
        ALOGE("%s: onTexcoords failed\n", __func__);
        return;
    }
    jfloatArray array = (*env)->NewFloatArray(env, 8);
    /* BUGFIX: NewFloatArray can fail (OOM) — don't pass NULL downstream */
    if (array == NULL)
        return;
    (*env)->SetFloatArrayRegion(env, array, 0, 8, texcoords);
    J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onTexcoords__catchAll(env, weak_this, array);
    /* BUGFIX: release the local ref — this runs per-frame on a native thread,
     * so leaked local refs accumulate until the local-ref table overflows */
    (*env)->DeleteLocalRef(env, array);
}
/* Render-thread trampoline: copy 8 vertex floats into a Java float[] and
 * forward them to the Java filter. */
void onVertices(void *mp, float *vertices){
    JNIEnv *env = NULL;
    IjkMediaPlayer *player = (IjkMediaPlayer*)mp;
    jobject weak_this = (jobject) player->weak_thiz;
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        /* BUGFIX: log said "onSizeChanged failed" (copy/paste) */
        ALOGE("%s: onVertices failed\n", __func__);
        return;
    }
    jfloatArray array = (*env)->NewFloatArray(env, 8);
    if (array == NULL)
        return;
    (*env)->SetFloatArrayRegion(env, array, 0, 8, vertices);
    /* BUGFIX: this previously called the __onTexcoords__catchAll wrapper, so
     * the Java filter's onVertices() was never invoked and its onTexcoords()
     * received vertex data instead of texture coordinates. */
    J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onVertices__catchAll(env, weak_this, array);
    /* release the per-frame local ref (native thread; no VM frame pop) */
    (*env)->DeleteLocalRef(env, array);
}
/* Render-thread trampoline: attach a JNIEnv and notify the Java filter that
 * native GL resources are being released. */
void onRelease(void *mp){
    JNIEnv *env = NULL;
    IjkMediaPlayer *player = (IjkMediaPlayer*)mp;
    jobject weak_this = (jobject) player->weak_thiz;
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        /* BUGFIX: log said "onSizeChanged failed" (copy/paste) */
        ALOGE("%s: onRelease failed\n", __func__);
        return;
    }
    J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onRelease__catchAll(env, weak_this);
}
/* JNI entry for Java _setGLFilter(boolean): installs or clears the six
 * native filter callbacks on the player's vout. */
static void IjkMediaPlayer_native_setGLFilter(JNIEnv *env, jobject thiz, jboolean hasFilter)
{
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    /* BUGFIX: mp is NULL once the native player is released; the old code
     * dereferenced it unconditionally inside ijkmp_android_set_filter. */
    if (mp == NULL)
        return;
    if (hasFilter) {
        ijkmp_android_set_filter(mp, 1, onCreated, onSizeChanged, onDrawFrame, onTexcoords, onVertices, onRelease);
    } else {
        ijkmp_android_set_filter(mp, 0, NULL, NULL, NULL, NULL, NULL, NULL);
    }
    /* NOTE(review): other ijkplayer JNI entry points balance
     * jni_get_media_player() with ijkmp_dec_ref_p(&mp) — confirm whether the
     * same is required here to avoid a reference leak. */
}
参数传递
ijkplayer_android.c
/* Store the filter flag and the six callback pointers on the player's vout.
 * Called from the JNI layer; the vout copies these onto each overlay so the
 * GLES renderer can reach the Java filter. */
void ijkmp_android_set_filter(IjkMediaPlayer *mp,int has_filter,void *onCreated,void *onSizeChanged,void *onDrawFrame,void *onTexcoords,void *onVertices,void *onRelease){
    /* BUGFIX: guard the full dereference chain — setFilter can race with
     * player shutdown, in which case ffplayer/vout may already be gone. */
    if (!mp || !mp->ffplayer || !mp->ffplayer->vout)
        return;
    SDL_Vout *vout = mp->ffplayer->vout;
    vout->has_filter = has_filter;
    vout->mp = mp;
    vout->func_onCreated = onCreated;
    vout->func_onSizeChanged = onSizeChanged;
    vout->func_onDrawFrame = onDrawFrame;
    vout->func_onTexcoords = onTexcoords;
    vout->func_onVertices = onVertices;
    vout->func_onRelease = onRelease;
}
ijksdl_vout_android_nativewindow.c
/* Create an overlay under the vout lock and propagate the filter hooks onto
 * it so the renderer can call the Java filter for this frame. */
static SDL_VoutOverlay *func_create_overlay(int width, int height, int frame_format, SDL_Vout *vout)
{
    SDL_LockMutex(vout->mutex);
    SDL_VoutOverlay *overlay = func_create_overlay_l(width, height, frame_format, vout);
#if CUSTOM_GL_FILTER
    /* BUGFIX: func_create_overlay_l can fail and return NULL (e.g. unsupported
     * format or OOM); the old code dereferenced the result unconditionally. */
    if (overlay) {
        overlay->mp = vout->mp;
        overlay->has_filter = vout->has_filter;
        overlay->func_onCreated = vout->func_onCreated;
        overlay->func_onSizeChanged = vout->func_onSizeChanged;
        overlay->func_onDrawFrame = vout->func_onDrawFrame;
        overlay->func_onTexcoords = vout->func_onTexcoords;
        overlay->func_onVertices = vout->func_onVertices;
        overlay->func_onRelease = vout->func_onRelease;
    }
#endif
    SDL_UnlockMutex(vout->mutex);
    return overlay;
}
渲染
renderer.c
//修改方法:
void IJK_GLES2_Renderer_reset(IJK_GLES2_Renderer *renderer)
//增加以下逻辑
/* Filter-resource cleanup added to IJK_GLES2_Renderer_reset(). */
/* BUGFIX: `if (renderer->frame_textures)` compared the array itself, which
 * decays to a non-NULL pointer and is therefore always true — test the
 * generated name instead (0 means "not created"). */
if (renderer->frame_textures[0]) {
    glDeleteTextures(1, renderer->frame_textures);
    renderer->frame_textures[0] = 0;
}
if (renderer->frame_buffers[0]) {
    /* BUGFIX: the FBO comes from glGenFramebuffers, so it must be released
     * with glDeleteFramebuffers; glDeleteBuffers targets buffer objects and
     * leaked the framebuffer every reset. */
    glDeleteFramebuffers(1, renderer->frame_buffers);
    renderer->frame_buffers[0] = 0;
}
if (renderer->has_filter && renderer->func_onRelease) {
    renderer->func_onRelease(renderer->mp);
}
修改
IJK_GLES2_Renderer *IJK_GLES2_Renderer_create_base(const char *fragment_shader_source)
增加
/* Fixed full-screen quad used by the FBO pass, in GL_TRIANGLE_STRIP order.
 * Texcoords flip the video vertically (t runs 1 -> 0) so the frame lands
 * upright in the FBO; vertices cover the full clip space [-1, 1]. */
renderer->texcoords_test[0] = 0.0f;
renderer->texcoords_test[1] = 1.0f;
renderer->texcoords_test[2] = 1.0f;
renderer->texcoords_test[3] = 1.0f;
renderer->texcoords_test[4] = 0.0f;
renderer->texcoords_test[5] = 0.0f;
renderer->texcoords_test[6] = 1.0f;
renderer->texcoords_test[7] = 0.0f;
renderer->vertices_test[0] = -1.0f;
renderer->vertices_test[1] = -1.0f;
renderer->vertices_test[2] = 1.0f;
renderer->vertices_test[3] = -1.0f;
renderer->vertices_test[4] = -1.0f;
renderer->vertices_test[5] = 1.0f;
renderer->vertices_test[6] = 1.0f;
renderer->vertices_test[7] = 1.0f;
修改
static void IJK_GLES2_Renderer_Vertices_apply(IJK_GLES2_Renderer *renderer)
增加
/* Added to IJK_GLES2_Renderer_Vertices_apply(): mirror the freshly computed
 * vertex coords to the Java filter so it can match the native geometry. */
if(renderer->has_filter){
renderer->func_onVertices(renderer->mp, renderer->vertices);
}
/* Re-bind the position attribute. The filter build always feeds the fixed
 * full-screen quad (vertices_test); aspect/crop handling moves to the Java
 * filter, which received the real vertices via func_onVertices. */
static void IJK_GLES2_Renderer_Vertices_reloadVertex(IJK_GLES2_Renderer *renderer)
{
#if CUSTOM_GL_FILTER
    const GLfloat *positions = renderer->vertices_test;
#else
    const GLfloat *positions = renderer->vertices;
#endif
    /* BUGFIX: trace labels previously said "av2_texcoord" (copy/paste from the
     * texcoord loader); they now name the attribute actually configured. */
    glVertexAttribPointer(renderer->av4_position, 2, GL_FLOAT, GL_FALSE, 0, positions); IJK_GLES2_checkError_TRACE("glVertexAttribPointer(av4_position)");
    glEnableVertexAttribArray(renderer->av4_position); IJK_GLES2_checkError_TRACE("glEnableVertexAttribArray(av4_position)");
}
修改
IJK_GLES2_Renderer_TexCoords_cropRight
增加
/* Added to IJK_GLES2_Renderer_TexCoords_cropRight(): mirror the cropped
 * texcoords to the Java filter so it samples the same region natively used. */
if(renderer->has_filter){
renderer->func_onTexcoords(renderer->mp, renderer->texcoords);
}
/* Re-bind the texture-coordinate attribute. The filter build always uses the
 * fixed full-quad coordinates; the real (cropped) coords were forwarded to
 * the Java filter via func_onTexcoords. */
static void IJK_GLES2_Renderer_TexCoords_reloadVertex(IJK_GLES2_Renderer *renderer)
{
#if CUSTOM_GL_FILTER
    const GLfloat *coords = renderer->texcoords_test;
#else
    const GLfloat *coords = renderer->texcoords;
#endif
    glVertexAttribPointer(renderer->av2_texcoord, 2, GL_FLOAT, GL_FALSE, 0, coords); IJK_GLES2_checkError_TRACE("glVertexAttribPointer(av2_texcoord)");
    glEnableVertexAttribArray(renderer->av2_texcoord); IJK_GLES2_checkError_TRACE("glEnableVertexAttribArray(av2_texcoord)");
}
增加方法
/* Record the display-surface size (from EGL) so the filter pass can size its
 * viewport/FBO to the view instead of the decoded frame. */
void IJK_GLES2_Renderer_set_view_size(IJK_GLES2_Renderer *renderer,int width,int height){
    /* BUGFIX: IJK_EGL_prepareRenderer can reach this with a NULL renderer
     * when renderer creation failed — guard the dereference. */
    if (renderer == NULL)
        return;
    renderer->view_width = width;
    renderer->view_height = height;
}
修改方法
IJK_GLES2_Renderer_renderOverlay
修改后的结果为
/* Upload (when overlay != NULL) and draw one video frame.
 * Filter build: the frame is first drawn into an offscreen FBO, then the FBO
 * texture is handed to the Java filter (func_onDrawFrame); if Java declines
 * (returns <0 or the same texture id), the plain quad is drawn as fallback.
 * A NULL overlay only forces a vertex reload. Returns GL_FALSE on error. */
GLboolean IJK_GLES2_Renderer_renderOverlay(IJK_GLES2_Renderer *renderer, SDL_VoutOverlay *overlay)
{
    /* BUGFIX: validate the renderer BEFORE the first dereference. The filter
     * build previously deferred this check until after renderer->frame_width
     * had already been read, crashing on a NULL renderer. */
    if (!renderer || !renderer->func_uploadTexture)
        return GL_FALSE;
#if CUSTOM_GL_FILTER
    /* glClear is deferred until the render target (FBO or default) is bound. */
#else
    glClear(GL_COLOR_BUFFER_BIT); IJK_GLES2_checkError_TRACE("glClear");
#endif
    GLsizei visible_width = renderer->frame_width;
    GLsizei visible_height = renderer->frame_height;
    if (overlay) {
        visible_width = overlay->w;
        visible_height = overlay->h;
        if (renderer->frame_width != visible_width ||
            renderer->frame_height != visible_height ||
            renderer->frame_sar_num != overlay->sar_num ||
            renderer->frame_sar_den != overlay->sar_den) {
            renderer->frame_width = visible_width;
            renderer->frame_height = visible_height;
            renderer->frame_sar_num = overlay->sar_num;
            renderer->frame_sar_den = overlay->sar_den;
            renderer->vertices_changed = 1;
        }
        renderer->last_buffer_width = renderer->func_getBufferWidth(renderer, overlay);
        if (!renderer->func_uploadTexture(renderer, overlay))
            return GL_FALSE;
    } else {
        /* NULL overlay means: force vertex reload only */
        renderer->vertices_changed = 1;
    }
    GLsizei buffer_width = renderer->last_buffer_width;
    if (renderer->vertices_changed ||
        (buffer_width > 0 &&
         buffer_width > visible_width &&
         buffer_width != renderer->buffer_width &&
         visible_width != renderer->visible_width)) {
        renderer->vertices_changed = 0;
        IJK_GLES2_Renderer_Vertices_apply(renderer);
#if CUSTOM_GL_FILTER
        /* filter build reloads vertices right before the draw below */
#else
        IJK_GLES2_Renderer_Vertices_reloadVertex(renderer);
#endif
        renderer->buffer_width = buffer_width;
        renderer->visible_width = visible_width;
        GLsizei padding_pixels = buffer_width - visible_width;
        GLfloat padding_normalized = ((GLfloat)padding_pixels) / buffer_width;
        IJK_GLES2_Renderer_TexCoords_reset(renderer);
        IJK_GLES2_Renderer_TexCoords_cropRight(renderer, padding_normalized);
        IJK_GLES2_Renderer_TexCoords_reloadVertex(renderer);
    }
#if CUSTOM_GL_FILTER
    /* Lazily create the offscreen FBO once the frame size is known, then tell
     * the Java filter it can set up its GL resources. */
    if (renderer->has_filter && !renderer->frame_buffers[0] &&
        renderer->frame_width > 0 && renderer->frame_height > 0) {
        glGenTextures(1, renderer->frame_textures);
        glBindTexture(GL_TEXTURE_2D, renderer->frame_textures[0]);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, renderer->frame_width, renderer->frame_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glBindTexture(GL_TEXTURE_2D, 0);
        glGenFramebuffers(1, renderer->frame_buffers);
        glBindFramebuffer(GL_FRAMEBUFFER, renderer->frame_buffers[0]);
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, renderer->frame_textures[0], 0);
        /* NOTE(review): no glCheckFramebufferStatus here, and the FBO is sized
         * to the FIRST frame only — if the stream resolution changes later the
         * FBO is never re-created. Confirm and handle both cases. */
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
        renderer->func_onCreated(renderer->mp);
        renderer->func_onSizeChanged(renderer->mp, renderer->view_width, renderer->view_height);
    }
    if (renderer->has_filter && renderer->frame_buffers[0]) {
        glBindFramebuffer(GL_FRAMEBUFFER, renderer->frame_buffers[0]);
    }
    renderer->func_use(renderer);
    glViewport(0, 0, renderer->frame_width, renderer->frame_height); IJK_GLES2_checkError_TRACE("glViewport");
    glClear(GL_COLOR_BUFFER_BIT); IJK_GLES2_checkError_TRACE("glClear");
    /* draw the video frame (into the FBO when a filter is active) using the
     * fixed full-quad vertex/texcoord arrays */
    IJK_GLES2_Renderer_TexCoords_reloadVertex(renderer);
    IJK_GLES2_Renderer_Vertices_reloadVertex(renderer);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); IJK_GLES2_checkError_TRACE("glDrawArrays");
    if (renderer->has_filter && renderer->frame_buffers[0]) {
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
        /* hand the FBO texture to Java; <0 or "same texture" means the filter
         * did not produce output, so draw the plain quad as fallback */
        int result = renderer->func_onDrawFrame(renderer->mp, renderer->frame_textures[0]);
        if (result < 0 || result == renderer->frame_textures[0]) {
            glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); IJK_GLES2_checkError_TRACE("glDrawArrays");
        }
    }
#else
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); IJK_GLES2_checkError_TRACE("glDrawArrays");
#endif
    return GL_TRUE;
}
ijksdl_egl.c
修改
IJK_EGL_prepareRenderer
底部增加
IJK_GLES2_Renderer_set_view_size(opaque->renderer,egl->width, egl->height); /* propagate the EGL surface size so the filter's viewport/FBO matches the view */
JAVA层
接口定义
IjkFilter.java
/**
 * Callback contract for a custom GL filter driven by the native GLES renderer.
 * All methods are invoked on the native render thread with its GL context current.
 */
public interface IjkFilter {
    void onCreated();                       // GL resources ready; compile shaders here
    void onSizeChanged(int width, int height); // display surface size
    int onDrawFrame(int textureId);         // draw; return the texture actually presented
    void onTexcoords(float[] texcoords);    // 8 floats: current native texture coords
    void onVertices(float[] vertices);      // 8 floats: current native vertex coords
    void onRelease();                       // free GL resources
    boolean enable();                       // whether the filter is currently active
}
滤镜实现
java
/**
 * Chroma-key ("green screen") filter: draws the video texture through a
 * shader that turns green pixels transparent. Runs on the native render
 * thread with the player's GL context current.
 */
public class GLGreenVideoFilter implements IjkFilter {
    private Context context;
    private FloatBuffer vertexBuffer;
    private FloatBuffer textureVertexBuffer;
    private int programId = 0;
    private int aPositionHandle;
    private int uTextureSamplerHandle;
    private int aTextureCoordHandle;
    // Must start as false: enabling requires additional setup first.
    private boolean enabled = false;

    public GLGreenVideoFilter(Context context){
        this.context = context;
        // Full-screen quad in GL_TRIANGLE_STRIP order; may later be replaced
        // by the native renderer via onVertices()/onTexcoords().
        final float[] vertexData = {
                -1.0f, -1.0f,
                1.0f, -1.0f,
                -1.0f, 1.0f,
                1.0f, 1.0f
        };
        final float[] textureVertexData = {
                0.0f, 1.0f,
                1.0f, 1.0f,
                0.0f, 0.0f,
                1.0f, 0.0f
        };
        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);
        textureVertexBuffer = ByteBuffer.allocateDirect(textureVertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(textureVertexData);
        textureVertexBuffer.position(0);
    }

    @Override
    public boolean enable() {
        return enabled;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    @Override
    public void onCreated() {
        String vertexShader = ShaderUtils.readRawTextFile(context, R.raw.test_vertext_shader);
        String fragmentShader = ShaderUtils.readRawTextFile(context, R.raw.test_fragment_sharder);
        programId = ShaderUtils.createProgram(vertexShader, fragmentShader);
        aPositionHandle = GLES20.glGetAttribLocation(programId, "aPosition");
        uTextureSamplerHandle = GLES20.glGetUniformLocation(programId, "tex");
        aTextureCoordHandle = GLES20.glGetAttribLocation(programId, "aTexCoord");
        // Blending so keyed-out (alpha=0) pixels become transparent.
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    }

    private Rect rect = new Rect();

    @Override
    public void onSizeChanged(int width, int height) {
        rect.left = 0;
        rect.top = 0;
        rect.right = width;
        rect.bottom = height;
    }

    boolean createSurface = true;

    @Override
    public int onDrawFrame(int textureId) {
        if (!enabled) {
            // Returning the input texture unchanged tells the native side to
            // draw the plain (unfiltered) quad itself.
            return textureId;
        }
        if (createSurface) {
            // Show textureId's surface (one-time setup hook).
            createSurface = false;
        }
        GLES20.glUseProgram(programId);
        GLES20.glViewport(0, 0, rect.right, rect.bottom);
        GLES20.glClearColor(0.f, 0, 0f, 0);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glEnableVertexAttribArray(aPositionHandle);
        GLES20.glVertexAttribPointer(aPositionHandle, 2, GLES20.GL_FLOAT, false,
                8, vertexBuffer);
        GLES20.glEnableVertexAttribArray(aTextureCoordHandle);
        GLES20.glVertexAttribPointer(aTextureCoordHandle, 2, GLES20.GL_FLOAT, false, 8, textureVertexBuffer);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        GLES20.glUniform1i(uTextureSamplerHandle, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glUseProgram(GLES20.GL_NONE);
        // NOTE(review): the native renderer re-draws the plain quad whenever
        // the returned id equals the FBO texture id — confirm that returning
        // textureId here does not overwrite the filtered output.
        return textureId;
    }

    // BUGFIX: the three methods below are part of the IjkFilter contract but
    // were missing @Override, hiding signature drift from the compiler.
    @Override
    public void onTexcoords(float[] texcoords) {
        textureVertexBuffer.clear();
        textureVertexBuffer.put(texcoords);
        textureVertexBuffer.position(0);
    }

    @Override
    public void onVertices(float[] vertices) {
        vertexBuffer.clear();
        vertexBuffer.put(vertices);
        vertexBuffer.position(0);
    }

    @Override
    public void onRelease() {
        if (programId != 0) {
            GLES20.glDeleteProgram(programId);
            // BUGFIX: reset so a stale (now-invalid) program id is never reused
            // if the filter is recreated after a GL context loss.
            programId = 0;
        }
    }
}
着色器代码,这里是调试写死的参数,实际使用可以通过参数传递进来
test_fragment_sharder.glsl
precision mediump float;
varying highp vec2 vTexCoord;
uniform sampler2D tex;
// Tuning parameters are hard-coded below for debugging; in production pass
// them as uniforms:
//uniform vec3 keyColor = vec3(0.449,0.805,0.414);
//uniform float similarity;
//uniform float smoothness;
//uniform float spill;
// From https://github.com/libretro/glsl-shaders/blob/master/nnedi3/shaders/rgb-to-yuv.glsl
// Convert RGB to the UV (chroma) plane, biased into [0,1].
vec2 RGBtoUV(vec3 rgb) {
return vec2(
rgb.r * -0.169 + rgb.g * -0.331 + rgb.b * 0.5 + 0.5,
rgb.r * 0.5 + rgb.g * -0.419 + rgb.b * -0.081 + 0.5
);
}
// Chroma-key: alpha from chroma distance to the key color, plus green-spill
// desaturation near the key.
vec4 ProcessChromaKey(vec2 texCoord) {
vec4 rgba = texture2D(tex, texCoord);
// debug values; see the commented-out uniforms above
vec3 keyColor = vec3(0.0,1.0,0.0);
float similarity = 0.4;
float smoothness = 0.08;
float spill = 0.1;
// vec3 keyColor = vec3(115,206,106);
float chromaDist = distance(RGBtoUV(texture2D(tex, texCoord).rgb), RGBtoUV(keyColor));
// return vec4(1.0 *chromaDist,1.0 *chromaDist,1.0 *chromaDist,1.0);
float baseMask = chromaDist - similarity;
float fullMask = pow(clamp(baseMask / smoothness, 0., 1.), 1.5);
rgba.a = fullMask;
// suppress green spill: desaturate pixels close to the key color
float spillVal = pow(clamp(baseMask / spill, 0., 1.), 1.5);
float desat = clamp(rgba.r * 0.2126 + rgba.g * 0.7152 + rgba.b * 0.0722, 0., 1.);
rgba.rgb = mix(vec3(desat, desat, desat), rgba.rgb, spillVal);
return rgba;
}
void main(void) {
gl_FragColor = ProcessChromaKey(vTexCoord);
}
业务调用
IjkVideoView.java
强制打开gles,走软解
ijkMediaPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "overlay-format", "fcc-_es2");
IjkMediaPlayer.java
/**
 * Install (or clear, with null) the GL filter. The native render thread may
 * invoke the static onFilterX callbacks concurrently with this call.
 */
public void setFilter(IjkFilter filter){
    // _setGLFilter(filter);
    this.filter = filter;
    _setGLFilter(filter != null);
}
private native void _setGLFilter(boolean hasFilter);
// Current filter; read by the static @CalledByNative callbacks below.
private IjkFilter filter;
/** Native → Java: filter GL resources are ready. Runs on the render thread. */
@CalledByNative
private static void onFilterCreated(Object weakThiz) {
    if (weakThiz == null || !(weakThiz instanceof WeakReference<?>))
        return;
    @SuppressWarnings("unchecked")
    WeakReference<IjkMediaPlayer> weakPlayer = (WeakReference<IjkMediaPlayer>) weakThiz;
    IjkMediaPlayer player = weakPlayer.get();
    if (player == null)
        return;
    // BUGFIX: snapshot the field — setFilter(null) on another thread would
    // otherwise NPE between the check and the call.
    IjkFilter filter = player.filter;
    if (filter == null)
        return;
    filter.onCreated();
}
/** Native → Java: display surface size changed. Runs on the render thread. */
@CalledByNative
private static void onFilterSizeChanged(Object weakThiz, int width, int height) {
    if (weakThiz == null || !(weakThiz instanceof WeakReference<?>))
        return;
    @SuppressWarnings("unchecked")
    WeakReference<IjkMediaPlayer> weakPlayer = (WeakReference<IjkMediaPlayer>) weakThiz;
    IjkMediaPlayer player = weakPlayer.get();
    if (player == null)
        return;
    // BUGFIX: guard against a concurrently cleared filter (NPE).
    IjkFilter filter = player.filter;
    if (filter == null)
        return;
    filter.onSizeChanged(width, height);
}
/**
 * Native → Java: let the filter draw textureId. Returning textureId unchanged
 * tells native to draw the plain quad itself. Runs on the render thread.
 */
@CalledByNative
private static int onFilterDrawFrame(Object weakThiz, int textureId) {
    if (weakThiz == null || !(weakThiz instanceof WeakReference<?>))
        return textureId;
    @SuppressWarnings("unchecked")
    WeakReference<IjkMediaPlayer> weakPlayer = (WeakReference<IjkMediaPlayer>) weakThiz;
    IjkMediaPlayer player = weakPlayer.get();
    if (player == null)
        return textureId;
    // BUGFIX: guard against a concurrently cleared filter (NPE); fall back to
    // the unfiltered texture like the other early-return paths.
    IjkFilter filter = player.filter;
    if (filter == null)
        return textureId;
    return filter.onDrawFrame(textureId);
}
/** Native → Java: current native texture coords (8 floats). Render thread. */
@CalledByNative
private static void onFilterTexcoords(Object weakThiz, float[] texcoords) {
    if (weakThiz == null || !(weakThiz instanceof WeakReference<?>))
        return;
    @SuppressWarnings("unchecked")
    WeakReference<IjkMediaPlayer> weakPlayer = (WeakReference<IjkMediaPlayer>) weakThiz;
    IjkMediaPlayer player = weakPlayer.get();
    if (player == null)
        return;
    // BUGFIX: guard against a concurrently cleared filter (NPE).
    IjkFilter filter = player.filter;
    if (filter == null)
        return;
    filter.onTexcoords(texcoords);
}
/** Native → Java: current native vertex coords (8 floats). Render thread. */
@CalledByNative
private static void onFilterVertices(Object weakThiz, float[] vertices) {
    if (weakThiz == null || !(weakThiz instanceof WeakReference<?>))
        return;
    @SuppressWarnings("unchecked")
    WeakReference<IjkMediaPlayer> weakPlayer = (WeakReference<IjkMediaPlayer>) weakThiz;
    IjkMediaPlayer player = weakPlayer.get();
    if (player == null)
        return;
    // BUGFIX: guard against a concurrently cleared filter (NPE).
    IjkFilter filter = player.filter;
    if (filter == null)
        return;
    filter.onVertices(vertices);
}
/** Native → Java: native GL resources are being released. Render thread. */
@CalledByNative
private static void onFilterRelease(Object weakThiz) {
    if (weakThiz == null || !(weakThiz instanceof WeakReference<?>))
        return;
    @SuppressWarnings("unchecked")
    WeakReference<IjkMediaPlayer> weakPlayer = (WeakReference<IjkMediaPlayer>) weakThiz;
    IjkMediaPlayer player = weakPlayer.get();
    if (player == null)
        return;
    // BUGFIX: guard against a concurrently cleared filter (NPE).
    IjkFilter filter = player.filter;
    if (filter == null)
        return;
    filter.onRelease();
}
实现效果
关闭抠绿时的效果 开启抠绿时的效果
同时播放9个视频并开启抠绿的效果