Playing RTSP streams in uniapp with FFmpeg

uniapp cannot use FFmpeg for playback on its own, so we need to build a native Android plugin.

The FFmpeg version used here is 7.1.

1. First, set up an Android project that loads FFmpeg and decodes the video stream.

2. Next, build that native code into .so files.

3. Use the .so files in the component packaging example project that uniapp provides (important: the Java package name must match the package compiled into the .so's JNI symbols, otherwise the native methods cannot be used; see the JNI naming sketch right after this list).

4. Package the result as a plugin that uniapp can load.
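
The package-name requirement in step 3 exists because JNI resolves native methods by symbol name, and the Java package and class are baked into that name. A sketch of the rule, using the decodeVideo function that native-lib.cpp (shown later) actually exports:

// JNI naming rule: Java_<package, dots replaced by underscores>_<Class>_<method>
// The Java declaration
//     package rtsp.ffmpeg.player;
//     class RtspPlayerView { private native void decodeVideo(String rtspUrl, Surface surface); }
// is resolved against this exported symbol:
extern "C" JNIEXPORT void JNICALL
Java_rtsp_ffmpeg_player_RtspPlayerView_decodeVideo(JNIEnv *env, jobject thiz,
                                                   jstring rtsp_url, jobject surface);
// If the Java class is moved to a different package, System.loadLibrary() still succeeds,
// but calling the method throws java.lang.UnsatisfiedLinkError at runtime.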

The Android project structure is shown in the original post's screenshot (not reproduced here). The project needs the FFmpeg headers and prebuilt .so libraries placed alongside the native sources.
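
A layout consistent with the paths referenced in the CMakeLists.txt below would be (directory names inferred from those paths, so treat this as an assumption):

app/src/main/cpp/
├── CMakeLists.txt
├── native-lib.cpp
├── include/
│   ├── jniLog.h
│   ├── Decoder.h
│   └── ffmpeg/include/<ANDROID_ABI>/   (FFmpeg headers, one folder per ABI)
└── lib/
    └── ffmpeg/lib/<ANDROID_ABI>/       (FFmpeg .so libraries, one folder per ABI)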

CMakeLists.txt:

cmake_minimum_required(VERSION 3.18.1)

project("decodertsp")
include_directories(${CMAKE_SOURCE_DIR}/include/ffmpeg/include/${ANDROID_ABI})
include_directories(${CMAKE_SOURCE_DIR})

set(LIB_DIR ${CMAKE_SOURCE_DIR}/lib/ffmpeg/lib/${ANDROID_ABI})

add_library( # Sets the name of the library.
        decodertsp

        # Sets the library as a shared library.
        SHARED

        # Provides a relative path to your source file(s).
        native-lib.cpp)

add_library(avformat
        SHARED
        IMPORTED)

set_target_properties(
        avformat
        PROPERTIES IMPORTED_LOCATION
        ${LIB_DIR}/libavformat.so)

add_library(avcodec
        SHARED
        IMPORTED)

set_target_properties(
        avcodec
        PROPERTIES IMPORTED_LOCATION
        ${LIB_DIR}/libavcodec.so)

add_library(swscale
        SHARED
        IMPORTED)

set_target_properties(
        swscale
        PROPERTIES IMPORTED_LOCATION
        ${LIB_DIR}/libswscale.so)
add_library(avutil
        SHARED
        IMPORTED)

set_target_properties(
        avutil
        PROPERTIES IMPORTED_LOCATION
        ${LIB_DIR}/libavutil.so)

add_library(swresample
        SHARED
        IMPORTED)

set_target_properties(
        swresample
        PROPERTIES IMPORTED_LOCATION
        ${LIB_DIR}/libswresample.so)

add_library(avfilter
        SHARED
        IMPORTED)

set_target_properties(
        avfilter
        PROPERTIES IMPORTED_LOCATION
        ${LIB_DIR}/libavfilter.so)

#add_library(postproc
#        SHARED
#        IMPORTED)
#
#set_target_properties(
#        postproc
#        PROPERTIES IMPORTED_LOCATION
#        ${LIB_DIR}/libpostproc.so)

find_library( # Sets the name of the path variable.
        log-lib
        log)
target_link_libraries( # Specifies the target library.
        decodertsp
        android
        avcodec
        avfilter
        avformat
        avutil
        swresample
        swscale
#        postproc
        ${log-lib})

native-lib.cpp

#include <jni.h>
#include <string>
#include <atomic>
#include <include/jniLog.h>
#include "include/Decoder.h"
#include <android/native_window_jni.h>
#include <android/native_window.h>

// FFmpeg is a C library, so its headers must be wrapped in extern "C".
// (Decoder.h may already pull these in; repeating them keeps this file self-contained.)
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}

// Written from the UI thread (stopVideo) and read from the decode thread, so keep it atomic.
std::atomic<bool> IsStop{false};
extern "C"
JNIEXPORT void JNICALL
Java_rtsp_ffmpeg_player_RtspPlayerView_decodeVideo(JNIEnv *env, jobject thiz, jstring rtsp_url,
                                                      jobject surface) {
    const char *uri = env->GetStringUTFChars(rtsp_url, 0);
    IsStop = false;
    // Decode the video stream; audio decoding follows the same pattern.
    avformat_network_init();
    AVFormatContext *pFormatContext = NULL;
    int formatOpenInputRes = 0;
    int formatFindStreamInfoRes = 0;
    int videoStreamIndex = -1;
    AVCodecParameters *pCodecParameters;
    const AVCodec *pCodec = NULL;
    AVCodecContext *pCodecContext = NULL;
    int codecParametersToContextRes = -1;
    int codecOpenRes = -1;
    int index = 0;
    AVPacket *pPacket = NULL;
    AVFrame *pFrame = NULL;
    formatOpenInputRes = avformat_open_input(&pFormatContext, uri, NULL, NULL);
    const char* version = av_version_info();
    LOGI("Version= %s",version);
    if (formatOpenInputRes < 0) {
        LOGE("open url error : %s", av_err2str(formatOpenInputRes));
        env->ReleaseStringUTFChars(rtsp_url, uri);
        return;
    }


    formatFindStreamInfoRes = avformat_find_stream_info(pFormatContext, NULL);
    if (formatFindStreamInfoRes < 0) {
        LOGE("find stream info error : %s", av_err2str(formatFindStreamInfoRes));
        avformat_close_input(&pFormatContext);
        env->ReleaseStringUTFChars(rtsp_url, uri);
        return;
    }

    // Find the index of the video stream
    videoStreamIndex = av_find_best_stream(pFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (videoStreamIndex < 0) {
        LOGE("no video stream found");
        avformat_close_input(&pFormatContext);
        env->ReleaseStringUTFChars(rtsp_url, uri);
        return;
    }

    // Find the decoder for the video stream
    pCodecParameters = pFormatContext->streams[videoStreamIndex]->codecpar;
    pCodec = avcodec_find_decoder(pCodecParameters->codec_id);

    // Open the decoder
    pCodecContext = avcodec_alloc_context3(pCodec);
    codecParametersToContextRes = avcodec_parameters_to_context(pCodecContext, pCodecParameters);
    codecOpenRes = avcodec_open2(pCodecContext, pCodec, NULL);
    if (codecParametersToContextRes < 0 || codecOpenRes < 0) {
        LOGE("open decoder error");
        avcodec_free_context(&pCodecContext);
        avformat_close_input(&pFormatContext);
        env->ReleaseStringUTFChars(rtsp_url, uri);
        return;
    }
    // 1. Get the native window from the Java Surface
    ANativeWindow *pNativeWindow = ANativeWindow_fromSurface(env, surface);
    if (pNativeWindow == NULL) {
        LOGE("ANativeWindow_fromSurface failed");
        avcodec_free_context(&pCodecContext);
        avformat_close_input(&pFormatContext);
        env->ReleaseStringUTFChars(rtsp_url, uri);
        return;
    }
    // 2. Set the window buffer geometry to the video size, RGBA8888 format
    ANativeWindow_setBuffersGeometry(pNativeWindow, pCodecContext->width, pCodecContext->height, WINDOW_FORMAT_RGBA_8888);
    // Buffer that receives the window's back buffer on each lock
    ANativeWindow_Buffer outBuffer;
    // 3. Initialise the sws context that converts decoded frames to RGBA
    SwsContext *pSwsContext = sws_getContext(pCodecContext->width, pCodecContext->height,
                                             pCodecContext->pix_fmt, pCodecContext->width, pCodecContext->height,
                                             AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL);
    AVFrame *pRgbaFrame = av_frame_alloc();
    int frameSize = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecContext->width,
                                             pCodecContext->height, 1);
    uint8_t *frameBuffer = (uint8_t *) malloc(frameSize);
    av_image_fill_arrays(pRgbaFrame->data, pRgbaFrame->linesize, frameBuffer, AV_PIX_FMT_RGBA,
                         pCodecContext->width, pCodecContext->height, 1);

    pPacket = av_packet_alloc();
    pFrame = av_frame_alloc();
    while (av_read_frame(pFormatContext, pPacket) >= 0 && !IsStop) {
        if (pPacket->stream_index == videoStreamIndex) {
            // Send the compressed packet to the decoder
            int codecSendPacketRes = avcodec_send_packet(pCodecContext, pPacket);
            if (codecSendPacketRes == 0) {
                int codecReceiveFrameRes = avcodec_receive_frame(pCodecContext, pFrame);
                if (codecReceiveFrameRes == 0) {
                    // AVPacket -> AVFrame: we now have a decoded frame
                    index++;
                    //LOGE("decoded frame %d", index);
                    // Convert the decoded frame to RGBA
                    sws_scale(pSwsContext, (const uint8_t *const *) pFrame->data, pFrame->linesize,
                              0, pCodecContext->height, pRgbaFrame->data, pRgbaFrame->linesize);
                    // Push the RGBA data into the window buffer
                    if (ANativeWindow_lock(pNativeWindow, &outBuffer, NULL) < 0) {
                        LOGE("ANativeWindow_lock is ERROR");
                    } else {
                        // Copy row by row: the window stride (in pixels) may be wider than the frame
                        uint8_t *dst = (uint8_t *) outBuffer.bits;
                        int dstLineSize = outBuffer.stride * 4;
                        int srcLineSize = pRgbaFrame->linesize[0];
                        for (int h = 0; h < pCodecContext->height; h++) {
                            memcpy(dst + h * dstLineSize, frameBuffer + h * srcLineSize, srcLineSize);
                        }
                        if (ANativeWindow_unlockAndPost(pNativeWindow) < 0) {
                            LOGE("ANativeWindow_unlockAndPost is ERROR");
                        }
                    }
                }
            }
        }
        // Release the packet/frame references before the next iteration
        av_packet_unref(pPacket);
        av_frame_unref(pFrame);
    }

    // Free the packet and frame (the pointers are also reset to NULL)
    av_packet_free(&pPacket);
    av_frame_free(&pFrame);

    // Release the conversion resources and the native window
    av_frame_free(&pRgbaFrame);
    free(frameBuffer);
    sws_freeContext(pSwsContext);
    ANativeWindow_release(pNativeWindow);

    if (pCodecContext != NULL) {
        avcodec_free_context(&pCodecContext);
    }

    if (pFormatContext != NULL) {
        // avformat_close_input() both closes and frees the context
        avformat_close_input(&pFormatContext);
    }
    avformat_network_deinit();

    env->ReleaseStringUTFChars(rtsp_url, uri);
}
extern "C"
JNIEXPORT void JNICALL
Java_rtsp_ffmpeg_player_RtspPlayerView_stopVideo(JNIEnv *env, jobject thiz) {
    IsStop = true;
}
extern "C"
JNIEXPORT void JNICALL
Java_rtsp_ffmpeg_player_RtspPlayerView_drawToSurface(JNIEnv *env, jclass clazz, jobject surface,
                                                     jint color) {
    ANativeWindow_Buffer nwBuffer;

    LOGI("ANativeWindow_fromSurface ");
    ANativeWindow *mANativeWindow = ANativeWindow_fromSurface(env, surface);

    if (mANativeWindow == NULL) {
        LOGE("ANativeWindow_fromSurface error");
        return;
    }

    LOGI("ANativeWindow_lock ");
    if (0 != ANativeWindow_lock(mANativeWindow, &nwBuffer, 0)) {
        LOGE("ANativeWindow_lock error");
        ANativeWindow_release(mANativeWindow);
        return;
    }

    LOGI("ANativeWindow_lock nwBuffer->format ");
    if (nwBuffer.format == WINDOW_FORMAT_RGBA_8888) {
        LOGI("nwBuffer->format == WINDOW_FORMAT_RGBA_8888 ");
        for (int i = 0; i < nwBuffer.height * nwBuffer.width; i++) {
            *((int*)nwBuffer.bits + i) = color;
        }
    }
    LOGI("ANativeWindow_unlockAndPost ");
    if (0 != ANativeWindow_unlockAndPost(mANativeWindow)) {
        LOGE("ANativeWindow_unlockAndPost error");
    }

    ANativeWindow_release(mANativeWindow);

}
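
One optional tweak that is not in the original code: FFmpeg's RTSP demuxer defaults to UDP transport, which tends to drop packets on weak networks. avformat_open_input() accepts an AVDictionary of options, so the open call above could be extended roughly like this (a sketch; the option values are assumptions to tune per camera):

// Ask the RTSP demuxer to interleave the stream over TCP before opening the input.
AVDictionary *opts = NULL;
av_dict_set(&opts, "rtsp_transport", "tcp", 0);  // TCP instead of the default UDP
av_dict_set(&opts, "max_delay", "500000", 0);    // demuxer reorder delay, in microseconds

formatOpenInputRes = avformat_open_input(&pFormatContext, uri, NULL, &opts);
av_dict_free(&opts);  // any options the demuxer did not consume are left in the dictionary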

RtspPlayerView.java

package rtsp.ffmpeg.player;

import android.content.Context;
import android.graphics.PixelFormat;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class RtspPlayerView  extends SurfaceView implements SurfaceHolder.Callback {

    // Load the native library
    static {
        System.loadLibrary("decodertsp");
    }


    private  SurfaceHolder holder;
    private  String url = "";

    public  RtspPlayerView(Context context, AttributeSet attrs){
        super(context,attrs);

        init();
    }

    public  RtspPlayerView(Context context,AttributeSet attrs,int defStyleAttr){
        super(context,attrs,defStyleAttr);
        init();
    }

    private  void init(){

        holder = getHolder();
        holder.addCallback(this);
        holder.setFormat(PixelFormat.RGBA_8888);

    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.i("RtspPlayerView", "Surface create success");


    }
    public void stop() {
        // Fill the surface with opaque black (alpha in the high byte of the ARGB int),
        // then ask the decode loop to stop
        drawToSurface(holder.getSurface(), 0xFF000000);
        stopVideo();
    }
    public void play(String uri) {
        this.url = uri;

        if (uri != null && !uri.isEmpty()) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    decodeVideo(url, holder.getSurface());

                }
            }).start();
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        Log.i("RtspPlayerView", "Surface changed");
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.i("RtspPlayerView", "Surface destroyed");
        // Stop decoding so the native side no longer posts to a destroyed surface
        stopVideo();
    }
    public static String  rgb2Hex(int r,int g,int b){
        return String.format("0xFF%02X%02X%02X", r,g,b);
    }

    public static native void drawToSurface(Surface surface, int color);

    private native void decodeVideo(String rtspUrl, Surface surface);

    private native void stopVideo();
}

MainActivity.java

package com.hisign.decodertsp;

import androidx.appcompat.app.AppCompatActivity;

import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.widget.Toast;

import com.blankj.utilcode.util.KeyboardUtils;
import com.blankj.utilcode.util.SPUtils;
import com.hisign.decodertsp.databinding.ActivityMainBinding;

import rtsp.ffmpeg.player.RtspPlayerView;

public class MainActivity extends AppCompatActivity {
    private final static String KEY_IP  = "KEY_IP";

    private ActivityMainBinding binding;

    private RtspPlayerView myRtsp;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        binding = ActivityMainBinding.inflate(getLayoutInflater());
        setContentView(binding.getRoot());

        String ip = SPUtils.getInstance().getString(KEY_IP);
        binding.etIp.setText(ip);

        myRtsp = new RtspPlayerView(this,null,0);
        binding.rtspParent.addView(myRtsp);

        binding.btnStart.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Read the IP currently typed into the EditText rather than the value cached in onCreate
                String ip = binding.etIp.getText().toString().trim();

                if (TextUtils.isEmpty(ip)) {
                    Toast.makeText(MainActivity.this, "please input ip!", Toast.LENGTH_SHORT).show();
                    return;
                }

                SPUtils.getInstance().put(KEY_IP, ip);
                String url = "rtsp://admin:123456@" + ip + ":554/H.264/ch1/main/av_stream";

                myRtsp.play(url);

                KeyboardUtils.hideSoftInput(MainActivity.this);
            }
        });



        binding.btnStop.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                myRtsp.stop();
            }
        });
    }


}

The app module's build.gradle:

plugins {
    id 'com.android.application'
}

android {
    compileSdk 32

    defaultConfig {
        applicationId "com.hisign.decodertsp"
        minSdk 23
        targetSdk 32
        versionCode 1
        versionName "1.0"
        ndkVersion "25.1.8937393"  // Pin the NDK version
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
        externalNativeBuild {
            cmake {
                cppFlags '-std=c++11'
            }
        }
        ndk {
            abiFilters 'arm64-v8a','armeabi-v7a'
            //abiFilters 'armeabi-v7a'
        }
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }
    externalNativeBuild {
        cmake {
            path file('src/main/cpp/CMakeLists.txt')
            version '3.18.1'
        }
    }
    buildFeatures {
        viewBinding true
    }
}

dependencies {
    implementation 'androidx.appcompat:appcompat:1.3.0'
    implementation 'com.google.android.material:material:1.4.0'
    implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
    testImplementation 'junit:junit:4.13.2'
    androidTestImplementation 'androidx.test.ext:junit:1.1.3'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
    implementation 'com.blankj:utilcodex:1.31.1'
}

Thanks to the original article: https://blog.csdn.net/weixin_46999174/article/details/140531411

After the build completes, the .so files can be found under \app\build\intermediates\merged_native_libs\debug\out\lib (one subdirectory per ABI).

Copy these .so files into the uniapp native plugin example project, keeping the per-ABI folder structure under the plugin module's libs directory.

Add the following to that module's build.gradle:

android {
    sourceSets {
        main {
            jniLibs.srcDirs = ["libs"]
        }
    }
}
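
For reference, the libs directory that jniLibs.srcDirs points at would then look roughly like this (the module name is taken from the AAR output path below; the exact set of FFmpeg libraries depends on how FFmpeg was built):

uniplugin_component/libs/
├── arm64-v8a/
│   ├── libdecodertsp.so
│   ├── libavcodec.so
│   ├── libavfilter.so
│   ├── libavformat.so
│   ├── libavutil.so
│   ├── libswresample.so
│   └── libswscale.so
└── armeabi-v7a/
    └── (the same set of .so files)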

After the plugin module is built, the AAR package is produced under \uniplugin_component\build\outputs\aar.

Then reference the AAR in the uniapp project as shown in the original post's screenshots; used this way, RTSP playback works.

uni-app·抖音小程序