Functional Design of a Remote-Control Transport SDK Built on Real-Time Audio/Video Technology

Remote control has a long lineage, from pcAnywhere to VNC to Windows' built-in remote tools. The pandemic then accelerated ToDesk's rise and let it take remote-control market share from Sunlogin (向日葵); alongside them sit the open-source RustDesk, the hardware-based PiKVM, and the well-known cloud-gaming pair Sunshine/Moonlight. All of these tools chase the same goals: speed, small bitstreams, image quality, and private connections. Years of that effort have produced major technical advances across the industry, and the cash incentives of cloud gaming in particular keep making this technology more valuable.

Over years of working on audio/video applications, I have been experimenting with how to build an SDK of my own that adapts to these fast-changing scenarios, and the engineering attempts have produced some results.

For ultra-low-latency streaming, WebRTC is the obvious transport. But WebRTC's built-in QoS machinery can hurt both image quality and latency, so you end up building your own application-layer data protocol on top of WebRTC transport. Balancing latency against quality is an engineering art: in practice you can stick to a single IDR frame per session, send only P-frames when a refresh is not strictly required to cut bandwidth, and use zero-copy capture and encoding to reduce CPU load and latency. These practices take a lot of time and effort to work out; interest and positive feedback are both indispensable, and I keep encouraging myself along the way.
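To make the single-IDR / P-frame strategy concrete, here is a minimal sketch assuming x264 as the H.264 encoder (the article does not name a specific encoder, and the function names here are illustrative): the GOP is set to be effectively infinite, so only the first frame is an IDR, and a new IDR is forced only on demand, for example when a viewer joins or reports packet loss.

#include <x264.h>

// Open an encoder whose GOP never closes on its own: with i_keyint_max set
// to "infinite", x264 emits one IDR at the start and P-frames afterwards;
// the application requests a fresh IDR only when it actually needs one.
static x264_t *open_low_latency_encoder(int w, int h, int fps, int kbps) {
    x264_param_t param;
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    param.i_width = w;
    param.i_height = h;
    param.i_fps_num = fps;
    param.i_fps_den = 1;
    param.i_keyint_max = X264_KEYINT_MAX_INFINITE; // no periodic IDR
    param.i_bframe = 0;                            // P-frames only, no B-frame delay
    param.rc.i_rc_method = X264_RC_ABR;
    param.rc.i_bitrate = kbps;
    return x264_encoder_open(&param);
}

// Encode one frame; force_idr is set by the application when a viewer joins
// or reports loss (the OnReqIDR path in the SDK below plays this role).
static int encode_frame(x264_t *enc, x264_picture_t *pic, bool force_idr,
                        x264_nal_t **nal, int *nal_count) {
    x264_picture_t pic_out;
    pic->i_type = force_idr ? X264_TYPE_IDR : X264_TYPE_AUTO;
    return x264_encoder_encode(enc, nal, nal_count, pic, &pic_out);
}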

I have no formal CS background; everything here is self-taught and the code is not especially elegant. Most of it is simply experience from my own practice, so if the experts spot problems, please don't laugh; I will gladly take your comments and suggestions. Below is part of the SDK's usage code. It implements signaling, pulling streams from remote peers, and publishing local streams to remote peers. It supports multiple stream protocols and low-latency H.265 encoding and transport, deploys quickly to Windows, Linux, and Android devices, and integrates conveniently with the Sunshine cloud-gaming ecosystem, allowing peers to exchange audio, video, and files with each other. Local stream management uses a subscribe/publish mechanism that turns complex business logic into simple produce/consume pipelines, cascading included. Scenarios such as IPC surveillance, remote control, and group calls can all be integrated with minimal effort, even all at once: a resident background service is both an application and an edge server, and it can form a network or run pure P2P. Treat the code below as a conversation starter; in the current tough environment I hope to meet capable people to learn with and create more possibilities together.
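Before the listing itself, here is a minimal usage sketch assembled from the interfaces defined in it (the constructor, SetWebRTCConfig, PublishStream, SetWebRTCPullStream, StartWebRTCPullStream). The endpoints, stream names, and channel/signal IDs are placeholders, error handling is omitted, and the av_H264 constant is assumed to come in via gomediainterface.h:

#include "gomediainterface.h"

int main() {
    // Placeholder endpoints; substitute real MQTT/STUN/TURN servers.
    char mqtt[]   = "tcp://mqtt.example.com:1883";
    char sn[]     = "device-sn-0001";
    char client[] = "client-0001";
    char stun[]   = "stun:stun.example.com:3478";
    char turn[]   = "turn:turn.example.com:3478";
    char tuser[]  = "user";
    char tpass[]  = "pass";

    int channel = 0, signalid = 0;
    gomediainterface media("rtsp://127.0.0.1/live", channel);

    // Start MQTT signaling and register the WebRTC ICE servers.
    media.SetWebRTCConfig(mqtt, sn, client, stun, turn, tuser, tpass, signalid);

    // Publish the local H.264 stream so peers can subscribe to it.
    char pubname[] = "localstream";
    media.PublishStream(pubname, channel, av_H264, /*card=*/0, /*device=*/0);

    // Pull a remote H.265 stream over the datachannel transport, cascading it
    // into a local stream and requesting data callbacks for rendering.
    char server[] = "remote-node";
    char stream[] = "screen0";
    char mode[]   = "datachannel_trans";
    media.SetWebRTCPullStream(server, stream, mode, /*bIsH265=*/true,
                              /*bUseVideo=*/1, /*bUseAudio=*/1,
                              /*bOnDataCall=*/1, signalid, channel);
    media.StartWebRTCPullStream();
    return 0;
}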

//
// Created by xiang on 2023/7/14.
//

#include "gomediainterface.h"
#include "macro.h"
#include "Nalu.h"
extern "C" {
#include <libgomedia.h> // include the generated cgo header as appropriate
}

#include <boost/log/expressions.hpp>
#include "../logging.h"

static void *pullThread(void *self) {
    gomediainterface *tself = (gomediainterface *) self;
    if (tself) {
        LOGD("pullThread %p", tself);
        LOGD("muri %s", tself->GetUri());
        //CallRtspClientStart(reinterpret_cast<void*>(tself->GetgoInstance()), tself->GetUri());
        CallRtspClientStart2(reinterpret_cast<void *>(tself->GetgoInstance()), tself->GetUri(), tself->GetChannel());
    }
    return nullptr;
}
static void *pullWebrtcThread(void *self) {
    gomediainterface *tself = (gomediainterface *) self;
    if (tself) {
        tself->StartWebRTCPull();
    }
    return nullptr;
}
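// ---- Callbacks exported to the Go side (cgo). The Go library invokes these
// C symbols; `callclass` is the gomediainterface instance registered through
// SetCallbackWrapper in the constructors further below. ----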
void OnSendPacket(void *callclass, uint8_t *data, int length, int mediatype, int channel, int inject, int duration) {
    if (callclass) {
        gomediainterface *rtspclient = (gomediainterface *) callclass;
        if (data && length > 0) {
            rtspclient->onMediaDataReceive(data, length, mediatype, channel, inject, duration);
        }
    }
}

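// KVM control functions implemented elsewhere on the host (screen capture,
// keyboard/mouse injection, clipboard); the On* wrappers below forward
// remote commands to them.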
extern int startkvmstream(char *displayname, int width, int height, int pixfmt, int fps, int gop, int ratekbps, int audiomuted, int playerid, int channel);
extern void StopVideoStream(char *videopath, int playerid, int channel);
extern void ReqIDR(int playerid, int channel);
extern void AbsMouseMove(int playerid, int channel, int x, int y);
extern void RelMouseMove(int playerid, int channel, int x, int y);
extern void MouseUp(int playerid, int channel, int button);
extern void MouseDown(int playerid, int channel, int button);
extern void KeyUp(int playerid, int channel, int key, int modifiers);
extern void KeyDown(int playerid, int channel, int key, int modifiers);
extern void KeyTap(int playerid, int channel, int key, int modifiers);
extern void MouseWheel(int playerid, int channel, int delta);
extern void KeyProcess(int playerid, int channel, char *keystr, bool isKeyDown);
extern void ClipBoardText(int playerid, int channel, char *textstr);
extern void SwitchCursor(int playerid, int channel, int ndisplay);
extern void *GetKVMVideoConfigInfo();
void OnClipBoardText(int playerid, int channelid, char *textstr) {
    ClipBoardText(playerid, channelid, textstr);
}
void OnKeyProcess(int playerid, int channelid, char *keystr, int isKeyDown) {
    KeyProcess(playerid, channelid, keystr, isKeyDown != 0);
}
void OnKeyUp(int playerid, int channelid, int key, int modifiers) {
    KeyUp(playerid, channelid, key, modifiers);
}
void OnKeyDown(int playerid, int channelid, int key, int modifiers) {
    KeyDown(playerid, channelid, key, modifiers);
}
void OnKeyTap(int playerid, int channelid, int key, int modifiers) {
    KeyTap(playerid, channelid, key, modifiers);
}
void OnAbsMouseMove(int playerid, int channel, int x, int y) {
    AbsMouseMove(playerid, channel, x, y);
}
void OnRelMouseMove(int playerid, int channel, int x, int y) {
    RelMouseMove(playerid, channel, x, y);
}
void OnMouseUp(int playerid, int channel, int button) {
    MouseUp(playerid, channel, button);
}
void OnMouseDown(int playerid, int channel, int button) {
    MouseDown(playerid, channel, button);
}
void OnMouseWheel(int playerid, int channelid, int delta) {
    MouseWheel(playerid, channelid, delta);
}
void OnSwitchCursor(int playerid, int channel, int ndisplay) {
    SwitchCursor(playerid, channel, ndisplay);
}
// void OnStartTransFile(char *filename) {
// }
void OnStartKVM(char *displayname, int width, int height, int pixfmt, int fps, int gop, int ratekbps, int audiomuted, int playerid, int channel) {
    LOGI("got client command: start KVM %s %d/%d %s fps:%d gop:%d playerid:%d channelid:%d", displayname, width, height, (pixfmt == 0) ? "H264" : (pixfmt == 1) ? "H265" : "AV1", fps, gop, playerid, channel);
    startkvmstream(displayname, width, height, pixfmt, fps, gop, ratekbps, audiomuted, playerid, channel);
}
void OnStopKVM(char *displayname, int playerid, int channel) {
    LOGI("got client command: stop KVM %s %d %d", displayname, playerid, channel);
    StopVideoStream(displayname, playerid, channel);
}
void OnReqIDR(int playerid, int channel) {
    LOGI("got client command: request IDR %d %d", playerid, channel);
    ReqIDR(playerid, channel);
}
void AndroidLogD(char* msg) {
    // BOOST_LOG(info) << "gomedia msg "sv << msg;
    LOGD("go msg %s", msg);
}
// cgo callback: receive the RTSP audio parameters
void OnAudioCodecConfig(void* callclass, unsigned char* data, int length,void * mpeg4audioconf, int mediatype,int channel) {
    LOGI("OnAudioCodecConfig %d %d %d", mediatype, length,channel);
//    dumphex("OnCodecConfig", data, length);
    MPEG4AudioConfig *mpeg4AudioConfig = (MPEG4AudioConfig *) mpeg4audioconf;
    LOGD("OnAudioCodecConfig SampleRate %d ChannelLayout %d ObjectType %d SampleRateIndex %d ChannelConfig %d ",
         mpeg4AudioConfig->SampleRate,
         mpeg4AudioConfig->ChannelLayout,
         mpeg4AudioConfig->ObjectType,
         mpeg4AudioConfig->SampleRateIndex,
         mpeg4AudioConfig->ChannelConfig);
    if (callclass) {
        gomediainterface *rtspclient = (gomediainterface *) callclass;
        if (mediatype == av_AAC) { // AAC: cache the AudioSpecificConfig and sample parameters
            rtspclient->SetConfBuf(data, length);
            rtspclient->SetAudioParameter(mpeg4AudioConfig->SampleRate, mpeg4AudioConfig->ChannelConfig);
        }
    }
}
// cgo callback: receive the RTSP video parameters
void OnVideoCodecConfig(void *callclass, void *configdata, int mediatype, int channel) {
    LOGI("OnVideoCodecConfig %d %d", mediatype, channel);
    VideoCodecData *codecData = (VideoCodecData *) configdata;
    LOGI("OnCodecConfig w/h %d:%d FPS %d ProfileIdc %d", codecData->Width, codecData->Height, codecData->FPS, codecData->ProfileIdc);
    if (callclass) {
        gomediainterface *rtspclient = (gomediainterface *) callclass;
        if (mediatype == av_H264 || mediatype == av_H265) {
            // hand the parsed codec parameters to the interface
            rtspclient->SetVideoParameter(codecData->Width, codecData->Height, codecData->FPS, mediatype);
        }
    }
    // further processing could happen here
}
void OnGoMediaStatus(void *callclass, int status, int channel) {
    LOGD("go msg OnGoMediaStatus status %d", status);
    if (callclass) {
        gomediainterface *rtspclient = (gomediainterface *) callclass;
        auto statusset = rtspclient->GetMediaStatusCallBack();
        if (statusset) {
            statusset(status, channel);
        }
    }
}
// called when an internal disconnect or timeout closes the stream, so the upper layer can shut down automatically
void OnVideoClose(void* callclass,int videoid){
    LOGD("go msg OnVideoClose videoid %d",videoid);
    if(callclass) {
        gomediainterface *rtspclient = (gomediainterface *) callclass;
        auto callb=rtspclient->GetOnVideoCloseCallBack();
        if(callb){
            callb(videoid);
        }
    }
}
void OnAudioClose(void *callclass, int cameraid) {
    LOGD("go msg OnAudioClose cameraid %d", cameraid);
    if (callclass) {
        gomediainterface *rtspclient = (gomediainterface *) callclass;
        auto callb = rtspclient->GetOnAudioCloseCallBack();
        if (callb) {
            callb(cameraid);
        }
    }
}
std::mutex gomediamutex;
std::vector<gomediainterface *> g_GomediaInterfaces; // global registry of live interfaces
void _AddGlobleGoWebrtcInterface(gomediainterface *&go_interface) {
    std::lock_guard<std::mutex> lock(gomediamutex);
    g_GomediaInterfaces.push_back(go_interface);
    LOGI("_AddGlobleGoWebrtcInterface %zu", g_GomediaInterfaces.size());
}
void _ReleaseGlobleGoWebrtcInterface() {
    std::lock_guard<std::mutex> lock(gomediamutex);
    g_GomediaInterfaces.clear();
}
void OnFileDataReceived(void *callclass, int channel, int len, unsigned char *data) {
    if (callclass) {
        LOGD("on file data %d", len);
        // handle received file data here
    }
}
void OnHIDDataReceived(void *callclass, int channel, int len, unsigned char *data) {
    if (callclass) {
        LOGD("on hid data %d", len);
        // handle received HID data here
    }
}

// incoming-call callback: ring the user; the app layer then publishes its stream and answers to start the talk session
void OnTalkCallIn(char *userid) {
    LOGD("have %s call in \n", userid);
    for (size_t i = 0; i < g_GomediaInterfaces.size(); i++) {
        auto interface = g_GomediaInterfaces[i];
        LOGD("g_GomediaInterfaces[%zu]=%p", i, interface);
        if (interface) {
            // placeholder: notify the matching interface of the incoming call
        }
    }
//    char *oncallmsg = "agree call in";
//    AgreeTalk((void *) oncallrtsp->GetgoInstance(), userid, 2, 1, oncallmsg);
}
gomediainterface::gomediainterface(int channel){
    goInstance = NewMediaImplementation();
    if(goInstance){
        LOGD("goInstance %lu",goInstance);
    }else{
        LOGE("NewMediaImplementation FAILED");
    }
    SetCallbackWrapper(reinterpret_cast<void*>(goInstance), reinterpret_cast<void*>(this),channel);
//    mChannel=channel;

}
gomediainterface::gomediainterface(const char *uri,int channel){
    if(mUri==nullptr){
        mUri=new char[strlen(uri)+1];
        if(mUri!=nullptr){
            strcpy(mUri,uri);
            LOGD("mUri %s",mUri);
        }

    }
    goInstance = NewMediaImplementation();
    if(goInstance){
        LOGD("goInstance %lu",goInstance);
    }else{
        LOGE("NewMediaImplementation FAILED");
    }
    SetCallbackWrapper(reinterpret_cast<void*>(goInstance), reinterpret_cast<void*>(this),channel);
    mChannel=channel;

}
// this constructor also starts WebRTC signaling directly
gomediainterface::gomediainterface(const char *uri,int channel,char *mqttserver,char* sn,char *clientid,char * stunserver,char * turnserver ,char *turnservername,char * turnserverpassword,bool busewebrtc){
    if(mUri==nullptr){
        mUri=new char[strlen(uri)+1];
        if(mUri!=nullptr){
            strcpy(mUri,uri);
            LOGD("mUri %s",mUri);
        }

    }
    goInstance = NewMediaImplementation();
    if(goInstance)    LOGD("goInstance %lu",goInstance);
    SetCallbackWrapper(reinterpret_cast<void*>(goInstance), reinterpret_cast<void*>(this),channel);
    mChannel=channel;
    if(busewebrtc){
        SetWebRTCConfig(mqttserver, sn, clientid,
                        stunserver, turnserver, turnservername,
                        turnserverpassword);
    }
}
gomediainterface::~gomediainterface() {
    // mUri/mServername/mStreamname/mTransmode are allocated with new[],
    // so they must be released with delete[]
    if (mUri != nullptr) {
        delete[] mUri;
        mUri = nullptr;
    }
    if (mServername) {
        delete[] mServername;
        mServername = nullptr;
    }
    if (mStreamname) {
        delete[] mStreamname;
        mStreamname = nullptr;
    }
    if (mTransmode) {
        delete[] mTransmode;
        mTransmode = nullptr;
    }
    if (opusdecoder) {
        delete opusdecoder;
        opusdecoder = nullptr;
    }
    if (aacDecoder) {
        delete aacDecoder;
        aacDecoder = nullptr;
    }
#if USE_MIXER
    if (audiomixer) {
        delete audiomixer;
        audiomixer = nullptr;
    }
#endif
    if (goInstance) { // destroy the Go-side instance
        RemoveMediaImplementation(reinterpret_cast<void *>(goInstance));
        goInstance = 0;
    }
}
void gomediainterface::StartRtspClient()
{
    // pull_rtsp_client_thread=std::thread(&rtspClient::pullThread, this,this);

    pthread_create(&pull_rtsp_client_thread, 0, pullThread, (void*)this);
}
void gomediainterface::GetRtspConfig(int channel){
    LOGD("GetRtspConfig");
    GetMediaAudioExternData(reinterpret_cast<void*>(goInstance),channel);
}
void gomediainterface::StopRtspClient(int channel){
    LOGD("gomediainterface::StopRtspClient close ...");
    CallRtspClientStop(reinterpret_cast<void*>(goInstance),channel);
    if(pull_rtsp_client_thread){
        pthread_join(pull_rtsp_client_thread,NULL);
    }
    LOGD("gomediainterface::StopRtspClient closed");
    // pull_rtsp_client_thread.join();
}
int gomediainterface::PublishStream(char *streamname,int channel,int mediatype,int card,int device){
    int deviceid=-1;
    switch(mediatype) {
        case av_H264:
        case av_H265:
            deviceid = CallStartVideoStreamPublish((void *) (goInstance), streamname, channel);
            LOGD("\ngomediainterface PublishStream deviceid is %d\n", deviceid);
            return deviceid;
        case av_OPUS:
            deviceid=CallStartAudioStreamPublish((void*)(goInstance),streamname,card,device,channel);
            LOGD("\ngomediainterface PublishStream deviceid is %d\n",deviceid);
            return deviceid;
        default:
            break;
    }
    return deviceid;
//        mVideodeviceID=deviceid;
}
// Pull stream `streamname` from node `servername`: a cascaded pull that also
// publishes a local copy for local processing and for sharing with other nodes.
// transmode is "webrtc_trans" or "datachannel_trans"; H265 currently only
// supports datachannel_trans. bOnDataCall enables the data callback for
// rendering; without it, the data is only cascaded into the local stream.
void gomediainterface::SetWebRTCPullStream(char *servername, char *streamname, char *transmode, bool bIsH265, int bUseVideo, int bUseAudio, int bOnDataCall, int signalid, int channel) {
    if (mServername) {
        delete[] mServername;
        mServername = nullptr;
    }
    if (mStreamname) {
        delete[] mStreamname;
        mStreamname = nullptr;
    }
    if (mTransmode) {
        delete[] mTransmode;
        mTransmode = nullptr;
    }
    // note: new char[n], not new char(n) -- the latter allocates a single char
    mServername = new char[strlen(servername) + 1];
    strcpy(mServername, servername);
    mStreamname = new char[strlen(streamname) + 1];
    strcpy(mStreamname, streamname);
    mTransmode = new char[strlen(transmode) + 1];
    strcpy(mTransmode, transmode);
    mSignalid = signalid;
    mbIsH265 = bIsH265;
    mbUseVideo = bUseVideo;
    mbUseAudio = bUseAudio;
    mbOnDataCall = bOnDataCall;
    mChannel = channel;
}
void gomediainterface::StartWebRTCPullStream() {
    pthread_create(&pull_webrtc_client_thread, 0, pullWebrtcThread, (void*)this);
}
void gomediainterface::StartWebRTCPull() {
    LOGD("StartWebRTCPull %s %s %s %d %d %d %d %d %d\n",mServername,mStreamname,mTransmode,mbIsH265,mbUseVideo,mbUseAudio,mbOnDataCall,mChannel,mSignalid);
    if(mbIsH265) {
        CallStartVideoStreamPull((void *) (goInstance),mSignalid, mServername, mStreamname,"audio/opus", "video/H265", "datachannel_trans",mbUseVideo, mbUseAudio, mbOnDataCall,mChannel);
    }else{
        CallStartVideoStreamPull((void *) (goInstance),mSignalid, mServername, mStreamname, "audio/opus", "video/H264",mTransmode, mbUseVideo, mbUseAudio, mbOnDataCall,mChannel);
    }
}
// set the WebRTC parameters and start the MQTT signaling agent
void gomediainterface::SetWebRTCConfig(char *mqttserver, char *sn, char *clientid, char *stunserver, char *turnserver, char *turnservername, char *turnserverpassword, int signalid) {
    goWebrtcConfig = GetMediaWebRTCConfig((void *) (goInstance), signalid);
    if (goWebrtcConfig) {
        LOGD("GetKVMVideoConfigInfo\r\n");
        auto config = (VideoConfigInfo *) GetKVMVideoConfigInfo();
        if (config) { // set the KVM video-capture parameters used for registration
            BOOST_LOG(info) << "videoconfig width: "sv << config->width << " height: "sv << config->height
                            << " framerate: "sv << config->framerate << " videoformat: "sv << config->videoFormat;
            SetVideoConfigInfo((void *) goWebrtcConfig, config);
        }
        SetWebrtcConfig((void *) goWebrtcConfig, mqttserver, sn, clientid, stunserver, turnserver, turnservername, turnserverpassword);
        CallStartWebrtc((void *) (goInstance), signalid);
    }
}
// stop signaling
void gomediainterface::StopWebRTCSignal(int signalid){
    CallStopWebrtc((void *) (goInstance),signalid);
}
void gomediainterface::StopWebRTCPullStream(char *streamname){
    LOGD("gomediainterface::StopWebRTCPullStream  CallStopWebrtcStream close %s...",streamname);
    CallStopWebrtcStream(reinterpret_cast<void*>(goInstance),streamname);
}
void gomediainterface::StopWebRTCPullStream(int channel) {
    LOGD("gomediainterface::StopWebRTCPullStream close %d...", channel);
//    stop-by-channel is not wired up yet:
//    CallStopWebrtcStream(reinterpret_cast<void*>(goInstance), channel);
}
void gomediainterface::StopWebRTCClient(){
    if(pull_webrtc_client_thread){
        pthread_join(pull_webrtc_client_thread,NULL);
    }
    LOGD("gomediainterface::StopWebRTCClient closed");
    // pull_rtsp_client_thread.join();
}
// This callback fires only for RTSP or WebRTC pulls; the extra flag says whether the data should be re-injected.
void gomediainterface::onMediaDataReceive(uint8_t *data, int length, int mediatype, int channel, int binject, int duration) {
//    LOGD("onMediaDataReceive channel:%d mediatype:%d length %d", mChannel, mediatype, length);
//    Nalu::dumphex("onMediaDataReceive", data, length);
    if (mMediacallfunc) mMediacallfunc((void *) data, (size_t) length, mediatype, channel); // deliver to the app callback
    if (binject) {
        // Inject the data back into the WebRTC stream. Internally this checks for a
        // virtual camera/audio node: only local device nodes or RTSP pulls publish an
        // internal virtual node; a WebRTC pull has stream management only, so injecting
        // data pulled from WebRTC returns immediately. There is currently no other way
        // to filter what gets injected. Injected audio must be Opus; if the callback
        // data is AAC it would first need decoding or transcoding.
        if (mediatype == av_AAC) {
            // AAC injection not handled here; see SendToWebrtc/trancodeAAC2OPUS below
        } else {
            tInfo videoinfo;
            videoinfo.a = data;          // payload pointer
            videoinfo.size = length;     // payload size
            videoinfo.mediatype = mediatype;
            videoinfo.devid = channel;
            CallSendData((void *) goInstance, &videoinfo);
        }
    }
}
void gomediainterface::SendToWebrtc(void *data, int length, int mediatype, int channel) {
    if (mediatype == av_AAC) {
        // transcode AAC to Opus before injection
        trancodeAAC2OPUS(data, length, channel);
    } else {
        tInfo videoinfo;
        videoinfo.a = data;          // payload pointer
        videoinfo.size = length;     // payload size
        videoinfo.mediatype = mediatype;
        videoinfo.devid = channel;
        CallSendData((void *) goInstance, &videoinfo);
    }
}

void gomediainterface::trancodeAAC2OPUS(void *databuffer, int len, int channel) {
    void *pData = databuffer;
    int outSize = len;
    // lazily create the AAC decoder from the cached AudioSpecificConfig
    if (!aacDecoder) {
        aacDecoder = new AACDecoder();
        if (!aacDecoder) {
            return;
        }
        if (aacDecoder->initDecoder(mConfBuf, mConfBufLen)) {
            char *pOut = (char *) pData; // decode PCM in place over the input buffer
            aacDecoder->Decode((char *) databuffer, static_cast<UINT>(len), pOut, outSize);
        } else {
            return;
        }
    } else {
        char *pOut = (char *) pData;
        aacDecoder->Decode((char *) databuffer, static_cast<UINT>(len), pOut, outSize);
    }
#if USE_MIXER
    if (audiomixer == nullptr) {
        audiomixer = new AudioMixer();
        if (audiomixer) {
            // add the input stream; parameters follow the actual PCM data
            audiomixer->addAudioInput(0, mAudioRate, mAudioChannels, 16, AV_SAMPLE_FMT_S16);
            // add the output stream
            audiomixer->addAudioOutput(48000, 2, 16, AV_SAMPLE_FMT_S16);
            // initialize
            if (audiomixer->init("longest") < 0) {
                printf("audioMix.init() failed!");
            } else {
                if (audiomixer->addFrame(0, (uint8_t *) pData, outSize) < 0) {
                    printf("audioMix.addFrame() failed!");
                    return;
                } else {
                    while ((outSize = audiomixer->getFrame((uint8_t *) pData, 10240)) > 0) {
                        printf("audiomixer->getFrame %d \n", outSize);
                    }
                }
            }
        }
    } else {
        if (audiomixer->addFrame(0, (uint8_t *) pData, outSize) < 0) {
            printf("audioMix.addFrame() failed!");
            return;
        } else {
            while ((outSize = audiomixer->getFrame((uint8_t *) pData, 10240)) > 0) {
                printf("audiomixer->getFrame %d \n", outSize);
            }
        }
    }
#endif
    // lazily create the Opus encoder (48 kHz stereo) and bind its output
    // back into SendToWebrtc for injection into the WebRTC stream
    if (!opusdecoder) {
        opusdecoder = new OpusCodec;
        opusdecoder->initEncoder(48000, 2, 0);
        opus_frame_callback_ = std::bind(&gomediainterface::SendToWebrtc, this, std::placeholders::_1,
                                         std::placeholders::_2, std::placeholders::_3,
                                         std::placeholders::_4);
    }
    opusdecoder->encode((const short *) pData, outSize, channel, opus_frame_callback_);
}
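The transcode path above decodes AAC to PCM, optionally mixes, then encodes Opus at 48 kHz stereo for injection. For readers without the in-house AACDecoder/OpusCodec wrappers, here is a minimal sketch of just the Opus leg using libopus directly; the frame size and buffer bounds are illustrative:

#include <opus/opus.h>
#include <cstdio>

int main() {
    int err = 0;
    // 48 kHz stereo, matching initEncoder(48000, 2, 0) above
    OpusEncoder *enc = opus_encoder_create(48000, 2, OPUS_APPLICATION_VOIP, &err);
    if (err != OPUS_OK) return 1;

    // Opus encodes fixed-size frames; 960 samples/channel = 20 ms at 48 kHz
    const int frame_size = 960;
    opus_int16 pcm[960 * 2] = {0};     // interleaved stereo input (silence here)
    unsigned char packet[4000];        // upper bound for one encoded packet

    opus_int32 n = opus_encode(enc, pcm, frame_size, packet, sizeof(packet));
    if (n > 0)
        printf("encoded %d bytes\n", n); // hand `packet` to SendToWebrtc-style injection
    opus_encoder_destroy(enc);
    return 0;
}

20 ms frames are the usual WebRTC choice; shorter frames trade some bitrate efficiency for lower latency.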