Screen-Mirroring Sink: Decoding Audio in the Native Layer and Playing It with OpenSL ES

Published: 2023-03-31 23:55:53 | Author: 咸鱼Jay

1. Code Analysis

In the company's project, audio decoding and playback are handled by passing the data up to the Java layer, which decodes and plays it. This round trip is actually unnecessary: the whole thing can be handled in the native layer.

Reading the code, it turns out native decoding has in fact already been implemented, so why not decode and play directly in the native layer? First, let's see how the original code is structured:
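The project's actual code is not reproduced here, so the snippet below is only a hedged reconstruction of the control flow described in the next paragraph. Only the macro DECODE_AUDIO_IN_JAVA and the member mAudioCodec are names taken from the project; every other identifier is made up for illustration.

// Hypothetical reconstruction -- only DECODE_AUDIO_IN_JAVA and mAudioCodec come from
// the project; all other names here are illustrative.
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ALooper.h>

using namespace android;

struct AudioSink {
    sp<MediaCodec> mAudioCodec;   // stays NULL when decoding is done in Java

    void setup(const sp<ALooper> &codecLooper) {
#ifndef DECODE_AUDIO_IN_JAVA
        // Native path: create an AAC decoder in C++ (the mime type is an assumption).
        mAudioCodec = MediaCodec::CreateByType(codecLooper, "audio/mp4a-latm", false /* encoder */);
#endif
    }

    void onAudioAccessUnit(const sp<ABuffer> &au) {
        if (mAudioCodec == NULL) {
            sendToJavaForDecodeAndPlay(au);   // original behaviour: Java decodes and plays
        } else {
            decodeNativelyThenSendPcmUp(au);  // native decode; PCM is still played by AudioTrack in Java
        }
    }

    // Assumed helpers, not part of the post.
    void sendToJavaForDecodeAndPlay(const sp<ABuffer> &au);
    void decodeNativelyThenSendPcmUp(const sp<ABuffer> &au);
};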



A macro, DECODE_AUDIO_IN_JAVA, controls whether the mAudioCodec object is created, and whether mAudioCodec is null then determines whether the audio data is handed to the Java layer. In other words, the code already supports decoding in the native layer and passing the resulting PCM back up to be played with AudioTrack. However, after changing DECODE_AUDIO_IN_JAVA so that decoding happens in the native layer, playback turned out to be extremely choppy.

2. Solution

It eventually turned out that on the native path the wrong data size was being passed along with the decoded audio, and that is what made playback so choppy.

The fix is simply to change the size that is passed to info.size, as sketched below.
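The change itself is a one-liner. Since the project's decode loop is not shown, the sketch below uses the NDK AMediaCodec API purely as an assumption, and writePcm() is a hypothetical hand-off to playback. The point it illustrates is the one above: hand the codec-reported info.size bytes to playback, not the output buffer's full capacity.

// Sketch only: the decode loop below assumes the NDK AMediaCodec API;
// writePcm() stands in for however the project forwards PCM to playback.
#include <media/NdkMediaCodec.h>

void writePcm(const uint8_t *data, size_t len);   // assumed playback hand-off

void drainDecodedAudio(AMediaCodec *codec) {
    AMediaCodecBufferInfo info;
    ssize_t idx = AMediaCodec_dequeueOutputBuffer(codec, &info, 10000 /* us */);
    if (idx >= 0) {
        size_t capacity = 0;
        uint8_t *out = AMediaCodec_getOutputBuffer(codec, idx, &capacity);

        // Wrong: passing 'capacity' pads every frame with stale bytes -> choppy playback.
        // writePcm(out, capacity);

        // Right: only info.size bytes of the buffer hold valid decoded PCM.
        writePcm(out + info.offset, info.size);

        AMediaCodec_releaseOutputBuffer(codec, idx, false /* render */);
    }
}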

3. Playing Audio in the Native Layer

However, with the change above, playback itself is still handed off to the Java layer.

We can instead use OpenSL ES to play the audio directly in the native layer.

3.1 Implementation of the OpenSLRender class

The header and implementation are listed together below. The class keeps decoded PCM in a small jitter buffer, waits until enough data has accumulated, and then lets the OpenSL ES buffer-queue callback pull one buffer (or silence) per period, dropping frames when the queue overflows.

#ifndef _OPENSLRENDER_HEAD_
#define _OPENSLRENDER_HEAD_
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <utils/Thread.h>
#include <utils/List.h>
#include <utils/Mutex.h>
#include <pthread.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
 
namespace android{
    class OpenSLRender : public Thread {
    public:
        OpenSLRender(int64_t buffertime,int32_t bufferframes=5);
        ~OpenSLRender();
        bool init(int32_t chanNum,int rate);
        void stop();
        void setBufferTimes(int64_t buffertime);
        void queueInputBuffer(sp<ABuffer> data);
        void playerCallback();
    private:
        SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
        SLObjectItf bqPlayerObject;
        SLPlayItf bqPlayerPlay;
        SLObjectItf outputMixObject;
        SLObjectItf engineObject;
 
        List<sp<ABuffer>> mList;
        int64_t mBufferTimeUs;
        int32_t mBufferFrames;
        int64_t mLasPts;
        bool bFist;
        pthread_mutex_t startMutex;
        pthread_cond_t startCond;
 
        Mutex dataMutex;        // guards buffer list access across producer/consumer threads
        bool bRun;
        sp<ABuffer> mMuteData;
        int64_t mlastAudtime;
        int mPlayAudFrames;
        int mDropFrames;
        int32_t muteCounts;
        sp<ABuffer> mRenderData;
        int32_t mOverFlowContinuetimes;
    private:
        virtual bool threadLoop();
        sp<ABuffer> intervalOut(int gap);
        sp<ABuffer> dropToMaxBuffer(int gap);
        sp<ABuffer> dropAutoNums();
        sp<ABuffer> getNextBuffer();
        void destroy();
        static void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context);
        DISALLOW_EVIL_CONSTRUCTORS(OpenSLRender);
    };
}
#endif
// ==== OpenSLRender.cpp ====
//#define LOG_NDEBUG 0
#define LOG_TAG "OpenSLRender"
 
#include "OpenSLRender.h"
#include <media/stagefright/foundation/ALooper.h>
#include <utils/Log.h>
 
#define UNUSED(x) ((void)x)
#define AUD_DROP_THRESHOLD 5
namespace android{
    OpenSLRender::OpenSLRender(int64_t bufferTime,int32_t bufferFrames):
            bqPlayerBufferQueue(NULL),
            bqPlayerObject(NULL),
            bqPlayerPlay(NULL),
            outputMixObject(NULL),
            engineObject(NULL),
            mBufferTimeUs(bufferTime),
            mBufferFrames(bufferFrames),
            mLasPts(0),
            bFist(true),
            startMutex(PTHREAD_MUTEX_INITIALIZER),
            startCond(PTHREAD_COND_INITIALIZER),
            bRun(true),
            mMuteData(new ABuffer(2048)),
            mlastAudtime(0),
            mPlayAudFrames(0),
            mDropFrames(0),
            muteCounts(0),
            mRenderData(NULL),
            mOverFlowContinuetimes(0){
        // pre-fill the mute buffer with silence; it is enqueued whenever no PCM is available
        memset(mMuteData->data(),0,mMuteData->size());
    }
    OpenSLRender::~OpenSLRender(){
        stop();
        requestExit();
        requestExitAndWait();
        //this.clear(); //sp<>.clear, this is not sp
    }
 
    bool OpenSLRender::init(int32_t chanNum,int rate){
        // engine interfaces
        SLEngineItf engineEngine;
 
        // output mix interfaces
        SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
 
        // aux effect on the output mix, used by the buffer queue player
        const SLEnvironmentalReverbSettings reverbSettings =
                SL_I3DL2_ENVIRONMENT_PRESET_DEFAULT;
 
        // buffer queue player interfaces
        SLresult result;
 
        // create engine
        result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
        CHECK(SL_RESULT_SUCCESS == result);
        (void)result;
 
        // realize the engine
        result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
        CHECK(SL_RESULT_SUCCESS == result);
        (void)result;
 
        // get the engine interface, which is needed in order to create other objects
        result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
        CHECK(SL_RESULT_SUCCESS == result);
        (void)result;
 
        // create output mix, with environmental reverb specified as a non-required interface
        const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
        const SLboolean req[1] = {SL_BOOLEAN_FALSE};
        result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
        CHECK(SL_RESULT_SUCCESS == result);
        (void)result;
 
        // realize the output mix
        result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
        CHECK(SL_RESULT_SUCCESS == result);
        (void)result;
 
        // get the environmental reverb interface
        // this could fail if the environmental reverb effect is not available,
        // either because the feature is not present, excessive CPU load, or
        // the required MODIFY_AUDIO_SETTINGS permission was not requested and granted
        result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
                                                  &outputMixEnvironmentalReverb);
        if (SL_RESULT_SUCCESS == result) {
            result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                    outputMixEnvironmentalReverb, &reverbSettings);
            (void)result;
        }
        // ignore unsuccessful result codes for environmental reverb, as it is optional for this example
 
        {
            // configure audio source
            // only 44.1 kHz and 48 kHz are mapped; any other rate falls back to 48 kHz
            SLuint32 samplesPerSec = SL_SAMPLINGRATE_48;
            if(48000 == rate){
                samplesPerSec = SL_SAMPLINGRATE_48;
            }else if(44100 == rate){
                samplesPerSec = SL_SAMPLINGRATE_44_1;
            }
            SLuint32 audChan = chanNum;
            SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
            SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM,
                                           (audChan == 0) ? 2 : audChan,
                                           samplesPerSec,
                                           SL_PCMSAMPLEFORMAT_FIXED_16,
                                           SL_PCMSAMPLEFORMAT_FIXED_16,
                                           SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT, // speaker mask assumes stereo output
                                           SL_BYTEORDER_LITTLEENDIAN};
            /*
             * Enable Fast Audio when possible:  once we set the same rate to be the native, fast audio path
             * will be triggered
             */
            SLDataSource audioSrc = {&loc_bufq, &format_pcm};
 
            // configure audio sink
            SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
            SLDataSink audioSnk = {&loc_outmix, NULL};
 
            /*
             * create audio player:
             *     fast audio does not support when SL_IID_EFFECTSEND is required, skip it
             *     for fast audio case
             */
            const SLInterfaceID ids[2] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME
                                          /* SL_IID_EFFECTSEND, SL_IID_MUTESOLO: skipped for fast audio */};
            const SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
 
            result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
                                                        2, ids, req);
            CHECK(SL_RESULT_SUCCESS == result);
            (void)result;
 
            // realize the player
            result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
            CHECK(SL_RESULT_SUCCESS == result);
            (void)result;
 
            // get the play interface
            result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
            CHECK(SL_RESULT_SUCCESS == result);
            (void)result;
 
            // get the buffer queue interface
            result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
                                                     &bqPlayerBufferQueue);
            CHECK(SL_RESULT_SUCCESS == result);
            (void)result;
            // register callback on the buffer queue
            result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, this);
            CHECK(SL_RESULT_SUCCESS == result);
            (void)result;
 
            // set the player's state to playing
            result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
            CHECK(SL_RESULT_SUCCESS == result);
            (void)result;
 
            status_t err = run("opensl buffering", ANDROID_PRIORITY_AUDIO);
            CHECK(err==OK);
            return true;
        }
    }
 
    void OpenSLRender::destroy(){
        ALOGE("OpenSLRender destroy ![%s][%d]",__FUNCTION__,__LINE__);
        if (bqPlayerPlay != NULL) {
            (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_STOPPED);
        }
        // destroy buffer queue audio player object, and invalidate all associated interfaces
        if (bqPlayerObject != NULL) {
            (*bqPlayerObject)->Destroy(bqPlayerObject);
            bqPlayerObject = NULL;
            bqPlayerPlay = NULL;
            bqPlayerBufferQueue = NULL;
        }
 
        // destroy output mix object, and invalidate all associated interfaces
        if (outputMixObject != NULL) {
            (*outputMixObject)->Destroy(outputMixObject);
            outputMixObject = NULL;
        }
 
        // destroy engine object, and invalidate all associated interfaces
        if (engineObject != NULL) {
            (*engineObject)->Destroy(engineObject);
            engineObject = NULL;
        }
    }
    void OpenSLRender::stop(){
        // AutoMutex _l(dataMutex);
        ALOGE("OpenSLRender_stop:[%s%d]",__FUNCTION__,__LINE__);
        if(bRun==true){
            bRun=false;
            // wake the bootstrap thread in case it is still waiting for the first buffers,
            // otherwise requestExitAndWait() in the destructor could block forever
            pthread_mutex_lock(&startMutex);
            pthread_cond_signal(&startCond);
            pthread_mutex_unlock(&startMutex);
            destroy();
        }
    }
    // allow the buffering target to be adjusted at runtime
    void OpenSLRender::setBufferTimes(int64_t buffertime){
        AutoMutex _l(dataMutex);
        mBufferTimeUs = buffertime;
    }
    void OpenSLRender::queueInputBuffer(sp<ABuffer> data){
        // input path: called from the decoder thread, so be careful -- everything below runs under dataMutex
        AutoMutex _l(dataMutex);
        // drop buffers whose pts runs backwards
        if(!mList.empty()){
            sp<ABuffer> max = *(--mList.end());
            int64_t dataPts=0,maxPts=0;
            CHECK(data->meta()->findInt64("timePts", &dataPts));
            CHECK(max->meta()->findInt64("timePts", &maxPts));
            if(dataPts <  maxPts){
                ALOGD("[%s%d] pts error data:%lld list:%lld\n",__FUNCTION__,__LINE__,(long long)dataPts,(long long)maxPts);
                return;
            }
        }
#if ENABLE_STATISTICS
        ALOGD(COMMON_DEBUG," Audio in, %lld remain __%ld__ [%s%d]\n",ALooper::GetNowUs(), mList.size(),__FUNCTION__,__LINE__);
#endif
        mList.push_back(data);
        if(bFist){
            sp<ABuffer> min = *mList.begin();
            sp<ABuffer> max = *(--mList.end());
            int64_t minPts=0,maxPts=0;
            CHECK(min->meta()->findInt64("timePts", &minPts));
            CHECK(max->meta()->findInt64("timePts", &maxPts));
//            ALOGE("==minPts=%lld,maxPts:%lld,mBufferTimeUs:%lld,(maxPts - minPts)=%lld",minPts,maxPts,mBufferTimeUs,(maxPts - minPts));
            // start playback once half of the configured buffer time or frame count is queued
            //if((maxPts - minPts > mBufferTimeUs)  || mList.size()>=mBufferFrames){
            if((maxPts - minPts > mBufferTimeUs/2)  || mList.size()>=mBufferFrames/2){
                //buffer over! go---------
                pthread_mutex_lock(&startMutex);
                pthread_cond_signal(&startCond);
                pthread_mutex_unlock(&startMutex);
            }
        }
    }
    void OpenSLRender::playerCallback(){
        AutoMutex _l(dataMutex);
        if(!bRun){
            return;
        }
        int64_t nowUs = ALooper::GetNowUs();
        if(!mList.empty()){
            sp<ABuffer> min = *mList.begin();
            sp<ABuffer> max = *(--mList.end());
            int64_t minPts=0,maxPts=0;
            CHECK(min->meta()->findInt64("timePts", &minPts));
            CHECK(max->meta()->findInt64("timePts", &maxPts));
            //if(maxPts - minPts > mBufferTimeUs -timeDuration){
            if(mList.size()>=mBufferFrames) {
                mOverFlowContinuetimes++;
            }else{
                mOverFlowContinuetimes = 0;
            }
            if(mOverFlowContinuetimes > AUD_DROP_THRESHOLD)
            {
                //"Break out"
                //Take one output to render       for every two buffers
                //data = intervalOut(2);
                //data = dropAutoNums();
                int flowFrames = mList.size() - mBufferFrames;
                if( flowFrames >= mBufferFrames){
                    //ALOGD(COMMON_DEBUG,"video jetterbuff dopallflows %d  [%s%d] mList.size():%ld \n",flowFrames,__FUNCTION__,__LINE__,mList.size());
                    sp<ABuffer> data = dropToMaxBuffer(flowFrames);
                    mRenderData = getNextBuffer();
                }else{
                    //"Break out"
                    //Take one output to render       for every two buffers
                    sp<ABuffer> data = dropToMaxBuffer(2);
                    mRenderData = getNextBuffer();
                }
                mOverFlowContinuetimes = 0;
            }else{
                //one by one
                mRenderData = getNextBuffer();
                mPlayAudFrames++;
            }
        }else{
            mRenderData = mMuteData;
            muteCounts++;
            mOverFlowContinuetimes = 0;
        }
        if(mRenderData ==NULL){
            // just give the mute data
            mRenderData = mMuteData;
            muteCounts++;
        }
        SLresult result;
        //enqueue another buffer
        result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, mRenderData->data(), mRenderData->size());
        // the most likely other result is SL_RESULT_BUFFER_INSUFFICIENT,
        // which for this code example would indicate a programming error
        if (SL_RESULT_SUCCESS != result) {
            ALOGE("Enqueue failed: %d [%s%d]", (int)result, __FUNCTION__, __LINE__);
        }
 
        if(!mlastAudtime)
        {
            mlastAudtime = nowUs;
        }
        if(nowUs - mlastAudtime >= 1000*1000)
        {
            ALOGE("playback(%d) droped(%d) muteCounts(%d) frames in one second,QSize:%d",mPlayAudFrames,mDropFrames,muteCounts,(int32_t)mList.size());
            mDropFrames = 0;
            mPlayAudFrames = 0;
            mlastAudtime = nowUs;
            muteCounts = 0;
        }
    }
 
    void OpenSLRender::bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context){
        UNUSED(bq);
        OpenSLRender * pRender =static_cast<OpenSLRender*>(context);
        if(pRender){
            pRender->playerCallback();
        }
    }
 
 
    sp<ABuffer> OpenSLRender::intervalOut(int gap){
        int count =0;
        sp<ABuffer> data = NULL;
        while( (data = getNextBuffer())!=NULL && ++count < gap){
            //ALOGD(COMMON_DEBUG," audio drop one [%s%d] remain mList.size():%ld \n",__FUNCTION__,__LINE__,mList.size());
        }
        return data;
    }
 
    sp<ABuffer> OpenSLRender::dropToMaxBuffer(int gap){
        sp<ABuffer> data = NULL;
        int count = 0;
        while( (data = getNextBuffer())!=NULL && count++ < gap){
            mDropFrames++;
            //ALOGD(COMMON_DEBUG," audio drop one [%s%d] remain mList.size():%ld \n",__FUNCTION__,__LINE__,mList.size());
        }
        return data;
    }
    sp<ABuffer> OpenSLRender::dropAutoNums(){
        sp<ABuffer> data = NULL;
        while( (data = getNextBuffer())!=NULL && muteCounts>0){
            muteCounts--;
            //ALOGD(COMMON_DEBUG," audio drop one [%s%d] remain mList.size():%ld \n",__FUNCTION__,__LINE__,mList.size());
        }
        return data;
    }
 
 
    sp<ABuffer> OpenSLRender::getNextBuffer(){
        if(!mList.empty()){
            sp<ABuffer> data = *mList.begin();
            mList.erase(mList.begin());
            return data;
        }
        return NULL;
    }
 
 
    bool OpenSLRender::threadLoop(){
        if(bFist){
            pthread_mutex_lock(&startMutex);
            pthread_cond_wait(&startCond,&startMutex);
            pthread_mutex_unlock(&startMutex);
            ALOGE("[%s%d]start out\n",__FUNCTION__,__LINE__);
            bFist = false;
        }
        // kick off the first Enqueue; afterwards playback is driven entirely by
        // the OpenSL ES buffer-queue callback (bqPlayerCallback -> playerCallback)
        playerCallback();
        // returning false ends this thread: it exists only to bootstrap playback
        return false;
    }
}

3.2 Using the OpenSLRender class

3.2.1 Creating and initializing an OpenSLRender object
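A minimal sketch of how the class from section 3.1 would be created and brought up, derived only from its public interface. The wrapper function, the member name mOpenSLRender, and the 200 ms / 5-frame buffering values are assumptions rather than the project's actual code.

// Assumed wrapper code -- only the OpenSLRender interface itself comes from 3.1.
#include "OpenSLRender.h"
using namespace android;

sp<OpenSLRender> mOpenSLRender;

void startAudioRender(int32_t channelCount, int32_t sampleRate) {
    // ~200 ms or 5 frames of jitter buffer before playback starts (values are assumptions)
    mOpenSLRender = new OpenSLRender(200 * 1000 /* bufferTime, us */, 5 /* bufferFrames */);
    if (!mOpenSLRender->init(channelCount, sampleRate)) {   // e.g. 2 channels, 48000 Hz
        ALOGE("OpenSLRender init failed");
        mOpenSLRender.clear();
    }
}

init() builds the OpenSL ES engine, output mix, and buffer-queue player, puts the player into SL_PLAYSTATE_PLAYING, and starts the small bootstrap thread.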

3.2.2 Feeding data to OpenSLRender for playback
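Continuing the assumed wrapper from 3.2.1, each decoded PCM chunk is wrapped in an ABuffer, stamped with its presentation time, and queued. The "timePts" meta entry is mandatory: both queueInputBuffer() and playerCallback() CHECK for it.

// Assumed glue code: wrap the decoded PCM in an ABuffer, attach its pts, and queue it.
void onPcmDecoded(const uint8_t *pcm, size_t size, int64_t ptsUs) {
    sp<ABuffer> buf = new ABuffer(size);
    memcpy(buf->data(), pcm, size);
    buf->meta()->setInt64("timePts", ptsUs);   // required by OpenSLRender's CHECKs
    mOpenSLRender->queueInputBuffer(buf);
}

Playback starts once roughly half of the configured buffer time or frame count has accumulated; after that the buffer-queue callback pulls one buffer per period and enqueues silence when the queue runs dry.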

3.2.3 Stopping OpenSLRender
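Finally, a sketch of the teardown, again using the assumed wrapper. stop() flips bRun so the buffer-queue callback becomes a no-op and then destroys the OpenSL ES objects; releasing the sp<> afterwards lets ~OpenSLRender() wait for the bootstrap thread via requestExitAndWait().

// Assumed teardown wrapper.
void stopAudioRender() {
    if (mOpenSLRender != NULL) {
        mOpenSLRender->stop();     // bRun = false, OpenSL ES objects destroyed
        mOpenSLRender.clear();     // drop our reference; the destructor joins the thread
    }
}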