Android 4.0 Ice Cream Sandwich Media Framework

Android 4.0 Ice Cream Sandwich Media Framework (1)

A record of code tracing through the Android 4.0 media framework, covering Stagefright and NuPlayer.



1. In an APK, when we program to play a media (audio/video) file:
            mMediaPlayer = new MediaPlayer();
            mMediaPlayer.setDataSource(path);
            mMediaPlayer.prepare();
            mMediaPlayer.start();
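The same four-step sequence exists at the native level through libmedia's C++ MediaPlayer class, which the Java class wraps via JNI. A minimal sketch, assuming the platform headers are available (error handling, headers, and surface setup are elided; playUrl is a made-up helper, not a framework function):

#include <media/mediaplayer.h>   // android::MediaPlayer (libmedia)

using namespace android;

// Hypothetical helper mirroring the Java sequence above.
status_t playUrl(const char *url) {
    sp<MediaPlayer> mp = new MediaPlayer();
    status_t err = mp->setDataSource(url, NULL /* headers */);
    if (err != OK) return err;
    err = mp->prepare();         // synchronous prepare
    if (err != OK) return err;
    return mp->start();          // kicks off playback
}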

2. In Media Framework: android_src\frameworks\base\media\libmediaplayerservice\MediaPlayerService.cpp

Here, MediaPlayerService is the interface with the MediaPlayer class in the Android SDK.

extmap FILE_EXTS[] = {
        { ".mid",   SONIVOX_PLAYER },
        { ".midi",  SONIVOX_PLAYER },
        { ".smf",   SONIVOX_PLAYER },
        { ".xmf",   SONIVOX_PLAYER },
        { ".imy",   SONIVOX_PLAYER },
        { ".rtttl", SONIVOX_PLAYER },
        { ".rtx",   SONIVOX_PLAYER },
        { ".ota",   SONIVOX_PLAYER },
};


status_t MediaPlayerService::Client::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers)
{
        player_type playerType = getPlayerType(url);
        LOGV("player type = %d", playerType);

        // create the right type of player
        sp<MediaPlayerBase> p = createPlayer(playerType);

        mStatus = p->setDataSource(url, headers);
}

We can see that setDataSource() here picks a player by calling getPlayerType(url):

player_type getPlayerType(const char* url)
{
    if (!strncasecmp("http://", url, 7)
            || !strncasecmp("https://", url, 8)) {
        size_t len = strlen(url);
        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
            return NU_PLAYER;
        }

        if (strstr(url, "m3u8")) {
            return NU_PLAYER;
        }
    }

    if (!strncasecmp("rtsp://", url, 7)) {
        return NU_PLAYER;
    }

    int lenURL = strlen(url);
    for (int i = 0; i < NELEM(FILE_EXTS); ++i) {
        int len = strlen(FILE_EXTS[i].extension);
        int start = lenURL - len;
        if (start > 0) {
            if (!strncasecmp(url + start, FILE_EXTS[i].extension, len)) {
                return FILE_EXTS[i].playertype;
            }
        }
    }

    return getDefaultPlayerType();
}



static player_type getDefaultPlayerType() {
    return STAGEFRIGHT_PLAYER;
}




static sp<MediaPlayerBase> createPlayer(player_type playerType, void* cookie,
        notify_callback_f notifyFunc)
{
    sp<MediaPlayerBase> p;
    switch (playerType) {
        case SONIVOX_PLAYER:
            LOGD("create MidiFile");
            p = new MidiFile();
            break;
        case STAGEFRIGHT_PLAYER:
            LOGD("create StagefrightPlayer");
            p = new StagefrightPlayer;
            break;
        case NU_PLAYER:
            LOGD("create NuPlayer");
            p = new NuPlayerDriver;
            break;
        case TEST_PLAYER:
            LOGD("Create Test Player stub");
            p = new TestPlayerStub();
            break;
        default:
            LOGD("Unknown player type: %d", playerType);
            return NULL;
    }

    return p;
}

In Media Framework:android_src\frameworks\base\include\media\MediaPlayerInterface.h

enum player_type {
    PV_PLAYER = 1, // legacy of OpenCORE
    SONIVOX_PLAYER = 2,
    STAGEFRIGHT_PLAYER = 3,
    NU_PLAYER = 4,
    // Test players are available only in the 'test' and 'eng' builds.
    // The shared library with the test player is passed as an
    // argument to the 'test:' url in the setDataSource call.
    TEST_PLAYER = 5,
};




There are three players that handle URLs (illustrated by the sketch below):
1. NuPlayer - if the URL starts with http/https and contains m3u8, or starts with rtsp
2. SONIVOX_PLAYER - if the file extension is one of {".mid", ".midi", ".smf", ".xmf", ".imy", ".rtttl", ".rtx", ".ota"}
3. StagefrightPlayer - all remaining types
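To make these dispatch rules concrete, here is a small standalone restatement of getPlayerType() (not framework code; playerFor is a made-up name) that can be compiled and run on a desktop:

#include <stdio.h>
#include <string.h>
#include <strings.h>

// Standalone restatement of the player-selection rules, for illustration only.
static const char *kSonivoxExts[] = {
    ".mid", ".midi", ".smf", ".xmf", ".imy", ".rtttl", ".rtx", ".ota" };

const char *playerFor(const char *url) {
    if (!strncasecmp("http://", url, 7) || !strncasecmp("https://", url, 8)) {
        if (strstr(url, "m3u8")) return "NU_PLAYER";
    }
    if (!strncasecmp("rtsp://", url, 7)) return "NU_PLAYER";
    size_t lenURL = strlen(url);
    for (size_t i = 0; i < sizeof(kSonivoxExts) / sizeof(kSonivoxExts[0]); ++i) {
        size_t len = strlen(kSonivoxExts[i]);
        if (lenURL > len && !strcasecmp(url + lenURL - len, kSonivoxExts[i]))
            return "SONIVOX_PLAYER";
    }
    return "STAGEFRIGHT_PLAYER";                  // getDefaultPlayerType()
}

int main() {
    printf("%s\n", playerFor("http://192.168.1.3/playlist.m3u8")); // NU_PLAYER
    printf("%s\n", playerFor("rtsp://example.com/stream"));        // NU_PLAYER
    printf("%s\n", playerFor("/sdcard/ringtone.mid"));             // SONIVOX_PLAYER
    printf("%s\n", playerFor("/sdcard/movie.mp4"));                // STAGEFRIGHT_PLAYER
    return 0;
}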

In Android 2.3, there were only StagefrightPlayer and the SoniVox player.

NuPlayer first appeared in Android 3.x, used for Apple's HTTP Live Streaming standard.

Let's focus on StagefrightPlayer and NuPlayer.

Starting with Stagefright:

---===Stagefright===---


In Media Framework:android_src\framework\base\media\libmediaplayerservice\StagefrightPlayer.cpp

StagefrightPlayer::StagefrightPlayer()
    : mPlayer(new AwesomePlayer) {
    LOGV("StagefrightPlayer");


    mPlayer->setListener(this);
}

The constructor of StagefrightPlayer first news an AwesomePlayer and stores it as mPlayer.

In Media Framework:android_src\framework\base\media\libstagefright\AwesomePlayer.cpp

AwesomePlayer::AwesomePlayer()
    : mTimeSource(NULL),
      mAudioPlayer(NULL),
      mVideoBuffer(NULL),
      mLastVideoTimeUs(-1) {

    mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new AwesomeEvent(this, &AwesomePlayer::onStreamDone);
    mStreamDoneEventPending = false;
    mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate);
    mBufferingEventPending = false;
    mVideoLagEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoLagUpdate);
    mVideoLagEventPending = false;
    mCheckAudioStatusEvent = new AwesomeEvent(this, &AwesomePlayer::onCheckAudioStatus);
}

In the constructor of AwesomePlayer, several AwesomeEvents are registered; these events will be triggered at the proper time by the TimedEventQueue. This is why the behavior of AwesomePlayer is called "event driven".

struct AwesomeEvent : public TimedEventQueue::Event {
    AwesomeEvent(
            AwesomePlayer *player,
            void (AwesomePlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    // Called by the TimedEventQueue thread when the event is due.
    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

    AwesomePlayer *mPlayer;
    void (AwesomePlayer::*mMethod)();
};

In Media Framework:android_src\framework\base\media\libstagefright\include\TimedEventQueue.h

struct TimedEventQueue {
    void start();

    // Stop executing the event loop, if flush is false, any pending
    // events are discarded, otherwise the queue will stop (and this call
    // return) once all pending events have been handled.
    void stop(bool flush = false);

    // Posts an event to the front of the queue (after all events that
    // have previously been posted to the front but before timed events).
    event_id postEvent(const sp<Event> &event);

    event_id postEventToBack(const sp<Event> &event);

    // It is an error to post an event with a negative delay.
    event_id postEventWithDelay(const sp<Event> &event, int64_t delay_us);

    // If the event is to be posted at a time that has already passed,
    // it will fire as soon as possible.
    event_id postTimedEvent(const sp<Event> &event, int64_t realtime_us);

    // Returns true iff event is currently in the queue and has been
    // successfully cancelled. In this case the event will have been
    // removed from the queue and won't fire.
    bool cancelEvent(event_id id);

    static int64_t getRealTimeUs();
};
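A minimal sketch of the pattern AwesomePlayer uses with this queue, assuming the private libstagefright header is on the include path: subclass Event, override fire(), and post it with a delay.

#include "TimedEventQueue.h"  // private header: frameworks/base/media/libstagefright/include/

using namespace android;

// Illustrative event that fires once, 10 ms after being posted.
struct TickEvent : public TimedEventQueue::Event {
    virtual void fire(TimedEventQueue *queue, int64_t now_us) {
        // ... do the work here; re-post a new event for periodic behavior ...
    }
};

void example() {
    TimedEventQueue queue;
    queue.start();                                   // spawns the event thread
    queue.postEventWithDelay(new TickEvent, 10000);  // 10000 us = 10 ms
    // ...
    queue.stop(true /* flush: handle pending events before stopping */);
}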

Continuing with setDataSource from StagefrightPlayer; note that mPlayer is now an instance of AwesomePlayer.

In Media Framework:android_src\framework\base\media\libmediaplayerservice\StagefrightPlayer.cpp

status_t StagefrightPlayer::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers) {
    return mPlayer->setDataSource(url, headers);
}

In Media Framework:android_src\framework\base\media\libstagefright\AwesomePlayer.cpp

status_t AwesomePlayer::setDataSource(
        int fd, int64_t offset, int64_t length) {

    sp<DataSource> dataSource = new FileSource(fd, offset, length);
    return setDataSource_l(dataSource);
}


status_t AwesomePlayer::setDataSource_l(
        const sp<DataSource> &dataSource) {
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);

    return setDataSource_l(extractor);
}

The MediaExtractor analyzes the content of the media file and dispatches the data source to the proper container-specific extractor for further parsing.

In Media Framework:android_src\framework\base\media\libstagefright\MediaExtractor.cpp 

sp<MediaExtractor> MediaExtractor::Create(
        const sp<DataSource> &source, const char *mime) {

    MediaExtractor *ret = NULL;
    if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG4)
            || !strcasecmp(mime, "audio/mp4")) {
        ret = new MPEG4Extractor(source);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
        ret = new MP3Extractor(source, meta);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)
            || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
        ret = new AMRExtractor(source);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
        ret = new FLACExtractor(source);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WAV)) {
        ret = new WAVExtractor(source);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_OGG)) {
        ret = new OggExtractor(source);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MATROSKA)) {
        ret = new MatroskaExtractor(source);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
        ret = new MPEG2TSExtractor(source);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WVM)) {
        ret = new WVMExtractor(source);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC_ADTS)) {
        ret = new AACExtractor(source);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2PS)) {
        ret = new MPEG2PSExtractor(source);
    }

    return ret;
}
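One detail worth noting: AwesomePlayer calls Create() without a MIME type, so before the dispatch above, Create() first sniffs the container format. Paraphrased from the top of the same function:

// Paraphrased from the beginning of MediaExtractor::Create():
// when the caller passes mime == NULL, the DataSource is sniffed
// to determine the container format and a confidence score.
String8 tmp;
sp<AMessage> meta;
if (mime == NULL) {
    float confidence;
    if (!source->sniff(&tmp, &confidence, &meta)) {
        // container not recognized
        return NULL;
    }
    mime = tmp.string();
}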

Take the example of H.264 video in an MP4 container: an MPEG4Extractor will be newed.

In Media Framework:android_src\framework\base\media\libstagefright\MPEG4Extractor.cpp 

MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source)
    : mDataSource(source),
      mHasVideo(false),
      mFileMetaData(new MetaData) {
}

In Media Framework:android_src\framework\base\media\libstagefright\AwesomePlayer.cpp 

status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {

    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);
        if (!haveVideo && !strncasecmp(mime.string(), "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime.string(), "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;
        }
    }

    return OK;
}

In the above code, AwesomePlayer uses the MPEG4Extractor to split out the audio and video tracks.

In Media Framework:android_src\framework\base\media\libstagefright\MPEG4Extractor.cpp 

sp<MediaSource> MPEG4Extractor::getTrack(size_t index) {
    return new MPEG4Source(
            track->meta, mDataSource, track->timescale, track->sampleTable);
}


MPEG4Source::MPEG4Source(
        const sp<MetaData> &format,
        const sp<DataSource> &dataSource)
    : mFormat(format),
      mDataSource(dataSource),
      mIsAVC(false),
      mNALLengthSize(0),
      mWantsNALFragments(false),
      mSrcBuffer(NULL) {
    const char *mime;
    CHECK(mFormat->findCString(kKeyMIMEType, &mime));
    mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
}

The audio and video tracks are then wrapped as MPEG4Source objects and returned to AwesomePlayer.

In Media Framework:android_src\framework\base\media\libstagefright\AwesomePlayer.cpp 

void AwesomePlayer::setVideoSource(sp<MediaSource> source) {
    mVideoTrack = source;
}


void AwesomePlayer::setAudioSource(sp<MediaSource> source) {
    mAudioTrack = source;
}

After setVideoSource and setAudioSource, MediaPlayerService allocates the SurfaceTexture for future video rendering (connecting to SurfaceFlinger).

In Media Framework: android_src\frameworks\base\media\libmediaplayerservice\MediaPlayerService.cpp

status_t MediaPlayerService::Client::setVideoSurfaceTexture(
        const sp<ISurfaceTexture>& surfaceTexture)
{
    LOGV("[%d] setVideoSurfaceTexture(%p)", mConnId, surfaceTexture.get());

    sp<MediaPlayerBase> p = getPlayer();

    sp<ANativeWindow> anw;

    if (surfaceTexture != NULL) {
        anw = new SurfaceTextureClient(surfaceTexture);
        status_t err = native_window_api_connect(anw.get(),
                NATIVE_WINDOW_API_MEDIA);
    }

    status_t err = p->setVideoSurfaceTexture(surfaceTexture);

    return err;
}

 

 

Android 4.0 Ice Cream Sandwich Media Framework (2)

 

Continued (2)

 

---===Stagefright===---

 

 

 

In part 1 we traced the first two lines in the APK; now we continue with the following:

 

            mMediaPlayer = new MediaPlayer();
            mMediaPlayer.setDataSource(path);
            mMediaPlayer.prepare();
            mMediaPlayer.start();

 

mMediaPlayer.prepare() calls into prepareAsync of StagefrightPlayer.

 

In Media Framework :android_src\framework\base\media\libmediaplayerservice\StagefrightPlayer.cpp

 

status_t StagefrightPlayer::prepareAsync() {
    return mPlayer->prepareAsync();
}

 

In Media Framework :android_src\framework\base\media\libstagefright\AwesomePlayer.cpp  

 

status_t AwesomePlayer::prepareAsync_l() {

    mQueue.start();

    mAsyncPrepareEvent = new AwesomeEvent(this, &AwesomePlayer::onPrepareAsyncEvent);
    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

 

Here mQueue is the instance of TimedEventQueue used to trigger events.

In prepareAsync_l(), the onPrepareAsyncEvent is posted to and handled by mQueue.

 

 

void AwesomePlayer::onPrepareAsyncEvent() {

    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder();
    }

    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();
    }

    modifyFlags(PREPARING_CONNECTED, SET);

    if (isStreamingHTTP()) {
        postBufferingEvent_l();
    } else {
        finishAsyncPrepare_l();
    }
}

 

Recall from part 1 that the MediaExtractor split audio/video into mAudioTrack/mVideoTrack,

so it's time to instantiate the audio/video codecs.

 

status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false, // createEncoder
            mVideoTrack,
            NULL, flags, USE_SURFACE_ALLOC ? mNativeWindow : NULL);

    if (mVideoSource != NULL) {
        status_t err = mVideoSource->start();
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}

 

Here, AwesomePlayer uses OMXCodec to find a proper codec for audio/video (OMX = OpenMAX).

The following lists all the decoders Stagefright can find.

 

In Media Framework:android_src\framework\base\media\libstagefright\OMXCodec.cpp  

 

static const CodecInfo kDecoderInfo[] = {
    { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
    { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.google.mp3.decoder" },
    { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, "OMX.Nvidia.mp2.decoder" },
    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.google.amrnb.decoder" },
    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.google.amrwb.decoder" },
    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.google.aac.decoder" },
    { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "OMX.google.g711.alaw.decoder" },
    { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "OMX.google.g711.mlaw.decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.DUCATI1.VIDEO.DECODER" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.google.mpeg4.decoder" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.DUCATI1.VIDEO.DECODER" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.google.h263.decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.DUCATI1.VIDEO.DECODER" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.google.h264.decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.google.avc.decoder" },
    { MEDIA_MIMETYPE_AUDIO_VORBIS, "OMX.google.vorbis.decoder" },
    { MEDIA_MIMETYPE_VIDEO_VPX, "OMX.google.vpx.decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, "OMX.Nvidia.mpeg2v.decode" },
};

 

Let's continue and see what OMXCodec::Create does.

 

sp<MediaSource> OMXCodec::Create(
        const sp<IOMX> &omx,
        const sp<MetaData> &meta, bool createEncoder,
        const sp<MediaSource> &source,
        const char *matchComponentName,
        uint32_t flags,
        const sp<ANativeWindow> &nativeWindow) {

    Vector<String8> matchingCodecs;
    findMatchingCodecs(
            mime, createEncoder, matchComponentName, flags, &matchingCodecs);

    for (size_t i = 0; i < matchingCodecs.size(); ++i) {

        const char *componentNameBase = matchingCodecs[i].string();
        const char *componentName = componentNameBase;

        LOGV("Attempting to allocate OMX node '%s'", componentName);

        status_t err = omx->allocateNode(componentName, observer, &node);

        if (err == OK) {
            LOGV("Successfully allocated OMX node '%s'", componentName);

            sp<OMXCodec> codec = new OMXCodec(
                    omx, node, quirks, flags,
                    createEncoder, mime, componentName,
                    source, nativeWindow);

            return codec;
        }
    }

    return NULL;
}

 

 

 

 

void OMXCodec::findMatchingCodecs(
        const char *mime,
        bool createEncoder, const char *matchComponentName,
        uint32_t flags,
        Vector<String8> *matchingCodecs) {
    matchingCodecs->clear();

    for (int index = 0;; ++index) {
        const char *componentName;

        componentName = GetCodec(
                kDecoderInfo,
                sizeof(kDecoderInfo) / sizeof(kDecoderInfo[0]),
                mime, index);

        if (!componentName) {
            break;
        }

        matchingCodecs->push(String8(componentName));
    }
}

 

After finding a matching codec, an instance of OMXCodec is newed and returned to AwesomePlayer (stored as mVideoSource for video, mAudioSource for audio).

Let's see the details of omx->allocateNode(componentName, observer, &node) above.

 

In Media Framework:android_src\framework\base\media\libstagefright\omx\OMX.cpp

 

status_t OMX::allocateNode(
        const char *name, const sp<IOMXObserver> &observer, node_id *node) {

    OMX_ERRORTYPE err = mMaster->makeComponentInstance(
            name, &OMXNodeInstance::kCallbacks,
            instance, &handle);

    return OK;
}

 

OMX uses OMXMaster to makeComponentInstance:

 

In Media Framework:android_src\framework\base\media\libstagefright\omx\OMXMaster.cpp

 

OMX_ERRORTYPE OMXMaster::makeComponentInstance(

    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
    OMX_ERRORTYPE err =
        plugin->makeComponentInstance(name, callbacks, appData, component);

    return err;
}

 

OMXMaster then uses SoftOMXPlugin to makeComponentInstance:

 

In Media Framework:android_src\framework\base\media\libstagefright\omx\SoftOMXPlugin.cpp

 

OMX_ERRORTYPE SoftOMXPlugin::makeComponentInstance(

    for (size_t i = 0; i < kNumComponents; ++i) {
        if (strcmp(name, kComponents[i].mName)) {
            continue;
        }

        AString libName = "libstagefright_soft_";
        libName.append(kComponents[i].mLibNameSuffix);
        libName.append(".so");

        void *libHandle = dlopen(libName.c_str(), RTLD_NOW);

        CreateSoftOMXComponentFunc createSoftOMXComponent =
            (CreateSoftOMXComponentFunc)dlsym(
                    libHandle,
                    "_Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPE"
                    "PvPP17OMX_COMPONENTTYPE");

        sp<SoftOMXComponent> codec =
            (*createSoftOMXComponent)(name, callbacks, appData, component);

    }
}

 

Something interesting happens here: the codec is found and instantiated by loading a shared library and calling its createSoftOMXComponent function. If we check the filesystem of an Android device, under /system/lib we can find codec libraries such as:

 

===

 

libstagefright_soft_aacdec.so

libstagefright_soft_amrdec.so

libstagefright_soft_g711dec.so

libstagefright_soft_h264dec.so

libstagefright_soft_mp3dec.so

libstagefright_soft_mpeg4dec.so

libstagefright_soft_vorbisdec.so

libstagefright_soft_vpxdec.so

 

===
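The long string passed to dlsym above is simply the C++-mangled name of createSoftOMXComponent(char const*, OMX_CALLBACKTYPE const*, void*, OMX_COMPONENTTYPE**). The same load-and-resolve pattern, reduced to a minimal sketch (loadSoftCodecFactory is a made-up helper, not framework code):

#include <dlfcn.h>
#include <stdio.h>

// Minimal sketch of the dlopen/dlsym pattern used by SoftOMXPlugin.
void *loadSoftCodecFactory(const char *libName) {
    void *handle = dlopen(libName, RTLD_NOW);  // e.g. "libstagefright_soft_h264dec.so"
    if (handle == NULL) {
        fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return NULL;
    }
    // Resolve the factory function by its mangled name.
    return dlsym(handle,
            "_Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPE"
            "PvPP17OMX_COMPONENTTYPE");
}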

 

 

In Media Framework:android_src\framework\base\media\libstagefright\codecs\on2\h264dec\SoftAVC.cpp

 

android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVC(name, callbacks, appData, component);
}


SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mWidth(320),
      mHeight(240),
      mPictureSize(mWidth * mHeight * 3 / 2),
      mCropLeft(0),
      mCropTop(0),
      mCropWidth(mWidth),
      mCropHeight(mHeight),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mOutputPortSettingsChange(NONE),
      mSignalledError(false) {
    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}

 

SoftAVC inherits from SimpleSoftOMXComponent; that is, it is an OMX component.

So, per the OpenMAX specification, the component needs "ports" for passing buffers in and out.

initPorts() initializes the parameters of the input port and the output port.

 

Take a look at the constructor of SimpleSoftOMXComponent:

 

In Media Framework :android_src\framework\base\media\libstagefright\omx\SimpleSoftOMXComponent.cpp

 

SimpleSoftOMXComponent::SimpleSoftOMXComponent(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftOMXComponent(name, callbacks, appData, component),
      mLooper(new ALooper),
      mHandler(new AHandlerReflector<SimpleSoftOMXComponent>(this)),
      mState(OMX_StateLoaded),
      mTargetState(OMX_StateLoaded) {
    mLooper->setName(name);
    mLooper->registerHandler(mHandler);

    mLooper->start(
            false, // runOnCallingThread
            false, // canCallJava
            ANDROID_PRIORITY_FOREGROUND);
}

 

Here SimpleSoftOMXComponent uses the Message/Handler/Looper model.
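This Message/Handler/Looper model (ALooper, AHandler, AMessage from libstagefright's foundation classes) recurs throughout the framework, so here is a minimal sketch of the pattern itself (MyHandler and kWhatPing are made-up names):

#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

// Minimal handler: every AMessage posted with this handler's id()
// is delivered to onMessageReceived on the looper's thread.
struct MyHandler : public AHandler {
    enum { kWhatPing = 'ping' };

    virtual void onMessageReceived(const sp<AMessage> &msg) {
        switch (msg->what()) {
            case kWhatPing:
                // ... handle the message asynchronously ...
                break;
        }
    }
};

void example() {
    sp<ALooper> looper = new ALooper;
    sp<MyHandler> handler = new MyHandler;

    looper->setName("example");
    looper->registerHandler(handler);   // assigns handler->id()
    looper->start();

    (new AMessage(MyHandler::kWhatPing, handler->id()))->post();
}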

 

In Media Framework:android_src\framework\base\media\libstagefright\codecs\on2\h264dec\SoftAVC.cpp

 

status_t SoftAVC::initDecoder() {
    if (H264SwDecInit(&mHandle, 0) == H264SWDEC_OK) {
        return OK;
    }
    return UNKNOWN_ERROR;
}

 

In Media Framework:android_src\framework\base\media\libstagefright\codecs\on2\h264dec\source\H264SwDecApi.c

 

H264SwDecRet H264SwDecInit(H264SwDecInst *decInst, u32 noOutputReordering)
{

    pDecCont = (decContainer_t *)H264SwDecMalloc(sizeof(decContainer_t));
    pDecCont->decStat = INITIALIZED;
    pDecCont->picNumber = 0;
    *decInst = (decContainer_t *)pDecCont;
    return (H264SWDEC_OK);

}

 

After omx->allocateNode(componentName, observer, &node), we are back at new OMXCodec in OMXCodec::Create; take a look at the constructor of OMXCodec.

 

In Media Framework:android_src\framework\base\media\libstagefright\OMXCodec.cpp  

 

OMXCodec::OMXCodec(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t quirks, uint32_t flags,
        bool isEncoder,
        const char *mime,
        const char *componentName,
        const sp<MediaSource> &source,
        const sp<ANativeWindow> &nativeWindow)
    : mOMX(omx),
      mOMXLivesLocally(omx->livesLocally(getpid())),
      mNode(node),
      mQuirks(quirks),
      mFlags(flags),
      mIsEncoder(isEncoder),
      mMIME(strdup(mime)),
      mComponentName(strdup(componentName)),
      mSource(source),
      mCodecSpecificDataIndex(0),
      mState(LOADED),
      mInitialBufferSubmit(true),
      mSignalledEOS(false),
      mNoMoreOutputData(false),
      mOutputPortSettingsHaveChanged(false),
      mSeekTimeUs(-1),
      mSeekMode(ReadOptions::SEEK_CLOSEST_SYNC),
      mTargetTimeUs(-1),
      mOutputPortSettingsChangedPending(false),
      mLeftOverBuffer(NULL),
      mPaused(false),
      mNativeWindow(
              (!strncmp(componentName, "OMX.google.", 11)
              || !strcmp(componentName, "OMX.Nvidia.mpeg2v.decode"))
                        ? NULL : nativeWindow) {
    mPortStatus[kPortIndexInput] = ENABLED;
    mPortStatus[kPortIndexOutput] = ENABLED;

    setComponentRole();
}

 

Back to AwesomePlayer::onPrepareAsyncEvent()

 

In Media Framework:android_src\framework\base\media\libstagefright\AwesomePlayer.cpp

 

void AwesomePlayer::finishAsyncPrepare_l() {
    if (mIsAsyncPrepare) {
        notifyListener_l(MEDIA_PREPARED);
    }
    mPrepareResult = OK;
}

 

 

Android 4.0 Ice Cream Sandwich Media Framework (3)

Continued (3)

---===Stagefright===---


In the APK:
            mMediaPlayer = new MediaPlayer();
            mMediaPlayer.setDataSource(path);
            mMediaPlayer.prepare();
            mMediaPlayer.start();

Start tracing mMediaPlayer.start()



In Media Framework: android_src\frameworks\base\media\libmediaplayerservice\MediaPlayerService.cpp

status_t MediaPlayerService::Client::start()
{
    LOGV("[%d] start", mConnId);
    sp<MediaPlayerBase> p = getPlayer();
    p->setLooping(mLoop);
    return p->start();
}


In Media Framework:android_src\framework\base\media\libmediaplayerservice\StagefrightPlayer.cpp


status_t StagefrightPlayer::start() {
    LOGV("start");
    return mPlayer->play();
}


In Media Framework :android_src\framework\base\media\libstagefright\AwesomePlayer.cpp


status_t AwesomePlayer::play() {
    return play_l();
}


status_t AwesomePlayer::play_l() {


if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
}
return OK;
}


void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
    if (mVideoEventPending) {
        return;
    }
    mVideoEventPending = true;
    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
}

From the above code, AwesomePlayer posts an event to trigger the onVideoEvent method.

In Media Framework:android_src\framework\base\media\libstagefright\AwesomePlayer.cpp

void AwesomePlayer::onVideoEvent() {

    if (!mVideoBuffer) {
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);

            if (mVideoBuffer->range_length() == 0) {
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }
            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    int64_t realTimeUs, mediaTimeUs;
    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
    }

    int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

    int64_t latenessUs = nowUs - timeUs;

    if (latenessUs > 500000ll
            && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(
                &realTimeUs, &mediaTimeUs)) {
        LOGI("we're much too late (%.2f secs), video skipping ahead",
             latenessUs / 1E6);

        mVideoBuffer->release();
        mVideoBuffer = NULL;

        mSeeking = SEEK_VIDEO_ONLY;
        mSeekTimeUs = mediaTimeUs;

        postVideoEvent_l();
        return;
    }

    if (latenessUs > 40000) {
        // We're more than 40ms late.
        LOGV("we're late by %lld us (%.2f secs)",
             latenessUs, latenessUs / 1E6);
        mSinceLastDropped = 0;
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        postVideoEvent_l();
        return;
    }

    if (latenessUs < -10000) {
        // We're more than 10ms early.
        postVideoEvent_l(10000);
        return;
    }

    if (mVideoRenderer != NULL) {
        mSinceLastDropped++;
        mVideoRenderer->render(mVideoBuffer);
    }
    postVideoEvent_l();
}

mVideoSource->read() communicates with the OMX component (SoftAVC) to do the decoding,

then stores the decoded frame in mVideoBuffer.

Note that mVideoSource is an instance of OMXCodec; that is, the communication with the component is handled by OMXCodec.

In Media Framework:android_src\framework\base\media\libstagefright\OMXCodec.cpp   

status_t OMXCodec::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    if (mInitialBufferSubmit) {
        mInitialBufferSubmit = false;

        drainInputBuffers();
        if (mState == EXECUTING) {
            // Otherwise mState == RECONFIGURING and this code will trigger
            // after the output port is reenabled.
            fillOutputBuffers();
        }
    }

    while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) {
        if ((err = waitForBufferFilled_l()) != OK) {
            return err;
        }
    }

    size_t index = *mFilledBuffers.begin();
    mFilledBuffers.erase(mFilledBuffers.begin());
    BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
    *buffer = info->mMediaBuffer;

    return OK;
}


Here,
drainInputBuffers() feeds the decoder with un-decoded input buffers,
fillOutputBuffers() hands the decoder empty buffers to fill with decoded output, and
waitForBufferFilled_l() waits until a decoded buffer shows up on the output queue.


void OMXCodec::drainInputBuffers() {

    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
    for (size_t i = 0; i < buffers->size(); ++i) {
        BufferInfo *info = &buffers->editItemAt(i);
        if (!drainInputBuffer(info)) {
            break;
        }
    }
}


bool OMXCodec::drainInputBuffer(BufferInfo *info) {
    err = mSource->read(&srcBuffer);

    memcpy(info->mData, kNALStartCode, 4);
    memcpy((uint8_t *)info->mData + 4,
           specific->mData, specific->mSize);

    status_t err = mOMX->emptyBuffer(
            mNode, info->mBuffer, 0, size,
            OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG,
            0);
    return true;
}

mSource->read(&srcBuffer) fetches a buffer of the media file; mSource here refers to the MPEG4Source (defined in MPEG4Extractor.cpp) since the media container is mp4 (for other containers it will differ; e.g., a ts container will use AnotherPacketSource).

In Media Framework  :android_src\framework\base\media\libstagefright\MPEG4Extractor.cpp


status_t MPEG4Source::read(
        MediaBuffer **out, const ReadOptions *options) {
        err = mGroup->acquire_buffer(&mBuffer);

        ssize_t num_bytes_read =
            mDataSource->readAt(offset, (uint8_t *)mBuffer->data(), size);

        mBuffer->set_range(0, size);
        mBuffer->meta_data()->clear();
        mBuffer->meta_data()->setInt64(
                kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
        *out = mBuffer;
        return OK;
}





In Media Framework:android_src\framework\base\media\libstagefright\omx\SimpleSoftOMXComponent.cpp

OMX_ERRORTYPE SimpleSoftOMXComponent::emptyThisBuffer(
        OMX_BUFFERHEADERTYPE *buffer) {
    sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler->id());
    msg->setPointer("header", buffer);
    msg->post();

    return OMX_ErrorNone;
}

Here emptyThisBuffer uses the Message/Handler model to send the message kWhatEmptyThisBuffer.

Since the component registered itself as the handler, its onMessageReceived will be triggered.

void SimpleSoftOMXComponent::onMessageReceived(const sp<AMessage> &msg) {

    switch (msg->what()) {
        case kWhatEmptyThisBuffer:
        case kWhatFillThisBuffer:

            OMX_BUFFERHEADERTYPE *header;
            CHECK(msg->findPointer("header", (void **)&header));

            CHECK(mState == OMX_StateExecuting && mTargetState == mState);

            bool found = false;
            for (size_t i = 0; i < mPorts.size(); ++i) {
                PortInfo *port = &mPorts.editItemAt(i);

                for (size_t j = 0; j < port->mBuffers.size(); ++j) {
                    BufferInfo *buffer = &port->mBuffers.editItemAt(j);

                    if (buffer->mHeader == header) {
                        CHECK(!buffer->mOwnedByUs);

                        buffer->mOwnedByUs = true;

                        CHECK((msg->what() == kWhatEmptyThisBuffer
                                    && port->mDef.eDir == OMX_DirInput)
                                || (port->mDef.eDir == OMX_DirOutput));

                        port->mQueue.push_back(buffer);
                        onQueueFilled(i);

                        found = true;
                        break;
                    }
                }
            }
}

onQueueFilled() will call into the "real" codec component

void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
            && outQueue.size() == kNumOutputBuffers) {
        BufferInfo *inInfo = *inQueue.begin();

        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        H264SwDecInput inPicture;
        H264SwDecOutput outPicture;
        memset(&inPicture, 0, sizeof(inPicture));
        inPicture.dataLen = inHeader->nFilledLen;
        inPicture.pStream = inHeader->pBuffer + inHeader->nOffset;

        while (inPicture.dataLen > 0) {

            ret = H264SwDecDecode(mHandle, &inPicture, &outPicture);

            if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY ||
                ret == H264SWDEC_PIC_RDY_BUFF_NOT_EMPTY) {

                inPicture.dataLen -= (u32)(outPicture.pStrmCurrPos - inPicture.pStream);

                inPicture.pStream = outPicture.pStrmCurrPos;
            }
        }

        if (mFirstPicture && !outQueue.empty()) {

            drainOneOutputBuffer(mFirstPictureId, mFirstPicture);

            delete[] mFirstPicture;

            mFirstPicture = NULL;

            mFirstPictureId = -1;
        }

        while (!outQueue.empty() &&
                mHeadersDecoded &&
                H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                    == H264SWDEC_PIC_RDY) {

            int32_t picId = decodedPicture.picId;
            uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
            drainOneOutputBuffer(picId, data);
        }
    }
}



Here, when a buffer has been decoded and is ready for rendering, drainOneOutputBuffer will be called.

 

void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t *data) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    BufferInfo *outInfo = *outQueue.begin();
    outQueue.erase(outQueue.begin());
    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
    OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
    outHeader->nTimeStamp = header->nTimeStamp;
    outHeader->nFlags = header->nFlags;
    outHeader->nFilledLen = mPictureSize;
    memcpy(outHeader->pBuffer + outHeader->nOffset,
            data, mPictureSize);
    mPicToHeaderMap.removeItem(picId);
    delete header;
    outInfo->mOwnedByUs = false;
    notifyFillBufferDone(outHeader);
}

 

notifyFillBufferDone goes through many classes; to simplify the process, we jump straight to OMXCodec::on_message().

In Media Framework:android_src\framework\base\media\libstagefright\OMXCodec.cpp   

 

void OMXCodec::on_message(const omx_message &msg) {

    switch (msg.type) {
        case omx_message::FILL_BUFFER_DONE:
        {

            Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];

            CHECK(i < buffers->size());
            BufferInfo *info = &buffers->editItemAt(i);

            if (mPortStatus[kPortIndexOutput] == DISABLING) {
                CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);

                status_t err = freeBuffer(kPortIndexOutput, i);
                CHECK_EQ(err, (status_t)OK);

            } else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
                CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);

                if (info->mMediaBuffer == NULL) {
                    CHECK(mOMXLivesLocally);
                    CHECK(mQuirks & kRequiresAllocateBufferOnOutputPorts);
                    CHECK(mQuirks & kDefersOutputBufferAllocation);

                    // The qcom video decoders on Nexus don't actually allocate
                    // output buffer memory on a call to OMX_AllocateBuffer
                    // the "pBuffer" member of the OMX_BUFFERHEADERTYPE
                    // structure is only filled in later.

                    info->mMediaBuffer = new MediaBuffer(
                            msg.u.extended_buffer_data.data_ptr,
                            info->mSize);
                    info->mMediaBuffer->setObserver(this);
                }

                MediaBuffer *buffer = info->mMediaBuffer;

                buffer->set_range(
                        msg.u.extended_buffer_data.range_offset,
                        msg.u.extended_buffer_data.range_length);

                buffer->meta_data()->clear();

                buffer->meta_data()->setInt64(
                        kKeyTime, msg.u.extended_buffer_data.timestamp);

                buffer->meta_data()->setPointer(
                        kKeyPlatformPrivate,
                        msg.u.extended_buffer_data.platform_private);

                buffer->meta_data()->setPointer(
                        kKeyBufferID,
                        msg.u.extended_buffer_data.buffer);

                mFilledBuffers.push_back(i);
                mBufferFilled.signal();

            }
            break;
    }
}

 

Now we have a decoded buffer, wrapped as a MediaBuffer.

 

Back to AwesomePlayer


In Media Framework:android_src\framework\base\media\libstagefright\AwesomePlayer.cpp

void AwesomePlayer::onVideoEvent() {

    if (!mVideoBuffer) {
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);

            if (mVideoBuffer->range_length() == 0) {
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }
            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    int64_t realTimeUs, mediaTimeUs;
    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
    }

    int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

    int64_t latenessUs = nowUs - timeUs;

    if (latenessUs > 500000ll
            && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(
                &realTimeUs, &mediaTimeUs)) {
        LOGI("we're much too late (%.2f secs), video skipping ahead",
             latenessUs / 1E6);
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        mSeeking = SEEK_VIDEO_ONLY;
        mSeekTimeUs = mediaTimeUs;
        postVideoEvent_l();
        return;
    }

    if (latenessUs > 40000) {
        // We're more than 40ms late.
        LOGV("we're late by %lld us (%.2f secs)",
             latenessUs, latenessUs / 1E6);
        mSinceLastDropped = 0;
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        postVideoEvent_l();
        return;
    }

    if (latenessUs < -10000) {
        // We're more than 10ms early.
        postVideoEvent_l(10000);
        return;
    }

    if (mVideoRenderer != NULL) {
        mSinceLastDropped++;
        mVideoRenderer->render(mVideoBuffer);
    }
    postVideoEvent_l();
}


After decoding, the media time of the video frame is used to check whether we are too late; if so, the video buffer is simply dropped. (See the condensed restatement below.)

Then postVideoEvent -> onVideoEvent -> decode -> render -> postVideoEvent repeats again and again, until end of stream (EOS).
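The sync decision can be restated compactly (a summary, not framework code): latenessUs = (ts->getRealTimeUs() - mTimeSourceDeltaUs) - timeUs, where timeUs is the frame's media timestamp, and then:

// Condensed restatement of AwesomePlayer's per-frame A/V sync policy.
enum SyncAction { SEEK_VIDEO_AHEAD, DROP_FRAME, WAIT_AND_RETRY, RENDER_NOW };

SyncAction decide(int64_t latenessUs) {
    if (latenessUs > 500000)  return SEEK_VIDEO_AHEAD;  // > 0.5 s late: seek video only
    if (latenessUs > 40000)   return DROP_FRAME;        // > 40 ms late: drop this frame
    if (latenessUs < -10000)  return WAIT_AND_RETRY;    // > 10 ms early: repost the event
    return RENDER_NOW;                                  // roughly on time: render
}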

 

 

Android 4.0 Ice Cream Sandwich Media Framework (4)


Continued (4)

Start NuPlayer tracing

---===NuPlayer===---



Recall from Android 4.0 Ice Cream Sandwich Media Framework (1): if the data source URL contains m3u8 (e.g., http://192.168.1.3/playlist.m3u8), or starts with the rtsp protocol, then NU_PLAYER will be selected by MediaPlayerService.

Here, the NuPlayer tracing will focus on HTTP Live Streaming, which means decoding the m3u8 file.

Before starting the code tracing, we first take a look at the m3u8 file.
(the URL set on MediaPlayer in the APK is something like: http://192.168.1.3/playlist.m3u8 )

==playlist.m3u8 =================================

#EXTM3U
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=600000
http://192.168.1.22/playhigh.m3u8
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=200000
http://192.168.1.21/playlow.m3u8

==playhigh.m3u8=================================


#EXTM3U
#EXT-X-TARGETDURATION:5
#EXTINF:5,
http://192.168.1.22/high/sample-no-1.ts
#EXTINF:5,
http://192.168.1.22/high/sample-no-2.ts
#EXTINF:5,
http://192.168.1.22/high/sample-no-3.ts
#EXT-X-ENDLIST

==playlow.m3u8=================================

#EXTM3U
#EXT-X-TARGETDURATION:5
#EXTINF:5,
http://192.168.1.21/low/sample-no-1.ts
#EXTINF:5,
http://192.168.1.21/low/sample-no-2.ts
#EXTINF:5,
http://192.168.1.21/low/sample-no-3.ts


#EXT-X-ENDLIST
=============================================

All the tags like #EXTM3U and the others are defined in Apple's HTTP Live Streaming protocol.

see the following website for more information
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07

The introduction of HTTP Live Streaming

see the following website for more information
http://developer.apple.com/library/ios/#documentation/NetworkingInternet/Conceptual/StreamingMediaGuide/Introduction/Introduction.html#//apple_ref/doc/uid/TP40008332-CH1-DontLinkElementID_39

And, the supported version of HTTP Live Streaming in Android :
http://developer.android.com/guide/appendix/media-formats.html


==================================================================
Let's get it started.

In APK, we use the same process to start media playback

           mMediaPlayer = new MediaPlayer();
           mMediaPlayer.setDataSource(path);
           mMediaPlayer.prepare();
           mMediaPlayer.start();



In Media Framework :android_src\framework\base\media\libmediaplayerservice\MediaPlayerService.cpp

static sp<MediaPlayerBase> createPlayer(player_type playerType, void* cookie,
        notify_callback_f notifyFunc)

{
    sp<MediaPlayerBase> p;
    switch (playerType) {
        case SONIVOX_PLAYER:
            LOGD("create MidiFile");
            p = new MidiFile();
            break;
        case STAGEFRIGHT_PLAYER:
            LOGD("create StagefrightPlayer");
            p = new StagefrightPlayer;
            break;
        case NU_PLAYER:
            LOGD("create NuPlayer");
            p = new NuPlayerDriver;
            break;
        case TEST_PLAYER:
            LOGD("Create Test Player stub");
            p = new TestPlayerStub();
            break;
        default:
            LOGD("Unknown player type: %d", playerType);
            return NULL;
    }

    return p;
}

As mentioned before, if the URL contains m3u8, NU_PLAYER will be selected. Therefore a NuPlayerDriver is newed here.

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDriver.cpp

NuPlayerDriver::NuPlayerDriver()
    : mDurationUs(-1),
      mPositionUs(-1),
      mNumFramesTotal(0),
      mNumFramesDropped(0),
      mState(UNINITIALIZED),
      mLooper(new ALooper) {
    mLooper->setName("NuPlayerDriver Looper");

    mLooper->start();

    mPlayer = new NuPlayer;
    mLooper->registerHandler(mPlayer);

    mPlayer->setDriver(this);
}

NuPlayerDriver's constructor initializes the Looper/Message/Handler mechanism for NuPlayer.

The role of NuPlayerDriver is like StagefrightPlayer: the interface with MediaPlayer.

mPlayer->setDriver(this) gives NuPlayer the ability to send information back (media duration, number of frames, etc.).

Note that mState is set to UNINITIALIZED here.

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp

NuPlayer::NuPlayer()
   : mVideoIsAVC(false),
     mAudioEOS(false),
     mVideoEOS(false),
     mVideoLateByUs(0ll),
     mNumFramesTotal(0ll),
     mNumFramesDropped(0ll) {
}

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDriver.cpp

status_t NuPlayerDriver::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers) {

    CHECK_EQ((int)mState, (int)UNINITIALIZED);

    mPlayer->setDataSource(url, headers);

    mState = STOPPED;
}

Note that mState is set to STOPPED here.

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp

void NuPlayer::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    if (!strncasecmp(url, "rtsp://", 7)) {
        msg->setObject(
                "source", new RTSPSource(url, headers, mUIDValid, mUID));
    } else {
        msg->setObject(
                "source", new HTTPLiveSource(url, headers, mUIDValid, mUID));
    }

    msg->post();
}

If the url starts with "rtsp://", an RTSPSource is instantiated; otherwise (for m3u8) an HTTPLiveSource is instantiated.

Both are used to handle the incoming media stream.

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\HTTPLiveSource.cpp

NuPlayer::HTTPLiveSource::HTTPLiveSource(
        const char *url,
        const KeyedVector<String8, String8> *headers,
        bool uidValid, uid_t uid)
    : mURL(url),
      mFlags(0),
      mFinalResult(OK),
      mOffset(0) {
}

Then a kWhatSetDataSource message is posted. Note that the message carries the instance of RTSPSource or HTTPLiveSource.

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp

void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            LOGV("kWhatSetDataSource");

            CHECK(mSource == NULL);

            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));

            mSource = static_cast<Source *>(obj.get());
            break;
        }
    }
}

Here, mSource refers to the HTTPLiveSource or RTSPSource.

Now we have finished mMediaPlayer.setDataSource(path);

In Media Framework :android_src\framework\base\media\libmediaplayerservice\MediaPlayerService.cpp

status_t MediaPlayerService::Client::prepareAsync()
{
    LOGV("[%d] prepareAsync", mConnId);
    sp<MediaPlayerBase> p = getPlayer();
    if (p == 0) return UNKNOWN_ERROR;
    status_t ret = p->prepareAsync();
    return ret;
}

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDriver.cpp

status_t NuPlayerDriver::prepareAsync() {
   notifyListener(MEDIA_PREPARED);


   return OK;
}

prepareAsync() here does nothing but notify back MEDIA_PREPARED.

In Media Framework :android_src\framework\base\media\libmediaplayerservice\MediaPlayerService.cpp

status_t MediaPlayerService::Client::start()
{
    LOGV("[%d] start", mConnId);
    sp<MediaPlayerBase> p = getPlayer();

    return p->start();
}

Note that mState of NuPlayerDriver is set as STOPPED now.

In Media Framework  :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDriver.cpp

status_t NuPlayerDriver::start() {
    switch (mState) {
        case STOPPED:
        {
            mAtEOS = false;
            mPlayer->start();

            break;
        }
    }

    mState = PLAYING;
    return OK;
}

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp

void NuPlayer::start() {
   (new AMessage(kWhatStart, id()))->post();
}

NuPlayer posts a kWhatStart message.

void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            LOGV("kWhatStart");

            mVideoIsAVC = false;
            mAudioEOS = false;
            mVideoEOS = false;
            mSkipRenderingAudioUntilMediaTimeUs = -1;
            mSkipRenderingVideoUntilMediaTimeUs = -1;
            mVideoLateByUs = 0;
            mNumFramesTotal = 0;
            mNumFramesDropped = 0;

            mSource->start();

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()));

            looper()->registerHandler(mRenderer);

            postScanSources();
            break;
        }
    }
}

In Media Framework  :android_src\framework\base\media\libmediaplayerservice\nuplayer\HTTPLiveSource.cpp

void NuPlayer::HTTPLiveSource::start() {
    mLiveLooper = new ALooper;
    mLiveLooper->setName("http live");
    mLiveLooper->start();

    mLiveSession = new LiveSession(
            (mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0,
            mUIDValid, mUID);

    mLiveLooper->registerHandler(mLiveSession);

    mLiveSession->connect(
            mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);

    mTSParser = new ATSParser;
}

HTTPLiveSource creates its own looper for the Message/Handler mechanism, and instantiates the LiveSession.

In Media Framework :android_src\framework\base\media\libstagefright\httplive\LiveSession.cpp

LiveSession::LiveSession(uint32_t flags, bool uidValid, uid_t uid)
    : mDataSource(new LiveDataSource),
      mHTTPDataSource(
              HTTPBase::Create(
                  (mFlags & kFlagIncognito)
                    ? HTTPBase::kFlagIncognito
                    : 0)),
      mPrevBandwidthIndex(-1),
      mLastPlaylistFetchTimeUs(-1),
      mSeqNumber(-1),
      mSeekTimeUs(-1),
      mNumRetries(0),
      mDurationUs(-1),
      mSeekDone(false),
      mDisconnectPending(false),
      mMonitorQueueGeneration(0),
      mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY) {
}

mDataSource is an instance of LiveDataSource, which acts as the buffer-queue manager; we'll see how it works later.

In Media Framework :android_src\framework\base\media\libstagefright\httplive\LiveDataSource.cpp

LiveDataSource::LiveDataSource()
   : mOffset(0),
     mFinalResult(OK),
     mBackupFile(NULL) {
}

In Media Framework :android_src\framework\base\media\libstagefright\HTTPBase.cpp

sp<HTTPBase> HTTPBase::Create(uint32_t flags) {
        return new ChromiumHTTPDataSource(flags);
}

So the mHTTPDataSource in LiveSession is an instance of ChromiumHTTPDataSource.

ChromiumHTTPDataSource helps LiveSession read data from the internet.

In Media Framework  :android_src\framework\base\media\libstagefright\chromium_http\ChromiumHTTPDataSource.cpp

ChromiumHTTPDataSource::ChromiumHTTPDataSource(uint32_t flags)
    : mFlags(flags),
      mState(DISCONNECTED),
      mDelegate(new SfDelegate),
      mCurrentOffset(0),
      mIOResult(OK),
      mContentSize(-1),
      mDecryptHandle(NULL),
      mDrmManagerClient(NULL) {
    mDelegate->setOwner(this);
    LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, "ChromiumHTTPDataSource ctor");
}

Now back to NuPlayer::HTTPLiveSource::start(), which calls mLiveSession->connect().

In Media Framework :android_src\framework\base\media\libstagefright\httplive\LiveSession.cpp

void LiveSession::connect(
        const char *url, const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatConnect, id());
    msg->setString("url", url);

    if (headers != NULL) {
        msg->setPointer(
                "headers",
                new KeyedVector<String8, String8>(*headers));
    }

    msg->post();
}

We pause here and take a look at new ATSParser, used for parsing later.

In Media Framework  :android_src\framework\base\media\libstagefright\mpegts\ATSParser.cpp

ATSParser::ATSParser(uint32_t flags)
   : mFlags(flags) {
}

ATSParser's constructor doesn't do anything special, so we continue to see how the kWhatConnect message posted in LiveSession::connect() is handled.

void LiveSession::onMessageReceived(const sp<AMessage> &msg) {

    switch (msg->what()) {
        case kWhatConnect:
            onConnect(msg);
            break;
    }
}

void LiveSession::onConnect(const sp<AMessage> &msg) {
    AString url;
    CHECK(msg->findString("url", &url));

    bool dummy;
    sp<M3UParser> playlist = fetchPlaylist(url.c_str(), &dummy);

    if (playlist->isVariantPlaylist()) {
        for (size_t i = 0; i < playlist->size(); ++i) {
            BandwidthItem item;

            sp<AMessage> meta;
            playlist->itemAt(i, &item.mURI, &meta);

            unsigned long bandwidth;
            CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth));
            LOGD("Check Bandwidth :%lu", item.mBandwidth);
            mBandwidthItems.push(item);
        }

        CHECK_GT(mBandwidthItems.size(), 0u);

        mBandwidthItems.sort(SortByBandwidth);
    } else {
        LOGD("Not VariantPlayList");
    }

    postMonitorQueue();
}


sp<M3UParser> LiveSession::fetchPlaylist(const char *url, bool *unchanged) {
    *unchanged = false;

    sp<ABuffer> buffer;
    status_t err = fetchFile(url, &buffer);

    sp<M3UParser> playlist =
        new M3UParser(url, buffer->data(), buffer->size());

    return playlist;
}

fetchPlaylist here reads the data of the m3u8 file from the internet, and instantiates an M3UParser to parse the content of the m3u8 file.

playlist->isVariantPlaylist() checks whether the playlist contains the tag "#EXT-X-STREAM-INF".

If it does, the playlist carries bandwidth information for each stream.
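When the playlist is a variant playlist, LiveSession later picks which stream to fetch in getBandwidthIndex(): estimate the current bandwidth through the HTTP data source, then choose the highest-bandwidth item that still fits. A simplified sketch (the real code also honors test overrides via system properties; BandwidthItem is LiveSession's {mURI, mBandwidth} struct):

// Simplified sketch of the selection rule in LiveSession::getBandwidthIndex().
// mBandwidthItems is sorted by bandwidth, lowest first (SortByBandwidth).
size_t getBandwidthIndexSketch(const Vector<BandwidthItem> &items,
                               size_t estimatedBps) {
    if (items.isEmpty()) return 0;
    size_t index = items.size() - 1;                  // start at the best quality
    while (index > 0 && items.itemAt(index).mBandwidth > estimatedBps) {
        --index;                                      // step down until it fits
    }
    return index;
}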

In Media Framework  :android_src\framework\base\media\libstagefright\httplive\M3UParser.cpp

M3UParser::M3UParser(
        const char *baseURI, const void *data, size_t size)
    : mInitCheck(NO_INIT),
      mBaseURI(baseURI),
      mIsExtM3U(false),
      mIsVariantPlaylist(false),
      mIsComplete(false) {
    mInitCheck = parse(data, size);
}

parse() in M3UParser analyzes the tags in the m3u8 file.

In Media Framework :android_src\framework\base\media\libstagefright\httplive\LiveSession.cpp

void LiveSession::postMonitorQueue(int64_t delayUs) {
    sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id());
    msg->setInt32("generation", ++mMonitorQueueGeneration);
    msg->post(delayUs);
}

void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatMonitorQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mMonitorQueueGeneration) {
                // Stale event
                break;
            }

            onMonitorQueue();
            break;
        }
    }
}

void LiveSession::onMonitorQueue() {
    if (mSeekTimeUs >= 0
            || mDataSource->countQueuedBuffers() < kMaxNumQueuedFragments) {
        onDownloadNext();
    } else {
        postMonitorQueue(1000000ll);
    }
}

mDataSource starts out with no buffers from the internet, so this triggers onDownloadNext() to download.

In Media Framework :android_src\framework\base\media\libstagefright\include\LiveSession.h


    enum {
        kMaxNumQueuedFragments = 3,
        kMaxNumRetries        = 5,
    };


In Media Framework :android_src\framework\base\media\libstagefright\httplive\LiveSession.cpp

void LiveSession::onDownloadNext() {
    size_t bandwidthIndex = getBandwidthIndex();

    if (mLastPlaylistFetchTimeUs < 0
            || (ssize_t)bandwidthIndex != mPrevBandwidthIndex
            || (!mPlaylist->isComplete() && timeToRefreshPlaylist(nowUs))) {
        AString url;
        if (mBandwidthItems.size() > 0) {
            url = mBandwidthItems.editItemAt(bandwidthIndex).mURI;
        } else {
            url = mMasterURL;
        }

        bool firstTime = (mPlaylist == NULL);

        bool unchanged;
        sp<M3UParser> playlist = fetchPlaylist(url.c_str(), &unchanged);
        if (playlist == NULL) {
            if (unchanged) {
                // We succeeded in fetching the playlist, but it was
                // unchanged from the last time we tried.
            } else {
                LOGE("failed to load playlist at url '%s'", url.c_str());
                mDataSource->queueEOS(ERROR_IO);
                return;
            }
        } else {
            mPlaylist = playlist;
        }

        if (firstTime) {
            Mutex::Autolock autoLock(mLock);

            if (!mPlaylist->isComplete()) {
                mDurationUs = -1;
            } else {
                mDurationUs = 0;
                for (size_t i = 0; i < mPlaylist->size(); ++i) {
                    sp<AMessage> itemMeta;
                    CHECK(mPlaylist->itemAt(
                                i, NULL /* uri */, &itemMeta));

                    int64_t itemDurationUs;
                    CHECK(itemMeta->findInt64("durationUs", &itemDurationUs));

                    mDurationUs += itemDurationUs;
                }
            }
        }
    }

    sp<ABuffer> buffer;
    status_t err = fetchFile(uri.c_str(), &buffer);

    mDataSource->queueBuffer(buffer);

    mPrevBandwidthIndex = bandwidthIndex;
    ++mSeqNumber;

    postMonitorQueue();
}

 

onDownloadNext() downloads one media segment listed in the playlist, queues the downloaded buffer into mDataSource, and then calls postMonitorQueue() again to schedule the next download. With kMaxNumQueuedFragments = 3, the session stays at most three segments ahead of playback.

Android 4.0 Ice Cream Sandwich Media Framework (5)

Continued (5)

---===NuPlayer===---


In part 4, HTTPLiveSource started downloading the m3u8 file and the successive media segments.

Back to NuPlayer

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp

void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            LOGV("kWhatStart");

            mVideoIsAVC = false;
            mAudioEOS = false;
            mVideoEOS = false;
            mSkipRenderingAudioUntilMediaTimeUs = -1;
            mSkipRenderingVideoUntilMediaTimeUs = -1;
            mVideoLateByUs = 0;
            mNumFramesTotal = 0;
            mNumFramesDropped = 0;

            mSource->start();  // HTTPLiveSource

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()));

            looper()->registerHandler(mRenderer);

            postScanSources();
            break;
        }
    }
}

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerRenderer.cpp


NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify)
    : mAudioSink(sink),
      mNotify(notify),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}


Note that NuPlayer passes a "kWhatRendererNotify" message when constructing the NuPlayerRenderer; NuPlayerRenderer stores it as mNotify and later uses copies of it to send information back to NuPlayer.
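This "notify template" pattern recurs throughout NuPlayer: the owner hands a pre-targeted AMessage to a component, and the component later calls dup() on it, fills in payload fields, and posts the copy. A minimal sketch of the idea (the notifySomething event and its payload field are hypothetical):

// Owner side: the message is already targeted back at the owner's handler id.
sp<AMessage> notify = new AMessage(kWhatRendererNotify, id());
mRenderer = new Renderer(mAudioSink, notify);

// Component side: clone the stored template per event and post it.
void NuPlayer::Renderer::notifySomething() {
    sp<AMessage> msg = mNotify->dup();      // the copy keeps the original target
    msg->setInt32("what", kWhatSomething);  // hypothetical payload field
    msg->post();                            // delivered on the owner's looper
}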


In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp

void NuPlayer::postScanSources() {
    if (mScanSourcesPending) {
        return;
    }

    sp<AMessage> msg = new AMessage(kWhatScanSources, id());
    msg->setInt32("generation", mScanSourcesGeneration);
    msg->post();

    mScanSourcesPending = true;
}

"mScanSourcesPending=true" means thekWhatScanSources message is pending in the Looper's message queue.

void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatScanSources:
        {
            mScanSourcesPending = false;

            instantiateDecoder(false, &mVideoDecoder);

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            status_t err;
            if ((err = mSource->feedMoreTSData()) != OK) {
                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                    // We're not currently decoding anything (no audio or
                    // video tracks found) and we just ran out of input data.

                    if (err == ERROR_END_OF_STREAM) {
                        notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                    } else {
                        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                    }
                }
                break;
            }

            if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
                msg->post(100000ll);
                mScanSourcesPending = true;
            }
            break;
        }
    }
}

status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    sp<MetaData> meta = mSource->getFormat(audio);

    if (meta == NULL) {
        return -EWOULDBLOCK;
    }
...
...
...
}


Note that mSource here refers to HTTPLiveSource.

At first, meta == NULL and *decoder == NULL, because no media segment has been downloaded or parsed yet (no buffers are stored in mDataSource).


In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\HTTPLiveSource.cpp

status_t NuPlayer::HTTPLiveSource::feedMoreTSData() {

    sp<LiveDataSource> source =
        static_cast<LiveDataSource *>(mLiveSession->getDataSource().get());

    for (int32_t i = 0; i < 50; ++i) {
        char buffer[188];
        ssize_t n = source->readAtNonBlocking(mOffset, buffer, sizeof(buffer));

        if (n == -EWOULDBLOCK) {
            break;
        }
...
...
    }

    return OK;
}


For the same reason as above, (n == -EWOULDBLOCK) is true at first and the for loop breaks early.

Therefore NuPlayer keeps calling msg->post(100000ll) to re-post the kWhatScanSources message every 100 ms.

So when does meta in instantiateDecoder() become non-NULL?

Let's go back and see what LiveSession does when onDownloadNext() executes.

In Media Framework :android_src\framework\base\media\libstagefright\httplive\LiveSession.cpp

void LiveSession::onDownloadNext() {
...
...
...
    mDataSource->queueBuffer(buffer);
    postMonitorQueue();
}

The downloaded buffers are stored in mDataSource's buffer queue.

Let's look at feedMoreTSData() in HTTPLiveSource again.

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\HTTPLiveSource.cpp

status_t NuPlayer::HTTPLiveSource::feedMoreTSData() {

    sp<LiveDataSource> source =
        static_cast<LiveDataSource *>(mLiveSession->getDataSource().get());

    for (int32_t i = 0; i < 50; ++i) {
        char buffer[188];
        ssize_t n = source->readAtNonBlocking(mOffset, buffer, sizeof(buffer));

        if (n == -EWOULDBLOCK) {
            break;
        }

        status_t err = mTSParser->feedTSPacket(buffer, sizeof(buffer));

        mOffset += n;
    }

    return OK;
}

Once there are buffers in the LiveDataSource's buffer queue, mTSParser->feedTSPacket() is called for each 188-byte TS packet.


In Media Framework :android_src\framework\base\media\libstagefright\mpegts\ATSParser.cpp

status_t ATSParser::feedTSPacket(const void *data, size_t size) {
    CHECK_EQ(size, kTSPacketSize);

    ABitReader br((const uint8_t *)data, kTSPacketSize);
    return parseTS(&br);
}


status_t ATSParser::parseTS(ABitReader *br) {


    unsigned sync_byte = br->getBits(8);
    CHECK_EQ(sync_byte, 0x47u);
...
...
    return OK;
}
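Before going further, it helps to recall the fixed TS packet layout that parseTS() walks through: every packet is 188 bytes, begins with the sync byte 0x47, and a 4-byte header carries the 13-bit PID identifying which stream the payload belongs to. A sketch of the header fields (per the MPEG-2 TS spec; this helper is illustrative, not ATSParser code):

// Illustrative: pull apart the 4-byte TS packet header with an ABitReader.
void parseTSHeaderSketch(ABitReader *br) {
    unsigned sync_byte = br->getBits(8);          // always 0x47
    CHECK_EQ(sync_byte, 0x47u);

    br->skipBits(1);                              // transport_error_indicator
    unsigned payload_unit_start = br->getBits(1); // 1 = a new PES packet starts here
    br->skipBits(1);                              // transport_priority
    unsigned PID = br->getBits(13);               // which stream this packet feeds
    br->skipBits(2);                              // transport_scrambling_control
    unsigned adaptation_field_control = br->getBits(2);
    unsigned continuity_counter = br->getBits(4);

    // PID 0 carries the PAT, which points at PMTs; PMTs map PIDs to the
    // audio/video elementary streams whose PES payloads reach parsePES().
}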


parseTS() deals with the format and specification of the TS (transport stream) container, and for audio/video payloads the parsing eventually ends up in parsePES().


status_t ATSParser::Stream::parsePES(ABitReader *br) {
...
...
    onPayloadData(
            PTS_DTS_flags, PTS, DTS, br->data(), dataLength);

}

void ATSParser::Stream::onPayloadData(
        unsigned PTS_DTS_flags, uint64_t PTS, uint64_t DTS,
        const uint8_t *data, size_t size) {
    LOGV("onPayloadData mStreamType=0x%02x", mStreamType);

    int64_t timeUs = 0ll;  // no presentation timestamp available.
    if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) {
        timeUs = mProgram->convertPTSToTimestamp(PTS);
    }

    status_t err = mQueue->appendData(data, size, timeUs);

    sp<ABuffer> accessUnit;
    while ((accessUnit = mQueue->dequeueAccessUnit()) != NULL) {
        if (mSource == NULL) {
            sp<MetaData> meta = mQueue->getFormat();

            if (meta != NULL) {
                LOGV("Stream PID 0x%08x of type 0x%02x now has data.",
                     mElementaryPID, mStreamType);

                mSource = new AnotherPacketSource(meta);
                mSource->queueAccessUnit(accessUnit);
            }
        } else if (mQueue->getFormat() != NULL) {
            // After a discontinuity we invalidate the queue's format
            // and won't enqueue any access units to the source until
            // the queue has reestablished the new format.

            if (mSource->getFormat() == NULL) {
                mSource->setFormat(mQueue->getFormat());
            }
            mSource->queueAccessUnit(accessUnit);
        }
    }
}

(Here mQueue refers to the ElementaryStreamQueue implemented in android_src\framework\base\media\libstagefright\mpegts\ESQueue.cpp.)

onPayloadData() finally puts each H264 access unit into mSource (an AnotherPacketSource) and sets mSource's format.

Back to NuPlayer

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp


status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    sp<MetaData> meta = mSource->getFormat(audio);

    if (meta == NULL) {
        return -EWOULDBLOCK;
    }

    sp<AMessage> notify =
        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                     id());

    *decoder = audio ? new Decoder(notify) :
                       new Decoder(notify, mNativeWindow);
    looper()->registerHandler(*decoder);

    (*decoder)->configure(meta);

    return OK;
}

 

Let's take a look at mSource->getFormat().

 

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\HTTPLiveSource.cpp

 

sp<MetaData> NuPlayer::HTTPLiveSource::getFormat(bool audio) {
    ATSParser::SourceType type =
        audio ? ATSParser::AUDIO : ATSParser::VIDEO;

    sp<AnotherPacketSource> source =
        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());

    if (source == NULL) {
        return NULL;
    }

    return source->getFormat();
}

 

We get a non-NULL format once the parsePES() process has completed for that stream.

 

Now it is time to construct the Decoder. Note that NuPlayer also passes a kWhatVideoNotify message (kWhatAudioNotify for audio) when constructing the Decoder.

 

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDecoder.cpp

 

NuPlayer::Decoder::Decoder(
        const sp<AMessage> &notify,
        const sp<NativeWindowWrapper> &nativeWindow)
    : mNotify(notify),
      mNativeWindow(nativeWindow) {
}

 

void NuPlayer::Decoder::configure(const sp<MetaData> &meta) {

    sp<AMessage> notifyMsg =
        new AMessage(kWhatCodecNotify, id());

    bool needDedicatedLooper = !strncasecmp(mime, "video/", 6);

    mCodec = new ACodec;

    if (needDedicatedLooper && mCodecLooper == NULL) {
        mCodecLooper = new ALooper;
        mCodecLooper->setName("NuPlayerDecoder");
        mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
    }

    (needDedicatedLooper ? mCodecLooper : looper())->registerHandler(mCodec);

    mCodec->setNotificationMessage(notifyMsg);
    mCodec->initiateSetup(format);
}

 

A message "kWhatCodecNotify" is passed toACodec for notify back. The role of ACodec is like OMXCodec in Stagefright

 

In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp 

 

ACodec::ACodec()
    : mNode(NULL),
      mSentFormat(false) {
    mUninitializedState = new UninitializedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);
    mOutputPortSettingsChangedState = new OutputPortSettingsChangedState(this);
    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    changeState(mUninitializedState);
}

 

ACodec inherits from AHierarchicalStateMachine and implements a state machine.

 

The code above registers the following states:

==================================

UninitializedState

LoadedToIdleState

IdleToExecutingState

ExecutingState 

OutputPortSettingsChangedState 

ExecutingToIdleState 

IdleToLoadedState 

FlushingState 

================================== 
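Each of these is a state object; a state typically overrides stateEntered() to kick off the work of that phase and onMessageReceived()/onOMXEvent() to react to events, deferring anything it does not understand to its parent state. A sketch of the general shape (this state and its names are hypothetical, not one of the real ACodec states):

struct SomeWaitingState : public AState {
    virtual void stateEntered() {
        // Kick off this phase's work, e.g. send an OMX command,
        // then sit in this state until the component replies.
    }

    virtual bool onMessageReceived(const sp<AMessage> &msg) {
        // Handle only what this state understands; returning false
        // lets the hierarchical machine try the parent state instead.
        return false;
    }
};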

 

In Media Framework :android_src\framework\base\media\libstagefright\foundation\AHierarchicalStateMachine.cpp

 

void AHierarchicalStateMachine::changeState(const sp<AState> &state) {
...
...
    for (size_t i = 0; i < A.size(); ++i) {
        A.editItemAt(i)->stateExited();
    }

    for (size_t i = B.size(); i-- > 0;) {
        B.editItemAt(i)->stateEntered();
    }
}

 

Here A is the list of states being exited and B the list being entered, walking from the old state up to the common ancestor and back down to the new one. Every state in ACodec except UninitializedState overrides stateEntered(), but none of them overrides stateExited().

 

In Media Framework  :android_src\framework\base\media\libstagefright\ACodec.cpp  

 

void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(id());
    msg->post();
}

 

To be continued.

 

 

 

Android 4.0 Ice Cream Sandwich Media Framework (6)


Continued (6)

---===NuPlayer===---




In part 5, initiateSetup() was called from NuPlayerDecoder and posted the kWhatSetup message.

Note that ACodec sets itself as the target (handler) of this message.

In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp

void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(id());
    msg->post();
}


bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;
    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }
    }
    return handled;
}


void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);

    sp<IOMX> omx = client.interface();

    AString mime;
    CHECK(msg->findString("mime", &mime));

    Vector<String8> matchingCodecs;
    OMXCodec::findMatchingCodecs(
            mime.c_str(),
            false, // createEncoder
            NULL,  // matchComponentName
            0,     // flags
            &matchingCodecs);

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = NULL;

    AString componentName;

    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs.itemAt(matchIndex).string();
        status_t err = omx->allocateNode(componentName.c_str(), observer, &node);
    }

    sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id());
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->configureCodec(mime.c_str(), msg);

    sp<RefBase> obj;
    if (msg->findObject("native-window", &obj)
            && strncmp("OMX.google.", componentName.c_str(), 11)) {
        sp<NativeWindowWrapper> nativeWindow(
                static_cast<NativeWindowWrapper *>(obj.get()));
        CHECK(nativeWindow != NULL);
        mCodec->mNativeWindow = nativeWindow->getNativeWindow();
    }

    CHECK_EQ((status_t)OK, mCodec->initNativeWindow());

    CHECK_EQ(omx->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle),
             (status_t)OK);

    mCodec->changeState(mCodec->mLoadedToIdleState);
}




struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }
};




void ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    setComponentRole(false /* isEncoder */, mime);

    if (!strncasecmp(mime, "video/", 6)) {
        int32_t width, height;
        CHECK(msg->findInt32("width", &width));
        CHECK(msg->findInt32("height", &height));

        CHECK_EQ(setupVideoDecoder(mime, width, height),
                 (status_t)OK);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
        int32_t numChannels, sampleRate;
        CHECK(msg->findInt32("channel-count", &numChannels));
        CHECK(msg->findInt32("sample-rate", &sampleRate));

        CHECK_EQ(setupAACDecoder(numChannels, sampleRate), (status_t)OK);
    }
...
...
}

The code above sets the component's attributes, such as the input/output ports' parameters.
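Concretely, such port configuration boils down to getParameter/setParameter round trips on the OMX node. A hedged sketch of what adjusting a video port definition can look like (simplified; the real setupVideoDecoder() performs several calls of this kind):

// Sketch: read-modify-write the output port definition (simplified).
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);                // fills in nSize / nVersion
def.nPortIndex = kPortIndexOutput;

CHECK_EQ(mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)),
         (status_t)OK);

def.format.video.nFrameWidth = width;    // taken from the stream's format
def.format.video.nFrameHeight = height;

CHECK_EQ(mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)),
         (status_t)OK);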

void ACodec::LoadedToIdleState::stateEntered() {
    LOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
...
    }
}


status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

After sendCommand(node, OMX_CommandStateSet, OMX_StateIdle), the component sends back a response, which triggers onOMXEvent().

bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);

            CHECK_EQ(mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting),
                     (status_t)OK);

            mCodec->changeState(mCodec->mIdleToExecutingState);

            return true;
        }
    }
}

As soon as the component reports it has reached OMX_StateIdle, another OMX_CommandStateSet command requesting OMX_StateExecuting is sent to the component right away.

Note that ACodec always waits for the component's response in a transitional state such as LoadedToIdleState or IdleToExecutingState, even if that state otherwise does nothing.
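The lifecycle driven here follows the standard OMX IL sequence, with ACodec parked in a transitional state while each command is in flight:

OMX_StateLoaded
    --- sendCommand(OMX_StateIdle), buffers allocated ---> (in LoadedToIdleState)
OMX_StateIdle
    --- sendCommand(OMX_StateExecuting) -----------------> (in IdleToExecutingState)
OMX_StateExecuting                                         (ExecutingState)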

void ACodec::IdleToExecutingState::stateEntered() {
    LOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        default:
            return BaseState::onMessageReceived(msg);
    }
}

Again, the component responds that the command has completed.


bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
            CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting);

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }
    }
}


void ACodec::ExecutingState::resume() {
    submitOutputBuffers();

    // Post the first input buffer.
    CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u);
    BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(0);

    postFillThisBuffer(info);

    mActive = true;
}






void ACodec::ExecutingState::submitOutputBuffers() {
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        LOGV("[%s] calling fillBuffer %p",
             mCodec->mComponentName.c_str(), info->mBufferID);

        CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
                 (status_t)OK);

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }
}


void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", ACodec::kWhatFillThisBuffer);
    notify->setPointer("buffer-id", info->mBufferID);

    info->mData->meta()->clear();
    notify->setObject("buffer", info->mData);

    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id());
    reply->setPointer("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

Here, in the executing state, postFillThisBuffer() is first called from ExecutingState::resume(); the first input buffers are used to carry the SPS/PPS information.

The notify message (with "what" set to ACodec::kWhatFillThisBuffer) is sent back to NuPlayerDecoder together with a reply message (kWhatInputBufferFilled).
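Notice how postFillThisBuffer() flips the buffer's mStatus from OWNED_BY_US to OWNED_BY_UPSTREAM. This ownership bookkeeping is what keeps ACodec, the OMX component, and the downstream consumers from touching the same buffer at once; the states seen in this walkthrough are:

OWNED_BY_US             - held by ACodec, free to be handed out
OWNED_BY_UPSTREAM       - lent to NuPlayerDecoder/NuPlayer to be filled with input
OWNED_BY_COMPONENT      - lent to the OMX component (emptyBuffer / fillBuffer)
OWNED_BY_DOWNSTREAM     - decoded output lent to NuPlayer/NuPlayerRenderer
OWNED_BY_NATIVE_WINDOW  - decoded output queued to the native window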

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDecoder.cpp

void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatCodecNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                LOGD("NuPlayer::Decoder::onMessageReceived: onFillThisBuffer(msg);");
                onFillThisBuffer(msg);
            } else {
                sp<AMessage> notify = mNotify->dup();
                notify->setMessage("codec-request", msg);
                notify->post();
            }
            break;
        }
    }
}


void NuPlayer::Decoder::onFillThisBuffer(const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    sp<ABuffer> outBuffer;

    if (mCSDIndex < mCSD.size()) {
        outBuffer = mCSD.editItemAt(mCSDIndex++);
        outBuffer->meta()->setInt64("timeUs", 0);

        reply->setObject("buffer", outBuffer);
        reply->post();
        return;
    }

    sp<AMessage> notify = mNotify->dup();
    // wrap the ACodec::kWhatFillThisBuffer message and forward it to NuPlayer
    notify->setMessage("codec-request", msg);
    notify->post();
}

As mentioned, the first few buffers are used to carry the SPS/PPS information (stored in mCSD), so the condition (mCSDIndex < mCSD.size()) holds at first and the reply message is used to answer ACodec directly.

void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findPointer("buffer-id", &bufferID));
...
    switch (mode) {
        case RESUBMIT_BUFFERS:
        {
...
            CHECK_EQ(mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        buffer->size(),
                        flags,
                        timeUs),
                     (status_t)OK);

            info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

            getMoreInputDataIfPossible();
        }
    }
}

ACodec tries to get more input buffers filled


void ACodec::BaseState::getMoreInputDataIfPossible() {
    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

postFillThisBuffer() is called again and again until the condition (mCSDIndex < mCSD.size()) becomes false, which means all of the codec-specific data has been sent to the component.

From then on, in NuPlayer::Decoder::onFillThisBuffer() the request is wrapped and forwarded to NuPlayer as a kWhatVideoNotify/kWhatAudioNotify message, asking for undecoded frame buffers.

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp


void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            bool audio = msg->what() == kWhatAudioNotify;

            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);
            }
        }
    }
}




status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    sp<ABuffer> accessUnit;

    bool dropAccessUnit;
    do {
        status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);

        if (!audio) {
            ++mNumFramesTotal;
        }

        dropAccessUnit = false;
        if (!audio
                && mVideoLateByUs > 100000ll
                && mVideoIsAVC
                && !IsAVCReferenceFrame(accessUnit)) {
            dropAccessUnit = true;
            ++mNumFramesDropped;
        }
    } while (dropAccessUnit);

    // LOGV("returned a valid buffer of %s data", audio ? "audio" : "video");

    reply->setObject("buffer", accessUnit);
    reply->post();

    return OK;
}

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\HTTPLiveSource.cpp


status_t NuPlayer::HTTPLiveSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {
    ATSParser::SourceType type =
        audio ? ATSParser::AUDIO : ATSParser::VIDEO;

    sp<AnotherPacketSource> source =
        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());

    return source->dequeueAccessUnit(accessUnit);
}


Here NuPlayer dequeues an H264 access unit and sends it to ACodec for decoding.

In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp

bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatInputBufferFilled:
        {
            onInputBufferFilled(msg);
            break;
        }
    }
    return true;
}

The input-side loop is thus:

onInputBufferFilled -> mOMX->emptyBuffer -> getMoreInputDataIfPossible -> postFillThisBuffer -> feedDecoderInputData -> ...

again and again, feeding the decoder.

=======================================================================
Let's see how the EMPTY_BUFFER_DONE message from the OMX component is handled.

In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp

struct CodecObserver : public BnOMXObserver {

    virtual void onMessage(const omx_message &omx_msg) {
        sp<AMessage> msg = mNotify->dup();

        msg->setInt32("type", omx_msg.type);
        msg->setPointer("node", omx_msg.node);

        switch (omx_msg.type) {
            case omx_message::EMPTY_BUFFER_DONE:
            {
                msg->setPointer("buffer", omx_msg.u.buffer_data.buffer);
                break;
            }
        }

        msg->post();
    }
};




bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {

    switch (type) {
        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findPointer("buffer", &bufferID));

            return onOMXEmptyBufferDone(bufferID);
        }
    }
}


bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_idbufferID) {
   LOGV("[%s] onOMXEmptyBufferDone %p",
        mCodec->mComponentName.c_str(), bufferID);


   BufferInfo *info =
       mCodec->findBufferByID(kPortIndexInput, bufferID);


   CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
   info->mStatus = BufferInfo::OWNED_BY_US;


   PortMode mode = getPortMode(kPortIndexInput);


   switch (mode) {
       case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
           break;
   }


   return true;
}

As shown above, when EMPTY_BUFFER_DONE arrives, ACodec calls postFillThisBuffer() again.

=======================================================================
Let's see how the FILL_BUFFER_DONE message from the OMX component is handled.

In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp

struct CodecObserver : public BnOMXObserver {

    virtual void onMessage(const omx_message &omx_msg) {
        sp<AMessage> msg = mNotify->dup();

        msg->setInt32("type", omx_msg.type);
        msg->setPointer("node", omx_msg.node);

        switch (omx_msg.type) {
            case omx_message::FILL_BUFFER_DONE:
            {
                msg->setPointer(
                        "buffer", omx_msg.u.extended_buffer_data.buffer);
                msg->setInt32(
                        "range_offset",
                        omx_msg.u.extended_buffer_data.range_offset);
                msg->setInt32(
                        "range_length",
                        omx_msg.u.extended_buffer_data.range_length);
                msg->setInt32(
                        "flags",
                        omx_msg.u.extended_buffer_data.flags);
                msg->setInt64(
                        "timestamp",
                        omx_msg.u.extended_buffer_data.timestamp);
                msg->setPointer(
                        "platform_private",
                        omx_msg.u.extended_buffer_data.platform_private);
                msg->setPointer(
                        "data_ptr",
                        omx_msg.u.extended_buffer_data.data_ptr);
                break;
            }
        }

        msg->post();
    }
};


bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    IOMX::node_id nodeID;
    CHECK(msg->findPointer("node", &nodeID));
    CHECK_EQ(nodeID, mCodec->mNode);

    switch (type) {
        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findPointer("buffer", &bufferID));

            int32_t rangeOffset, rangeLength, flags;
            int64_t timeUs;
            void *platformPrivate;
            void *dataPtr;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findPointer("platform_private", &platformPrivate));
            CHECK(msg->findPointer("data_ptr", &dataPtr));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    platformPrivate,
                    dataPtr);
        }
    }
}

bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        void *platformPrivate,
        void *dataPtr) {
    LOGV("[%s] onOMXFillBufferDone %p time %lld us",
         mCodec->mComponentName.c_str(), bufferID, timeUs);

    ssize_t index;
    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);

    info->mStatus = BufferInfo::OWNED_BY_US;

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mSentFormat) {
                mCodec->sendFormatChange();
            }

            if (mCodec->mNativeWindow == NULL) {
                info->mData->setRange(rangeOffset, rangeLength);
            }

            info->mData->meta()->setInt64("timeUs", timeUs);

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
            notify->setPointer("buffer-id", info->mBufferID);
            notify->setObject("buffer", info->mData);

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec->id());
            reply->setPointer("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", ACodec::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }
    }

    return true;
}

In onOMXFillBufferDone(), ACodec sends a notify message (kWhatCodecNotify) back to NuPlayerDecoder with "what" set to ACodec::kWhatDrainThisBuffer, together with a reply message (kWhatOutputBufferDrained).

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDecoder.cpp

void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
    // LOGD("In NuPlayer::Decoder::onMessageReceived");
    switch (msg->what()) {
        case kWhatCodecNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            sp<AMessage> notify = mNotify->dup();
            notify->setMessage("codec-request", msg);
            notify->post();

            break;
        }
    }
}

NuPlayerDecoder then leaves the decision to NuPlayer. Note that the message from ACodec is wrapped inside the notify-back message as "codec-request".

In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp

void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {

        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

...
...
            else {
                CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);
                renderBuffer(audio, codecRequest);
            }
            break;
        }
    }
}

Note that the message from ACodec is carried along (now named codecRequest).

void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
    // LOGV("renderBuffer %s", audio ? "audio" : "video");

    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));

    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());

    mRenderer->queueBuffer(audio, buffer, reply);
}

In renderBuffer(), the reply message (kWhatOutputBufferDrained) is passed on to NuPlayerRenderer.

In Media Framework  :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerRenderer.cpp

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setObject("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

In NuPlayer::Renderer::queueBuffer(), the reply message (kWhatOutputBufferDrained) destined for ACodec is now named notifyConsumed, and a kWhatQueueBuffer message is posted.


void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    LOGD("NuPlayer::Renderer::onMessageReceived %d", msg->what());
    switch (msg->what()) {
        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }
    }
}




void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));
    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    LOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone();
}


void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {  // not empty
        postDrainVideoQueue();
    }
}

NuPlayerRenderer maintains mVideoQueue and mAudioQueue to store the decoded buffers, and syncs their start so that audio and video begin within 0.1 s of each other.

Now postDrainVideoQueue() is called.

void NuPlayer::Renderer::postDrainVideoQueue() {

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

After the media delay is calculated, the kWhatDrainVideoQueue message is posted with that delay.
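A quick worked example of the delay computation (numbers made up): suppose the anchor was set when media time 0 played at real time T. A buffer stamped mediaTimeUs = 500000 maps to realTimeUs = (500000 - 0) + T; if ALooper::GetNowUs() is currently T + 460000, then delayUs = 40000, i.e. the drain message fires 40 ms from now, right at the frame's presentation time.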


void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    LOGD("NuPlayer::Renderer::onMessageReceived %d", msg->what());
    switch (msg->what()) {
        case kWhatDrainVideoQueue:
        {
            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }
    }
}


void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    int64_t realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;

    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        LOGV("video late by %lld us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        LOGD("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    notifyPosition();
}

entry->mNotifyConsumed is the kWhatOutputBufferDrained message targeted at ACodec; frames more than 40 ms late are posted with render = 0 and are therefore dropped instead of rendered.

In Media Framework  :android_src\framework\base\media\libstagefright\ACodec.cpp

bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOutputBufferDrained:
        {
            onOutputBufferDrained(msg);
            break;
        }
    }

    return true;
}

void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findPointer("buffer-id", &bufferID));

    ssize_t index;
    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM);

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0) {
        // The client wants this buffer to be rendered.

        if (mCodec->mNativeWindow->queueBuffer(
                    mCodec->mNativeWindow.get(),
                    info->mGraphicBuffer.get()) == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            mCodec->signalError();
            info->mStatus = BufferInfo::OWNED_BY_US;
        }
    } else {
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                if (info != NULL) {
                    LOGV("[%s] calling fillBuffer %p",
                         mCodec->mComponentName.c_str(), info->mBufferID);

                    CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
                             (status_t)OK);

                    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                }
            }
            break;
        }
    }
}


After handing the decoded buffer to the native window, ACodec asks the component to fill more decoded buffers via mOMX->fillBuffer(mCodec->mNode, info->mBufferID).

And so the cycle repeats, again and again.
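The output side thus settles into its own steady-state loop, mirroring the input loop sketched earlier:

onOMXFillBufferDone -> kWhatDrainThisBuffer -> NuPlayer::renderBuffer -> Renderer::queueBuffer -> onDrainVideoQueue -> kWhatOutputBufferDrained -> onOutputBufferDrained -> mOMX->fillBuffer -> ...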

=======================================================================

To be continued ...... ?

 

 

 

 

 
