2012年3月17日 星期六

Android 4.0 Ice Cream Sandwich Media Framework (2)


Continued (2)

---===Stagefright===---



In Part 1, we traced the first two lines of the APK code; now let's continue with the rest:

            mMediaPlayer = new MediaPlayer();
            mMediaPlayer.setDataSource(path);
            mMediaPlayer.prepare();
            mMediaPlayer.start();

mMediaPlayer.prepare() eventually calls into the prepareAsync path of Stagefright (the synchronous prepare() and prepareAsync() converge on the same internal code):

In Media Framework: android_src\frameworks\base\media\libmediaplayerservice\StagefrightPlayer.cpp

status_t StagefrightPlayer::prepareAsync() {
    return mPlayer->prepareAsync();
}
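
Here mPlayer is the AwesomePlayer instance that StagefrightPlayer creates in its constructor (same file, slightly abridged):

StagefrightPlayer::StagefrightPlayer()
    : mPlayer(new AwesomePlayer) {
    mPlayer->setListener(this);
}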

In Media Framework: android_src\frameworks\base\media\libstagefright\AwesomePlayer.cpp

status_t AwesomePlayer::prepareAsync_l() {

    mQueue.start();   // start the event queue's worker thread

    // wrap onPrepareAsyncEvent() in an event and post it to the queue
    mAsyncPrepareEvent = new AwesomeEvent(this, &AwesomePlayer::onPrepareAsyncEvent);
    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

Here mQueue is an instance of TimedEventQueue, which runs a worker thread and dispatches events posted to it.

In prepareAsync_l(), an AwesomeEvent wrapping onPrepareAsyncEvent() is posted to mQueue, which later invokes it on that worker thread.
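
For reference, AwesomeEvent is a small adapter defined at the top of AwesomePlayer.cpp (shown here slightly abridged): it stores a member-function pointer and calls it when the queue fires the event.

struct AwesomeEvent : public TimedEventQueue::Event {
    AwesomeEvent(
            AwesomePlayer *player,
            void (AwesomePlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual void fire(TimedEventQueue * /* queue */, int64_t /* now_us */) {
        (mPlayer->*mMethod)();   // e.g. AwesomePlayer::onPrepareAsyncEvent()
    }

private:
    AwesomePlayer *mPlayer;
    void (AwesomePlayer::*mMethod)();
};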


void AwesomePlayer::onPrepareAsyncEvent() {

    // instantiate the video decoder for mVideoTrack
    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder();
    }

    // instantiate the audio decoder for mAudioTrack
    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();
    }

    modifyFlags(PREPARING_CONNECTED, SET);

    if (isStreamingHTTP()) {
        postBufferingEvent_l();   // keep buffering before reporting "prepared"
    } else {
        finishAsyncPrepare_l();
    }
}

Recall from Part 1 that MediaExtractor split the stream into mAudioTrack and mVideoTrack, so now it is time to instantiate the audio and video decoders.

status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false, // createEncoder
            mVideoTrack,
            NULL, flags, USE_SURFACE_ALLOC ? mNativeWindow : NULL);

    if (mVideoSource != NULL) {
        status_t err = mVideoSource->start();
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}
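
initAudioDecoder() follows the same pattern; the main difference (same file, abridged) is that raw PCM tracks bypass OMXCodec entirely:

status_t AwesomePlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;   // already PCM, no decoder needed
    } else {
        mAudioSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);
    }

    // ... then mAudioSource->start(), as in the video case ...
}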

Here AwesomePlayer uses OMXCodec to find a suitable codec for the track (OMX = OpenMAX).

The following table lists all the decoders Stagefright knows about. Entries for the same MIME type are tried in order, so vendor (hardware) components take precedence over the OMX.google software decoders.

In Media Framework: android_src\frameworks\base\media\libstagefright\OMXCodec.cpp

static const CodecInfo kDecoderInfo[] = {
    { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
    { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.google.mp3.decoder" },
    { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, "OMX.Nvidia.mp2.decoder" },
    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.google.amrnb.decoder" },
    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.google.amrwb.decoder" },
    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.google.aac.decoder" },
    { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "OMX.google.g711.alaw.decoder" },
    { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "OMX.google.g711.mlaw.decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.DUCATI1.VIDEO.DECODER" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.google.mpeg4.decoder" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.DUCATI1.VIDEO.DECODER" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.google.h263.decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.DUCATI1.VIDEO.DECODER" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.google.h264.decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.google.avc.decoder" },
    { MEDIA_MIMETYPE_AUDIO_VORBIS, "OMX.google.vorbis.decoder" },
    { MEDIA_MIMETYPE_VIDEO_VPX, "OMX.google.vpx.decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, "OMX.Nvidia.mpeg2v.decode" },
};

Let's continue and see what OMXCodec::Create() does.

sp<MediaSource> OMXCodec::Create(
        const sp<IOMX> &omx,
        const sp<MetaData> &meta, bool createEncoder,
        const sp<MediaSource> &source,
        const char *matchComponentName,
        uint32_t flags,
        const sp<ANativeWindow> &nativeWindow) {

    const char *mime;
    bool success = meta->findCString(kKeyMIMEType, &mime);

    // collect every component in kDecoderInfo that can handle this MIME type
    Vector<String8> matchingCodecs;
    findMatchingCodecs(
            mime, createEncoder, matchComponentName, flags, &matchingCodecs);

    sp<OMXCodecObserver> observer = new OMXCodecObserver;
    IOMX::node_id node = 0;

    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        const char *componentNameBase = matchingCodecs[i].string();
        const char *componentName = componentNameBase;

        LOGV("Attempting to allocate OMX node '%s'", componentName);

        uint32_t quirks = getComponentQuirks(componentNameBase, createEncoder);

        status_t err = omx->allocateNode(componentName, observer, &node);

        if (err == OK) {
            LOGV("Successfully allocated OMX node '%s'", componentName);

            sp<OMXCodec> codec = new OMXCodec(
                    omx, node, quirks, flags,
                    createEncoder, mime, componentName,
                    source, nativeWindow);

            return codec;
        }
    }

    return NULL;
}




void OMXCodec::findMatchingCodecs(
        const char *mime,
        bool createEncoder, const char *matchComponentName,
        uint32_t flags,
        Vector<String8> *matchingCodecs) {
    matchingCodecs->clear();

    for (int index = 0;; ++index) {
        const char *componentName;

        // look up the index-th decoder for this MIME type in kDecoderInfo
        componentName = GetCodec(
                    kDecoderInfo,
                    sizeof(kDecoderInfo) / sizeof(kDecoderInfo[0]),
                    mime, index);

        if (!componentName) {
            return;   // no more matches for this MIME type
        }

        matchingCodecs->push(String8(componentName));
    }
}
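
GetCodec() (a static helper in the same file) just walks the table and returns the index-th entry whose MIME type matches, roughly as follows:

static const char *GetCodec(
        const CodecInfo *info, size_t numInfos,
        const char *mime, int index) {
    CHECK(index >= 0);

    for (size_t i = 0; i < numInfos; ++i) {
        if (!strcasecmp(mime, info[i].mMime)) {
            if (index == 0) {
                return info[i].mCodecName;   // index-th match for this MIME type
            }
            --index;
        }
    }

    return 0;   // no more matches
}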

After a matching codec is found, an OMXCodec instance is created and returned to AwesomePlayer (stored as mVideoSource for video and mAudioSource for audio).

Let's look at the details of omx->allocateNode(componentName, observer, &node) above.

In Media Framework: android_src\frameworks\base\media\libstagefright\omx\OMX.cpp

status_t OMX::allocateNode(
        const char *name, const sp<IOMXObserver> &observer, node_id *node) {

    OMXNodeInstance *instance = new OMXNodeInstance(this, observer);

    OMX_COMPONENTTYPE *handle;
    OMX_ERRORTYPE err = mMaster->makeComponentInstance(
            name, &OMXNodeInstance::kCallbacks,
            instance, &handle);

    return OK;
}

OMX delegates the work to OMXMaster to make the component instance.

In Media Framework: android_src\frameworks\base\media\libstagefright\omx\OMXMaster.cpp

OMX_ERRORTYPE OMXMaster::makeComponentInstance(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {

    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));

    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
    OMX_ERRORTYPE err =
        plugin->makeComponentInstance(name, callbacks, appData, component);

    return err;
}
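
mPluginByComponentName maps each component name to the plugin that provides it. The plugins themselves are registered in the OMXMaster constructor, which (roughly, in the same file) loads the vendor hardware plugin and the Google software plugin:

OMXMaster::OMXMaster()
    : mVendorLibHandle(NULL) {
    addVendorPlugin();             // dlopen()s the vendor's libstagefrighthw.so, if present
    addPlugin(new SoftOMXPlugin);  // Google's software codecs
}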

In our case the component is a software codec, so SoftOMXPlugin makes the component instance.

In Media Framework: android_src\frameworks\base\media\libstagefright\omx\SoftOMXPlugin.cpp

OMX_ERRORTYPE SoftOMXPlugin::makeComponentInstance(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {

    for (size_t i = 0; i < kNumComponents; ++i) {
        if (strcmp(name, kComponents[i].mName)) {
            continue;
        }

        // build the library name, e.g. "libstagefright_soft_h264dec.so"
        AString libName = "libstagefright_soft_";
        libName.append(kComponents[i].mLibNameSuffix);
        libName.append(".so");

        void *libHandle = dlopen(libName.c_str(), RTLD_NOW);

        // look up the C++-mangled factory function exported by the library
        CreateSoftOMXComponentFunc createSoftOMXComponent =
            (CreateSoftOMXComponentFunc)dlsym(
                    libHandle,
                    "_Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPE"
                    "PvPP17OMX_COMPONENTTYPE");

        sp<SoftOMXComponent> codec =
            (*createSoftOMXComponent)(name, callbacks, appData, component);

        return OMX_ErrorNone;
    }

    return OMX_ErrorInvalidComponentName;
}

Something interesting happens here: the codec is found and instantiated by dlopen()'ing a shared library and calling the createSoftOMXComponent() function it exports; the long string passed to dlsym() is simply that function's C++-mangled name. If we check the Android filesystem under /system/lib, we can find the codec libraries there, for example (a small standalone sketch of this dlopen/dlsym pattern follows the list):

===

libstagefright_soft_aacdec.so
libstagefright_soft_amrdec.so
libstagefright_soft_g711dec.so
libstagefright_soft_h264dec.so
libstagefright_soft_mp3dec.so
libstagefright_soft_mpeg4dec.so
libstagefright_soft_vorbisdec.so
libstagefright_soft_vpxdec.so

===
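
Here is a minimal, standalone sketch (not Android source) of the dlopen/dlsym pattern used above; the library name follows the libstagefright_soft_*.so convention and the symbol is the mangled name of createSoftOMXComponent():

#include <dlfcn.h>
#include <stdio.h>

int main() {
    void *handle = dlopen("libstagefright_soft_h264dec.so", RTLD_NOW);
    if (handle == NULL) {
        fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return 1;
    }

    // The symbol demangles to createSoftOMXComponent(char const*,
    // OMX_CALLBACKTYPE const*, void*, OMX_COMPONENTTYPE**).
    void *sym = dlsym(handle,
            "_Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPE"
            "PvPP17OMX_COMPONENTTYPE");
    printf("factory symbol at %p\n", sym);

    dlclose(handle);
    return 0;
}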


Taking H.264 (AVC) as the example, libstagefright_soft_h264dec.so exports the factory function from SoftAVC.cpp:

In Media Framework: android_src\frameworks\base\media\libstagefright\codecs\on2\h264dec\SoftAVC.cpp

android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVC(name, callbacks, appData, component);
}

SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mWidth(320),
      mHeight(240),
      mPictureSize(mWidth * mHeight * 3 / 2),
      mCropLeft(0),
      mCropTop(0),
      mCropWidth(mWidth),
      mCropHeight(mHeight),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mOutputPortSettingsChange(NONE),
      mSignalledError(false) {
    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}

SoftAVC inherits from SimpleSoftOMXComponent; in other words, it is an OMX component.

By the OpenMAX IL specification, a component exposes "ports" through which buffers are exchanged with its client.

initPorts() sets up the parameters of the input port (compressed AVC bitstream) and the output port (decoded raw frames), roughly as sketched below.

Take a look at the constructor of SimpleSoftOMXComponent:

In Media Framework: android_src\frameworks\base\media\libstagefright\omx\SimpleSoftOMXComponent.cpp

SimpleSoftOMXComponent::SimpleSoftOMXComponent(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftOMXComponent(name, callbacks, appData, component),
      mLooper(new ALooper),
      mHandler(new AHandlerReflector(this)),
      mState(OMX_StateLoaded),
      mTargetState(OMX_StateLoaded) {
    mLooper->setName(name);
    mLooper->registerHandler(mHandler);


    mLooper->start(
            false, // runOnCallingThread
            false, // canCallJava
            ANDROID_PRIORITY_FOREGROUND);
}

Here SimpleSoftOMXComponent uses the AMessage/AHandler/ALooper model: incoming OMX calls are turned into messages and handled later on the component's own looper thread.
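
For example, sendCommand() does not perform the state change directly; it just posts an AMessage that is handled in onMessageReceived() on the looper thread (from SimpleSoftOMXComponent.cpp, slightly abridged):

OMX_ERRORTYPE SimpleSoftOMXComponent::sendCommand(
        OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR data) {
    CHECK(data == NULL);

    sp<AMessage> msg = new AMessage(kWhatSendCommand, mHandler->id());
    msg->setInt32("cmd", cmd);
    msg->setInt32("param", param);
    msg->post();   // handled later on the looper thread

    return OMX_ErrorNone;
}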

In Media Framework: android_src\frameworks\base\media\libstagefright\codecs\on2\h264dec\SoftAVC.cpp

status_t SoftAVC::initDecoder() {
    if (H264SwDecInit(&mHandle, 0) == H264SWDEC_OK) {
        return OK;
    }
    return UNKNOWN_ERROR;
}

In Media Framework: android_src\frameworks\base\media\libstagefright\codecs\on2\h264dec\source\H264SwDecApi.c

H264SwDecRet H264SwDecInit(H264SwDecInst *decInst, u32 noOutputReordering)
{


    pDecCont = (decContainer_t *)H264SwDecMalloc(sizeof(decContainer_t));
    pDecCont->decStat  = INITIALIZED;
    pDecCont->picNumber = 0;
    *decInst = (decContainer_t *)pDecCont;
    return(H264SWDEC_OK);


}

After omx->allocateNode(componentName, observer, &node) returns, we are back at new OMXCodec in OMXCodec::Create. Let's take a look at the OMXCodec constructor:

In Media Framework: android_src\frameworks\base\media\libstagefright\OMXCodec.cpp

OMXCodec::OMXCodec(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t quirks, uint32_t flags,
        bool isEncoder,
        const char *mime,
        const char *componentName,
        const sp<MediaSource> &source,
        const sp<ANativeWindow> &nativeWindow)
    : mOMX(omx),
      mOMXLivesLocally(omx->livesLocally(getpid())),
      mNode(node),
      mQuirks(quirks),
      mFlags(flags),
      mIsEncoder(isEncoder),
      mMIME(strdup(mime)),
      mComponentName(strdup(componentName)),
      mSource(source),
      mCodecSpecificDataIndex(0),
      mState(LOADED),
      mInitialBufferSubmit(true),
      mSignalledEOS(false),
      mNoMoreOutputData(false),
      mOutputPortSettingsHaveChanged(false),
      mSeekTimeUs(-1),
      mSeekMode(ReadOptions::SEEK_CLOSEST_SYNC),
      mTargetTimeUs(-1),
      mOutputPortSettingsChangedPending(false),
      mLeftOverBuffer(NULL),
      mPaused(false),
      mNativeWindow(
              (!strncmp(componentName, "OMX.google.", 11)
              || !strcmp(componentName, "OMX.Nvidia.mpeg2v.decode"))
                        ? NULL : nativeWindow) {
    mPortStatus[kPortIndexInput] = ENABLED;
    mPortStatus[kPortIndexOutput] = ENABLED;


    setComponentRole();
}
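
The constructor ends with setComponentRole(), which tells the component which standard OpenMAX role to take on. The real function in OMXCodec.cpp maps the MIME type to a role string through a table covering every supported format; the helper below (setRole and its body are illustrative, not the actual code) is only a simplified sketch of that idea:

static void setRole(const sp<IOMX> &omx, IOMX::node_id node, const char *mime) {
    const char *role = NULL;
    if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
        role = "video_decoder.avc";
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
        role = "audio_decoder.aac";
    }
    if (role == NULL) {
        return;
    }

    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);
    strncpy((char *)roleParams.cRole, role, OMX_MAX_STRINGNAME_SIZE - 1);
    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    omx->setParameter(
            node, OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}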

Back in AwesomePlayer::onPrepareAsyncEvent(), once the decoders have been initialized (and we are not streaming over HTTP), finishAsyncPrepare_l() completes the prepare:

In Media Framework: android_src\frameworks\base\media\libstagefright\AwesomePlayer.cpp

void AwesomePlayer::finishAsyncPrepare_l() {
    if (mIsAsyncPrepare) {
        notifyListener_l(MEDIA_PREPARED);
    }

    mPrepareResult = OK;
    mPreparedCondition.broadcast();   // wakes up a blocking prepare() call
}

For the synchronous mMediaPlayer.prepare() used in our APK, this broadcast unblocks the waiting prepare() call; with prepareAsync(), the MEDIA_PREPARED notification is delivered to the application's OnPreparedListener instead. Either way, the player is now prepared and ready for mMediaPlayer.start().
