Monday, March 19, 2012

Android 4.0 Ice Cream Sandwich Media Framework (3)

Continued (3)

---===Stagefright===---


In APK
            mMediaPlayer = new MediaPlayer();
            mMediaPlayer.setDataSource(path);   // throws IOException (handling omitted)
            mMediaPlayer.prepare();             // synchronous prepare; prepareAsync() also exists
            mMediaPlayer.start();

Start tracing from mMediaPlayer.start()
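Before reaching the media framework, the call crosses from Java through JNI into the native MediaPlayer client, which forwards it over Binder to mediaserver. A rough sketch of that client side (simplified; not the verbatim mediaplayer.cpp source):

    status_t MediaPlayer::start() {
        Mutex::Autolock _l(mLock);
        // mPlayer is the IMediaPlayer Binder proxy; this call lands in
        // MediaPlayerService::Client::start() inside the mediaserver process.
        status_t ret = mPlayer->start();
        // (state bookkeeping elided)
        return ret;
    }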



In Media Framework :android_src\framework\base\media\libmediaplayerservice\MediaPlayerService.cpp

status_t MediaPlayerService::Client::start()
{
    LOGV("[%d] start", mConnId);
    sp<MediaPlayerBase> p = getPlayer();
    p->setLooping(mLoop);
    return p->start();
}
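getPlayer() returns the player implementation that was selected earlier, at setDataSource() time. A simplified sketch of that selection (based on MediaPlayerService's getPlayerType(); the exact rules vary by release):

    static player_type getPlayerType(const char *url) {
        // Streaming protocols such as RTSP are routed to NuPlayer...
        if (!strncasecmp("rtsp://", url, 7)) {
            return NU_PLAYER;
        }
        // ...while local files default to StagefrightPlayer.
        return STAGEFRIGHT_PLAYER;
    }

Here we follow the StagefrightPlayer path.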


In Media Framework :android_src\framework\base\media\libmediaplayerservice\StagefrightPlayer.cpp


status_t StagefrightPlayer::start() {
    LOGV("start");
    return mPlayer->play();
}
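mPlayer here is the AwesomePlayer instance created when the StagefrightPlayer was constructed, roughly (simplified from the constructor):

    StagefrightPlayer::StagefrightPlayer()
        : mPlayer(new AwesomePlayer) {
        // Events from AwesomePlayer are forwarded back up
        // through this listener.
        mPlayer->setListener(this);
    }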


In Media Framework :android_src\framework\base\media\libstagefright\AwesomePlayer.cpp


status_t AwesomePlayer::play() {
    Mutex::Autolock autoLock(mLock);
    // play_l() is the "already locked" variant, per the _l suffix convention.
    return play_l();
}


status_t AwesomePlayer::play_l() {
    // (audio setup and state handling elided)
    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }
    return OK;
}


void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
    if (mVideoEventPending) {
        return;
    }
    mVideoEventPending = true;
    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
}

As the code above shows, AwesomePlayer posts mVideoEvent on a timed event queue, which in turn triggers the onVideoEvent method.
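How does posting an event turn into a method call? AwesomePlayer wraps a member-function pointer in a TimedEventQueue::Event; when the queue fires the event, the stored method runs. A simplified sketch of that helper (based on AwesomeEvent in AwesomePlayer.cpp):

    struct AwesomeEvent : public TimedEventQueue::Event {
        AwesomeEvent(AwesomePlayer *player, void (AwesomePlayer::*method)())
            : mPlayer(player), mMethod(method) {}

    protected:
        virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
            (mPlayer->*mMethod)();  // here: AwesomePlayer::onVideoEvent()
        }

    private:
        AwesomePlayer *mPlayer;
        void (AwesomePlayer::*mMethod)();
    };

    // In the AwesomePlayer constructor:
    //   mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent);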

In Media Framework :android_src\framework\base\media\libstagefright\AwesomePlayer.cpp

void AwesomePlayer::onVideoEvent() {
    // (locking, seek handling and error paths elided)

    if (!mVideoBuffer) {
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);

            if (mVideoBuffer->range_length() == 0) {
                // Empty buffer: release it and read again.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }
            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    int64_t realTimeUs, mediaTimeUs;
    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
    }

    // ts is the time source: the audio clock while audio is playing,
    // otherwise the system clock (declaration elided above).
    int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

    int64_t latenessUs = nowUs - timeUs;

    if (latenessUs > 500000ll
            && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(
                &realTimeUs, &mediaTimeUs)) {
        LOGI("we're much too late (%.2f secs), video skipping ahead",
             latenessUs / 1E6);

        mVideoBuffer->release();
        mVideoBuffer = NULL;

        mSeeking = SEEK_VIDEO_ONLY;
        mSeekTimeUs = mediaTimeUs;

        postVideoEvent_l();
        return;
    }

    if (latenessUs > 40000) {
        // We're more than 40ms late.
        LOGV("we're late by %lld us (%.2f secs)",
             latenessUs, latenessUs / 1E6);
        mSinceLastDropped = 0;
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        postVideoEvent_l();
        return;
    }

    if (latenessUs < -10000) {
        // We're more than 10ms early.
        postVideoEvent_l(10000);
        return;
    }

    if (mVideoRenderer != NULL) {
        mSinceLastDropped++;
        mVideoRenderer->render(mVideoBuffer);
    }
    postVideoEvent_l();
}

The mVideoSource->read() call communicates with the OMX component (SoftAVC here) to do the decoding, then stores the decoded frame in mVideoBuffer.

Note that mVideoSource is an instance of OMXCodec; that is, all communication with the component is handled by OMXCodec.
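For context, mVideoSource is created when the video decoder is initialized, roughly (simplified from AwesomePlayer::initVideoDecoder()):

    mVideoSource = OMXCodec::Create(
            mClient.interface(),        // IOMX interface into mediaserver
            mVideoTrack->getFormat(),   // e.g. video/avc selects SoftAVC
            false,                      // createEncoder = false, we decode
            mVideoTrack);               // the extractor's video track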

In Media Framework :android_src\framework\base\media\libstagefright\OMXCodec.cpp  

status_t OMXCodec::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    // (locking and seek handling elided)
    if (mInitialBufferSubmit) {
        mInitialBufferSubmit = false;

        drainInputBuffers();

        if (mState == EXECUTING) {
            // Otherwise mState == RECONFIGURING and this code will trigger
            // after the output port is reenabled.
            fillOutputBuffers();
        }
    }

    while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) {
        status_t err = waitForBufferFilled_l();
        if (err != OK) {
            return err;
        }
    }

    size_t index = *mFilledBuffers.begin();
    mFilledBuffers.erase(mFilledBuffers.begin());

    BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
    *buffer = info->mMediaBuffer;

    return OK;
}


Here:
drainInputBuffers() feeds the decoder's input port with undecoded (compressed) buffers,
fillOutputBuffers() hands the decoder empty output buffers to be filled with decoded data,
waitForBufferFilled_l() blocks until a decoded buffer appears on the output queue.
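A rough sketch of the wait (simplified from OMXCodec::waitForBufferFilled_l(); encoder special-casing elided): it blocks on the mBufferFilled condition variable, which on_message() signals when a FILL_BUFFER_DONE event arrives, as we will see later.

    status_t OMXCodec::waitForBufferFilled_l() {
        // Woken by mBufferFilled.signal() in OMXCodec::on_message().
        status_t err = mBufferFilled.waitRelative(
                mLock, kBufferFilledEventTimeOutNs);
        if (err != OK) {
            CODEC_LOGE("Timed out waiting for output buffers");
        }
        return err;
    }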


void OMXCodec::drainInputBuffers() {
    // (flag checks elided)
    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
    for (size_t i = 0; i < buffers->size(); ++i) {
        BufferInfo *info = &buffers->editItemAt(i);
        if (!drainInputBuffer(info)) {
            break;
        }
    }
}


bool OMXCodec::drainInputBuffer(BufferInfo *info) {
    // (excerpted: the codec-config path is shown; steady-state frames
    //  are read from mSource and copied in much the same way)
    status_t err = mSource->read(&srcBuffer);

    // For AVC, prepend the NAL start code to the codec-specific data.
    memcpy(info->mData, kNALStartCode, 4);
    memcpy((uint8_t *)info->mData + 4,
           specific->mData, specific->mSize);

    // Hand the filled input buffer to the OMX component.
    err = mOMX->emptyBuffer(
            mNode, info->mBuffer, 0, size,
            OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG,
            0);

    return true;
}

The mSource->read(&srcBuffer) call fetches a buffer of compressed data from the media file. Here mSource refers to MPEG4Source (defined in MPEG4Extractor.cpp) because the container is MP4; a different container would use a different class (e.g., a TS container would use AnotherPacketSource).
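For reference, the container-specific source was chosen when the file was opened: the data source is sniffed for a MIME type, a matching extractor is created, and one of its tracks becomes mSource. A simplified sketch (variable names illustrative):

    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
    // For an .mp4 file this is an MPEG4Extractor; getTrack() returns the
    // MPEG4Source whose read() is shown below.
    sp<MediaSource> videoTrack = extractor->getTrack(videoTrackIndex);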

In Media Framework  :android_src\framework\base\media\libstagefright\MPEG4Extractor.cpp


status_t MPEG4Source::read(
        MediaBuffer **out, const ReadOptions *options) {
    // (seek handling and sample-table lookup elided)
    status_t err = mGroup->acquire_buffer(&mBuffer);

    ssize_t num_bytes_read =
        mDataSource->readAt(offset, (uint8_t *)mBuffer->data(), size);

    mBuffer->set_range(0, size);
    mBuffer->meta_data()->clear();
    mBuffer->meta_data()->setInt64(
            kKeyTime, ((int64_t)cts * 1000000) / mTimescale);

    *out = mBuffer;
    return OK;
}





In Media Framework :android_src\framework\base\media\libstagefright\omx\SimpleSoftOMXComponent.cpp

OMX_ERRORTYPE SimpleSoftOMXComponent::emptyThisBuffer(
        OMX_BUFFERHEADERTYPE *buffer) {
    sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler->id());
    msg->setPointer("header", buffer);
    msg->post();


    return OMX_ErrorNone;
}

Here emptyThisBuffer() uses the AMessage/AHandler model to post the "kWhatEmptyThisBuffer" message.


Since the component registers itself as the handler, its onMessageReceived() will be triggered on the looper thread.
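A minimal sketch of this AMessage/AHandler pattern (assuming the stagefright foundation classes ALooper/AHandler/AMessage; the names here are illustrative):

    struct MyComponent : public AHandler {
        enum { kWhatEmptyThisBuffer = 'etbf' };

        void queueBuffer(OMX_BUFFERHEADERTYPE *header) {
            sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, id());
            msg->setPointer("header", header);
            msg->post();  // delivered asynchronously on the looper thread
        }

        virtual void onMessageReceived(const sp<AMessage> &msg) {
            // runs on the looper thread, serialized with other messages
        }
    };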

void SimpleSoftOMXComponent::onMessageReceived(const sp<AMessage> &msg) {
    // (state-change and flush messages elided)
    switch (msg->what()) {
        case kWhatEmptyThisBuffer:
        case kWhatFillThisBuffer:
        {
            OMX_BUFFERHEADERTYPE *header;
            CHECK(msg->findPointer("header", (void **)&header));

            CHECK(mState == OMX_StateExecuting && mTargetState == mState);

            bool found = false;
            for (size_t i = 0; i < mPorts.size(); ++i) {
                PortInfo *port = &mPorts.editItemAt(i);

                for (size_t j = 0; j < port->mBuffers.size(); ++j) {
                    BufferInfo *buffer = &port->mBuffers.editItemAt(j);

                    if (buffer->mHeader == header) {
                        CHECK(!buffer->mOwnedByUs);

                        buffer->mOwnedByUs = true;

                        CHECK((msg->what() == kWhatEmptyThisBuffer
                                    && port->mDef.eDir == OMX_DirInput)
                                || (port->mDef.eDir == OMX_DirOutput));

                        // Queue the buffer on its port and let the codec
                        // subclass process it.
                        port->mQueue.push_back(buffer);
                        onQueueFilled(i);

                        found = true;
                        break;
                    }
                }
            }

            CHECK(found);
            break;
        }
    }
}

onQueueFilled() then calls into the "real" codec component, here SoftAVC:

void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
    // (EOS and port-settings-changed handling elided)
    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    H264SwDecRet ret;
    H264SwDecPicture decodedPicture;

    while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
            && outQueue.size() == kNumOutputBuffers) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;

        H264SwDecInput inPicture;
        H264SwDecOutput outPicture;
        memset(&inPicture, 0, sizeof(inPicture));
        inPicture.dataLen = inHeader->nFilledLen;
        inPicture.pStream = inHeader->pBuffer + inHeader->nOffset;

        // Decode until this input buffer is fully consumed.
        while (inPicture.dataLen > 0) {
            ret = H264SwDecDecode(mHandle, &inPicture, &outPicture);

            if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY ||
                ret == H264SWDEC_PIC_RDY_BUFF_NOT_EMPTY) {
                inPicture.dataLen -=
                    (u32)(outPicture.pStrmCurrPos - inPicture.pStream);
                inPicture.pStream = outPicture.pStrmCurrPos;
            }
        }

        if (mFirstPicture && !outQueue.empty()) {
            drainOneOutputBuffer(mFirstPictureId, mFirstPicture);
            delete[] mFirstPicture;
            mFirstPicture = NULL;
            mFirstPictureId = -1;
        }

        // Drain every picture the decoder has ready.
        while (!outQueue.empty() &&
                mHeadersDecoded &&
                H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                    == H264SWDEC_PIC_RDY) {
            int32_t picId = decodedPicture.picId;
            uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
            drainOneOutputBuffer(picId, data);
        }
    }
}


Here, once a picture has been decoded and is ready for rendering, drainOneOutputBuffer() is called:

void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    BufferInfo *outInfo = *outQueue.begin();
    outQueue.erase(outQueue.begin());
    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
    OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
    outHeader->nTimeStamp = header->nTimeStamp;
    outHeader->nFlags = header->nFlags;
    outHeader->nFilledLen = mPictureSize;
    memcpy(outHeader->pBuffer + outHeader->nOffset,
            data, mPictureSize);
    mPicToHeaderMap.removeItem(picId);
    delete header;
    outInfo->mOwnedByUs = false;
    notifyFillBufferDone(outHeader);
}

notifyFillBufferDone passes through several classes on its way back to OMXCodec; to simplify the walkthrough, we jump straight to OMXCodec::on_message().
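For reference, the first hop is roughly this (simplified from SoftOMXComponent.cpp): the component invokes the OMX callback registered at node-creation time, and the event then travels FillBufferDone -> OMXNodeInstance::OnFillBufferDone -> OMX::OnFillBufferDone (packed into an omx_message) -> OMXCodecObserver::onMessage -> OMXCodec::on_message.

    void SoftOMXComponent::notifyFillBufferDone(OMX_BUFFERHEADERTYPE *header) {
        (*mCallbacks->FillBufferDone)(
                mComponent, mComponent->pApplicationPrivate, header);
    }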

In Media Framework :android_src\framework\base\media\libstagefright\OMXCodec.cpp   

void OMXCodec::on_message(const omx_message &msg) {
    // (EMPTY_BUFFER_DONE and event messages elided)
    switch (msg.type) {
        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;

            Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];

            // Locate the BufferInfo matching this buffer id.
            size_t i = 0;
            while (i < buffers->size() && ((*buffers)[i].mBuffer != buffer)) {
                ++i;
            }
            CHECK(i < buffers->size());
            BufferInfo *info = &buffers->editItemAt(i);

            if (mPortStatus[kPortIndexOutput] == DISABLING) {
                CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);

                status_t err = freeBuffer(kPortIndexOutput, i);
                CHECK_EQ(err, (status_t)OK);

            } else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
                CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);

                if (info->mMediaBuffer == NULL) {
                    CHECK(mOMXLivesLocally);
                    CHECK(mQuirks & kRequiresAllocateBufferOnOutputPorts);
                    CHECK(mQuirks & kDefersOutputBufferAllocation);

                    // The qcom video decoders on Nexus don't actually allocate
                    // output buffer memory on a call to OMX_AllocateBuffer;
                    // the "pBuffer" member of the OMX_BUFFERHEADERTYPE
                    // structure is only filled in later.
                    info->mMediaBuffer = new MediaBuffer(
                            msg.u.extended_buffer_data.data_ptr,
                            info->mSize);
                    info->mMediaBuffer->setObserver(this);
                }

                MediaBuffer *buffer = info->mMediaBuffer;

                buffer->set_range(
                        msg.u.extended_buffer_data.range_offset,
                        msg.u.extended_buffer_data.range_length);

                buffer->meta_data()->clear();

                buffer->meta_data()->setInt64(
                        kKeyTime, msg.u.extended_buffer_data.timestamp);

                buffer->meta_data()->setPointer(
                        kKeyPlatformPrivate,
                        msg.u.extended_buffer_data.platform_private);

                buffer->meta_data()->setPointer(
                        kKeyBufferID,
                        msg.u.extended_buffer_data.buffer);

                // Make the decoded buffer visible to read() and wake the
                // thread waiting in waitForBufferFilled_l().
                mFilledBuffers.push_back(i);
                mBufferFilled.signal();
            }
            break;
        }
    }
}

Now a buffer has been decoded and wrapped as a MediaBuffer. The mBufferFilled.signal() call wakes the thread blocked in waitForBufferFilled_l(), so OMXCodec::read() can return the buffer.

Back to AwesomePlayer

In Media Framework :android_src\framework\base\media\libstagefright\AwesomePlayer.cpp

onVideoEvent() was already listed in full above; the part that matters on this second pass is the timing logic:

    int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
    int64_t latenessUs = nowUs - timeUs;
    // > 500 ms late: drop and seek video-only to catch up
    // >  40 ms late: drop this frame
    // >  10 ms early: wait ~10 ms and re-check
    // otherwise: render

After decoding, the frame's media timestamp is compared against the clock (derived from the audio position while audio is playing) to decide whether the frame is late. For example, if latenessUs works out to 60000 (60 ms late), the 40 ms branch fires and the frame is simply dropped; more than 0.5 s of lateness triggers a video-only seek to catch up.

Then the cycle postVideoEvent_l() -> onVideoEvent() -> decode -> render -> postVideoEvent_l() repeats, again and again, until the end of stream (EOS).
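For completeness, EOS is detected inside onVideoEvent() itself; roughly (simplified from the error path that was elided in the listing above):

    status_t err = mVideoSource->read(&mVideoBuffer, &options);
    if (err != OK) {
        if (err == ERROR_END_OF_STREAM) {
            // Post a stream-done event instead of another video event,
            // ending the decode/render cycle.
            postStreamDoneEvent_l(err);
            return;
        }
        // (other errors elided)
    }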


