Monday, March 26, 2012

Android 4.0 Ice Cream Sandwich Media Framework (6)


Continued (6)

---===NuPlayer===---




In part 5, initiateSetup() was called from NuPlayerDecoder; it posts a kWhatSetup message.

Note that ACodec sets itself as the target (handler) of the message.

In Media Framework  :android_src\framework\base\media\libstagefright\ACodec.cpp

void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(id());
    msg->post();
}
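
As an aside, this is the standard AHandler/AMessage pattern used throughout stagefright: a handler registered with an ALooper addresses messages to its own id(), and the looper later delivers them to onMessageReceived() on the looper thread. The tiny handler below is only an illustration of that pattern (MyHandler and ping() are made-up names, not framework code):

#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

struct MyHandler : public AHandler {
    enum { kWhatPing = 'ping' };

    void ping() {
        // Same idea as ACodec::initiateSetup(): target the message at ourselves.
        sp<AMessage> msg = new AMessage(kWhatPing, id());
        msg->post();
    }

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg) {
        switch (msg->what()) {
            case kWhatPing:
                // handle the ping on the looper thread
                break;
        }
    }
};

// Usage: the handler only receives messages once registered with a running looper.
//     sp<ALooper> looper = new ALooper;
//     looper->start();
//     sp<MyHandler> handler = new MyHandler;
//     looper->registerHandler(handler);
//     handler->ping();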


bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;
    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);


            handled = true;
            break;
        }
    }
    return handled;
}


void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);


    sp<IOMX> omx = client.interface();


    AString mime;
    CHECK(msg->findString("mime", &mime));


    Vector<String8> matchingCodecs;
    OMXCodec::findMatchingCodecs(
            mime.c_str(),
            false, // createEncoder
            NULL,  // matchComponentName
            0,     // flags
            &matchingCodecs);


    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = NULL;


    AString componentName;


    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs.itemAt(matchIndex).string();
        status_t err = omx->allocateNode(componentName.c_str(), observer, &node);
    }


    sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id());
    observer->setNotificationMessage(notify);


    mCodec->mComponentName = componentName;
    mCodec->mOMX = omx;
    mCodec->mNode = node;


    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;


    mCodec->mInputEOSResult = OK;


    mCodec->configureCodec(mime.c_str(), msg);


    sp<RefBase> obj;
    if (msg->findObject("native-window", &obj)
            && strncmp("OMX.google.", componentName.c_str(), 11)) {
        sp<NativeWindowWrapper> nativeWindow(
                static_cast<NativeWindowWrapper *>(obj.get()));
        CHECK(nativeWindow != NULL);
        mCodec->mNativeWindow = nativeWindow->getNativeWindow();
    }


    CHECK_EQ((status_t)OK, mCodec->initNativeWindow());


    CHECK_EQ(omx->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle),
             (status_t)OK);


    mCodec->changeState(mCodec->mLoadedToIdleState);
}




struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}


    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }
...
};




void ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    setComponentRole(false /* isEncoder */, mime);


    if (!strncasecmp(mime, "video/", 6)) {
        int32_t width, height;
        CHECK(msg->findInt32("width", &width));
        CHECK(msg->findInt32("height", &height));


        CHECK_EQ(setupVideoDecoder(mime, width, height),
                 (status_t)OK);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
        int32_t numChannels, sampleRate;
        CHECK(msg->findInt32("channel-count", &numChannels));
        CHECK(msg->findInt32("sample-rate", &sampleRate));


        CHECK_EQ(setupAACDecoder(numChannels, sampleRate), (status_t)OK);
    }
...
...
}

The code above configures the component's attributes, such as the input/output port parameters (resolution for video, channel count and sample rate for audio).
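
For reference, here is a minimal sketch of the get/modify/set pattern that the setup helpers use to program a port: read the current OMX_PARAM_PORTDEFINITIONTYPE through IOMX, change the fields of interest, and write it back. setVideoPortSize() and this local InitOMXParams() are illustrative helpers only, not the actual ACodec code; omx and node are assumed to be the values obtained in onSetup() above.

#include <OMX_Component.h>

#include <media/IOMX.h>

using namespace android;

template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

// Hypothetical helper: set the video frame geometry on one port.
static status_t setVideoPortSize(
        const sp<IOMX> &omx, IOMX::node_id node,
        OMX_U32 portIndex, int32_t width, int32_t height) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    // Read the component's current port definition ...
    status_t err = omx->getParameter(
            node, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    // ... adjust only what we care about ...
    def.format.video.nFrameWidth = width;
    def.format.video.nFrameHeight = height;

    // ... and write it back.
    return omx->setParameter(
            node, OMX_IndexParamPortDefinition, &def, sizeof(def));
}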

void ACodec::LoadedToIdleState::stateEntered() {
    LOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        // Allocation failed; report the error to the client.
        mCodec->signalError();
    }
}


status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);


    if (err != OK) {
        return err;
    }


    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

After sendCommand(node, OMX_CommandStateSet, OMX_StateIdle), the component replies asynchronously; the response arrives as an OMX event and triggers onOMXEvent().

bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);


            CHECK_EQ(mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting),
                     (status_t)OK);


            mCodec->changeState(mCodec->mIdleToExecutingState);


            return true;
        }
    }
}

As soon as the component has acknowledged the transition to OMX_StateIdle, the OMX_CommandStateSet command for OMX_StateExecuting is sent right away.

Note that ACodec always waits for the component's response in a transient state such as LoadedToIdleState or IdleToExecutingState, even when that state does nothing else.

void ACodec::IdleToExecutingState::stateEntered() {
    LOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}




bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        default:
            return BaseState::onMessageReceived(msg);
    }
}

Again, the component responds with a command-completion event.


bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
            CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting);


            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);


            return true;
        }
    }
}


void ACodec::ExecutingState::resume() {
    submitOutputBuffers();


    // Post the first input buffer.
    CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u);
    BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(0);


    postFillThisBuffer(info);


    mActive = true;
}






void ACodec::ExecutingState::submitOutputBuffers() {
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);


        LOGV("[%s] calling fillBuffer %p",
             mCodec->mComponentName.c_str(), info->mBufferID);


        CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
                 (status_t)OK);


        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }
}


void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }


    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);


    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", ACodec::kWhatFillThisBuffer);
    notify->setPointer("buffer-id", info->mBufferID);


    info->mData->meta()->clear();
    notify->setObject("buffer", info->mData);


    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id());
    reply->setPointer("buffer-id", info->mBufferID);


    notify->setMessage("reply", reply);


    notify->post();


    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

Here, in the executing state, postFillThisBuffer() is first called from ExecutingState::resume(); these first input buffers are used to carry the SPS/PPS information.

The notify message (ACodec::kWhatFillThisBuffer) is then sent back to NuPlayerDecoder together with a reply message (kWhatInputBufferFilled).
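
This notify/reply idiom is worth spelling out, because it recurs for every single buffer: the sender embeds a fresh reply AMessage inside the notification, and the receiver answers simply by filling in that reply and posting it, so neither side ever blocks on the other. A stripped-down sketch of the idiom (requestInput(), answerRequest() and the message codes are hypothetical; only the shape matches ACodec and NuPlayerDecoder):

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

enum { kWhatRequest = 'requ', kWhatAnswer = 'answ' };

// Producer side (compare ACodec::BaseState::postFillThisBuffer()): ask the peer
// for data and tell it where the answer must go.
static void requestInput(const sp<AMessage> &notify, ALooper::handler_id myId) {
    sp<AMessage> msg = notify->dup();
    msg->setInt32("what", kWhatRequest);

    sp<AMessage> reply = new AMessage(kWhatAnswer, myId);
    msg->setMessage("reply", reply);

    msg->post();
}

// Consumer side (compare NuPlayer::Decoder::onFillThisBuffer()): pull out the
// embedded reply, attach the payload and post it straight back.
static void answerRequest(const sp<AMessage> &request, const sp<ABuffer> &payload) {
    sp<AMessage> reply;
    CHECK(request->findMessage("reply", &reply));

    reply->setObject("buffer", payload);
    reply->post();
}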

In Media Framework  :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDecoder.cpp

void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatCodecNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));


            if (what == ACodec::kWhatFillThisBuffer) {
                LOGD("NuPlayer::Decoder::onMessageReceived : onFillThisBuffer(msg);");
                onFillThisBuffer(msg);
            } else {
                sp<AMessage> notify = mNotify->dup();
                notify->setMessage("codec-request", msg);
                notify->post();
            }
            break;
        }
    }
}


void NuPlayer::Decoder::onFillThisBuffer(const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));


    sp<ABuffer> outBuffer;


    if (mCSDIndex < mCSD.size()) {
        outBuffer = mCSD.editItemAt(mCSDIndex++);
        outBuffer->meta()->setInt64("timeUs", 0);


        reply->setObject("buffer", outBuffer);
        reply->post();
        return;
    }


    sp<AMessage> notify = mNotify->dup();
    // wrap the ACodec::kWhatFillThisBuffer message and forward it to NuPlayer

    notify->setMessage("codec-request", msg);
    notify->post();
}

As mentioned, the first few buffers are used to carry the SPS/PPS information (stored in mCSD), so the condition (mCSDIndex < mCSD.size()) is true for them.
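
For context, the mCSD entries are plain ABuffers holding the parameter sets. A simplified sketch of how one such buffer could be built from a raw SPS or PPS NAL unit (makeCSDBuffer() is a hypothetical helper; in the real code the parameter sets are extracted from the container's avcC record when the decoder is configured):

#include <string.h>

#include <media/stagefright/foundation/ABuffer.h>

using namespace android;

// Wrap one SPS/PPS NAL unit with an Annex-B start code so that the decoder can
// consume it like any other input buffer.
static sp<ABuffer> makeCSDBuffer(const uint8_t *nal, size_t nalSize) {
    sp<ABuffer> csd = new ABuffer(4 + nalSize);
    memcpy(csd->data(), "\x00\x00\x00\x01", 4);   // Annex-B start code
    memcpy(csd->data() + 4, nal, nalSize);
    return csd;
}

// The decoder queues these once, before any real access units:
//     mCSD.push(makeCSDBuffer(sps, spsSize));
//     mCSD.push(makeCSDBuffer(pps, ppsSize));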

The reply message is then used to answer ACodec.

void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findPointer("buffer-id", &bufferID));


    switch (mode) {
        case RESUBMIT_BUFFERS:
        {
...
            CHECK_EQ(mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        buffer->size(),
                        flags,
                        timeUs),
                     (status_t)OK);


            info->mStatus = BufferInfo::OWNED_BY_COMPONENT;


            getMoreInputDataIfPossible();
...
            break;
        }
    }
}

ACodec then tries to get more input buffers filled.


void ACodec::BaseState::getMoreInputDataIfPossible() {
    BufferInfo *eligible = NULL;


    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);


        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }


    if (eligible == NULL) {
        return;
    }


    postFillThisBuffer(eligible);
}

postFillThisBuffer() is called again and again until the condition (mCSDIndex < mCSD.size()) becomes false, which means all of the codec-specific data has been sent to the component.

Then, in NuPlayer::Decoder::onFillThisBuffer(const sp<AMessage> &msg), a kWhatVideoNotify/kWhatAudioNotify message is sent back to NuPlayer to request un-decoded frame buffers.

In Media Framework  :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp


void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            bool audio = msg->what() == kWhatAudioNotify;


            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));


            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));


            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);


            }
        }
    }
}




status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));


    sp<ABuffer> accessUnit;


    bool dropAccessUnit;
    do {
        status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);


        if (!audio) {
            ++mNumFramesTotal;
        }


        dropAccessUnit = false;
        if (!audio
                && mVideoLateByUs > 100000ll
                && mVideoIsAVC
                && !IsAVCReferenceFrame(accessUnit)) {
            dropAccessUnit = true;
            ++mNumFramesDropped;
        }
    } while (dropAccessUnit);


    // LOGV("returned a valid buffer of %s data", audio ? "audio" : "video");




    reply->setObject("buffer", accessUnit);
    reply->post();


    return OK;
}

In Media Framework  :android_src\framework\base\media\libmediaplayerservice\nuplayer\HTTPLiveSource.cpp


status_t NuPlayer::HTTPLiveSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {
    ATSParser::SourceType type =
        audio ? ATSParser::AUDIO : ATSParser::VIDEO;


    sp<AnotherPacketSource> source =
        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());


    return source->dequeueAccessUnit(accessUnit);
}


Here NuPlayer dequeues an H.264 access unit and sends it to ACodec for decoding.
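
Conceptually, the source returned by getSource() is just a thread-safe queue of access units: the TS parser pushes complete units from its thread while NuPlayer pops them here. A rough, hypothetical sketch of such a queue (not the actual AnotherPacketSource implementation):

#include <utils/Errors.h>
#include <utils/List.h>
#include <utils/threads.h>

#include <media/stagefright/foundation/ABuffer.h>

using namespace android;

struct AccessUnitQueue {
    // Parser thread: append one complete access unit and wake any waiter.
    void queueAccessUnit(const sp<ABuffer> &accessUnit) {
        Mutex::Autolock autoLock(mLock);
        mBuffers.push_back(accessUnit);
        mCondition.signal();
    }

    // Player thread: block until an access unit is available, then hand it out.
    status_t dequeueAccessUnit(sp<ABuffer> *accessUnit) {
        Mutex::Autolock autoLock(mLock);
        while (mBuffers.empty()) {
            mCondition.wait(mLock);
        }

        *accessUnit = *mBuffers.begin();
        mBuffers.erase(mBuffers.begin());
        return OK;
    }

private:
    Mutex mLock;
    Condition mCondition;
    List<sp<ABuffer> > mBuffers;
};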

In Media Framework  :android_src\framework\base\media\libstagefright\ACodec.cpp

bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatInputBufferFilled:
        {
            onInputBufferFilled(msg);
            break;
        }
    }
    return true;
}

onInputBufferFilled ->
mOMX->emptyBuffer ->
getMoreInputDataIfPossible ->
postFillThisBuffer ->
feedDecoderInputData -> ...

This loop repeats again and again, keeping the component fed with data to decode.

=======================================================================
Let's look at how the EMPTY_BUFFER_DONE message from the OMX component is handled.

In Media Framework  :android_src\framework\base\media\libstagefright\ACodec.cpp

struct CodecObserver : public BnOMXObserver {


    virtual void onMessage(const omx_message &omx_msg) {
        sp<AMessage> msg = mNotify->dup();


        msg->setInt32("type", omx_msg.type);
        msg->setPointer("node", omx_msg.node);


        switch (omx_msg.type) {
            case omx_message::EMPTY_BUFFER_DONE:
            {
                msg->setPointer("buffer", omx_msg.u.buffer_data.buffer);
                break;
            }
        }


        msg->post();
    }
};




bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findPointer("buffer", &bufferID));


            return onOMXEmptyBufferDone(bufferID);
        }
    }
}


bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) {
    LOGV("[%s] onOMXEmptyBufferDone %p",
         mCodec->mComponentName.c_str(), bufferID);


    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexInput, bufferID);


    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
    info->mStatus = BufferInfo::OWNED_BY_US;


    PortMode mode = getPortMode(kPortIndexInput);


    switch (mode) {
        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;
    }


    return true;
}

As shown above, when the component reports EMPTY_BUFFER_DONE, ACodec calls postFillThisBuffer() again.

=======================================================================
Let's look at how the FILL_BUFFER_DONE message from the OMX component is handled.

In Media Framework  :android_src\framework\base\media\libstagefright\ACodec.cpp

struct CodecObserver : public BnOMXObserver {


    virtual void onMessage(const omx_message &omx_msg) {
        sp<AMessage> msg = mNotify->dup();


        msg->setInt32("type", omx_msg.type);
        msg->setPointer("node", omx_msg.node);


        switch (omx_msg.type) {
            case omx_message::FILL_BUFFER_DONE:
            {
                msg->setPointer(
                        "buffer", omx_msg.u.extended_buffer_data.buffer);
                msg->setInt32(
                        "range_offset",
                        omx_msg.u.extended_buffer_data.range_offset);
                msg->setInt32(
                        "range_length",
                        omx_msg.u.extended_buffer_data.range_length);
                msg->setInt32(
                        "flags",
                        omx_msg.u.extended_buffer_data.flags);
                msg->setInt64(
                        "timestamp",
                        omx_msg.u.extended_buffer_data.timestamp);
                msg->setPointer(
                        "platform_private",
                        omx_msg.u.extended_buffer_data.platform_private);
                msg->setPointer(
                        "data_ptr",
                        omx_msg.u.extended_buffer_data.data_ptr);
                break;
            }
        }


        msg->post();
    }
};


bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));


    IOMX::node_id nodeID;
    CHECK(msg->findPointer("node", &nodeID));
    CHECK_EQ(nodeID, mCodec->mNode);


    switch (type) {
        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findPointer("buffer", &bufferID));


            int32_t rangeOffset, rangeLength, flags;
            int64_t timeUs;
            void *platformPrivate;
            void *dataPtr;


            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findPointer("platform_private", &platformPrivate));
            CHECK(msg->findPointer("data_ptr", &dataPtr));


            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    platformPrivate,
                    dataPtr);
        }
    }
}

bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        void *platformPrivate,
        void *dataPtr) {
    LOGV("[%s] onOMXFillBufferDone %p time %lld us",
         mCodec->mComponentName.c_str(), bufferID, timeUs);


    ssize_t index;
    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);


    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);


    info->mStatus = BufferInfo::OWNED_BY_US;


    PortMode mode = getPortMode(kPortIndexOutput);


    switch (mode) {
        case RESUBMIT_BUFFERS:
        {
                if (!mCodec->mSentFormat) {
                    mCodec->sendFormatChange();
                }


                if (mCodec->mNativeWindow == NULL) {
                    info->mData->setRange(rangeOffset, rangeLength);
                }


                info->mData->meta()->setInt64("timeUs", timeUs);


                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
                notify->setPointer("buffer-id", info->mBufferID);
                notify->setObject("buffer", info->mData);


                sp<AMessage> reply =
                    new AMessage(kWhatOutputBufferDrained, mCodec->id());


                reply->setPointer("buffer-id", info->mBufferID);


                notify->setMessage("reply", reply);


                notify->post();


                info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;


            if (flags & OMX_BUFFERFLAG_EOS) {
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", ACodec::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();


                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }
    }


    return true;
}

In onOMXFillBufferDone(), ACodec sends a notify message (kWhatCodecNotify) back to NuPlayerDecoder, with "what" set to ACodec::kWhatDrainThisBuffer and a reply message (kWhatOutputBufferDrained).

In Media Framework  :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDecoder.cpp

void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
//    LOGD("In NuPlayer::Decoder::onMessageReceived");
    switch (msg->what()) {
        case kWhatCodecNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));


            // Anything other than kWhatFillThisBuffer (handled above) is simply
            // wrapped and forwarded to NuPlayer:
            sp<AMessage> notify = mNotify->dup();
            notify->setMessage("codec-request", msg);
            notify->post();
            break;
        }
    }
}

NuPlayerDecoder then leaves the decision to NuPlayer. Note that this message originates from ACodec and is wrapped inside the notify-back message.

In Media Framework  :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp

void NuPlayer::onMessageReceived(const sp &msg) {
    switch (msg->what()) {


        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));
...
...
            else {
                CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);
                renderBuffer(audio, codecRequest);
            }
            break;
        }
    }
}

Note that the message carried over from ACodec is now named codecRequest.

void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
    // LOGV("renderBuffer %s", audio ? "audio" : "video");


    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));


    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));


    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());


    mRenderer->queueBuffer(audio, buffer, reply);
}

In renderBuffer(), the reply message (kWhatOutputBufferDrained) is passed along to NuPlayerRenderer.

In Media Framework  :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerRenderer.cpp

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setObject("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

In NuPlayer::Renderer::queueBuffer(), the reply message (kWhatOutputBufferDrained) destined for ACodec is named notifyConsumed.

A kWhatQueueBuffer message is posted here.


void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    LOGD("NuPlayer::Renderer::onMessageReceived %d",msg->what());
    switch (msg->what()) {
        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }
    }
}




void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));


    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));
    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());


    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));


    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;


    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }


    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }


    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;


    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }


    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));


    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;


    LOGV("queueDiff = %.2f secs", diff / 1E6);


    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.


        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }


    syncQueuesDone();
}


void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }


    mSyncQueues = false;


    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }


    if (!mVideoQueue.empty()) {//not empty
        postDrainVideoQueue();
    }
}

NuPlayerRenderer maintains mVideoQueue and mAudioQueue to store the decoded buffers.
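
Each queue element pairs the decoded buffer with its notifyConsumed reply message, so the renderer knows both what to display and whom to tell once it is done. Roughly (paraphrased from the QueueEntry struct in NuPlayerRenderer.h, shown here only for orientation):

struct QueueEntry {
    sp<ABuffer>  mBuffer;          // decoded frame data (NULL marks EOS)
    sp<AMessage> mNotifyConsumed;  // the kWhatOutputBufferDrained reply for ACodec
    size_t       mOffset;          // bytes already consumed (used for audio)
    status_t     mFinalResult;     // final status once the stream ends
};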

Now postDrainVideoQueue() is called.

void NuPlayer::Renderer::postDrainVideoQueue() {


    QueueEntry &entry = *mVideoQueue.begin();


    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);


    int64_t delayUs;


    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;


            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;


            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }


    msg->post(delayUs);


    mDrainVideoQueuePending = true;
}

After calculating the delay relative to the media clock, the kWhatDrainVideoQueue message is posted with that delay.
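
In other words, the renderer maps the buffer's media timestamp onto the wall clock using the anchor pair and posts the drain message with whatever delay remains. A small self-contained example of that arithmetic, using made-up timestamps:

#include <stdint.h>
#include <stdio.h>

int main() {
    // Anchor: media time 0 us was rendered at wall-clock time 1000000 us.
    int64_t anchorTimeMediaUs = 0;
    int64_t anchorTimeRealUs  = 1000000;

    int64_t mediaTimeUs = 400000;   // this frame is stamped 0.4 s into the clip
    int64_t nowUs       = 1350000;  // current wall-clock time

    // Same formula as postDrainVideoQueue():
    int64_t realTimeUs = (mediaTimeUs - anchorTimeMediaUs) + anchorTimeRealUs;
    int64_t delayUs    = realTimeUs - nowUs;

    // realTimeUs = 1400000 us, so the frame is due in 50000 us (50 ms) and
    // kWhatDrainVideoQueue is posted with that delay.
    printf("delayUs = %lld\n", (long long)delayUs);
    return 0;
}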


void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    LOGD("NuPlayer::Renderer::onMessageReceived %d",msg->what());
    switch (msg->what()) {
       case kWhatDrainVideoQueue:
        {
            mDrainVideoQueuePending = false;


            onDrainVideoQueue();


            postDrainVideoQueue();
            break;
        }
    }
}


void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }


    QueueEntry *entry = &*mVideoQueue.begin();


    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));


    int64_t realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;


    bool tooLate = (mVideoLateByUs > 40000);


    if (tooLate) {
        LOGV("video late by %lld us (%.2f secs)", lateByUs, lateByUs / 1E6);
    } else {
        LOGD("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
    }


    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;


    notifyPosition();
}

entry->mNotifyConsumed is the kWhatOutputBufferDrained reply message, which is now posted back to ACodec together with the "render" flag.

In Media Framework  :android_src\framework\base\media\libstagefright\ACodec.cpp

bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOutputBufferDrained:
        {
            onOutputBufferDrained(msg);
            break;
        }
    }


    return true;
}

void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findPointer("buffer-id", &bufferID));


    ssize_t index;
    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM);


    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0) {
        // The client wants this buffer to be rendered.


        if (mCodec->mNativeWindow->queueBuffer(
                    mCodec->mNativeWindow.get(),
                    info->mGraphicBuffer.get()) == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            mCodec->signalError();
            info->mStatus = BufferInfo::OWNED_BY_US;
        }
    } else {
        info->mStatus = BufferInfo::OWNED_BY_US;
    }


    PortMode mode = getPortMode(kPortIndexOutput);


    switch (mode) {
        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.


                    info = mCodec->dequeueBufferFromNativeWindow();
                }


                if (info != NULL) {
                    LOGV("[%s] calling fillBuffer %p",
                         mCodec->mComponentName.c_str(), info->mBufferID);


                    CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
                             (status_t)OK);


                    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                }
            }
            break;
        }
    }
}


After handing the decoded buffer to the native window, ACodec asks the component to fill another output buffer via mOMX->fillBuffer(mCodec->mNode, info->mBufferID).

And so the output loop runs, again and again.

=======================================================================

To be continued ...... ?
