Continued (6)
---===NuPlayer===---
In part 5, initiateSetup() called from NuPlayerDecoder and post kWhatSetup message
Note the handler of messages in ACodec is set as itself.
In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp
void ACodec::initiateSetup(const sp
msg->setWhat(kWhatSetup);
msg->setTarget(id());
msg->post();
}
bool ACodec::UninitializedState::onMessageReceived(const sp
bool handled = false;
switch (msg->what()) {
case ACodec::kWhatSetup:
{
onSetup(msg);
handled = true;
break;
}
}
return handled;
}
void ACodec::UninitializedState::onSetup(
const sp
OMXClient client;
CHECK_EQ(client.connect(), (status_t)OK);
sp
AString mime;
CHECK(msg->findString("mime", &mime));
Vector
OMXCodec::findMatchingCodecs(
mime.c_str(),
false, // createEncoder
NULL, // matchComponentName
0, // flags
&matchingCodecs);
sp
IOMX::node_id node = NULL;
AString componentName;
for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
++matchIndex) {
componentName = matchingCodecs.itemAt(matchIndex).string();
status_t err = omx->allocateNode(componentName.c_str(), observer, &node);
}
sp
observer->setNotificationMessage(notify);
mCodec->mComponentName = componentName;
mCodec->mOMX = omx;
mCodec->mNode = node;
mCodec->mPortEOS[kPortIndexInput] =
mCodec->mPortEOS[kPortIndexOutput] = false;
mCodec->mInputEOSResult = OK;
mCodec->configureCodec(mime.c_str(), msg);
sp
if (msg->findObject("native-window", &obj)
&& strncmp("OMX.google.", componentName.c_str(), 11)) {
sp
static_cast
CHECK(nativeWindow != NULL);
mCodec->mNativeWindow = nativeWindow->getNativeWindow();
}
CHECK_EQ((status_t)OK, mCodec->initNativeWindow());
CHECK_EQ(omx->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle),
(status_t)OK);
mCodec->changeState(mCodec->mLoadedToIdleState);
}
struct CodecObserver : public BnOMXObserver {
CodecObserver() {}
void setNotificationMessage(const sp
mNotify = msg;
}
}
void ACodec::configureCodec(
const char *mime, const sp
setComponentRole(false /* isEncoder */, mime);
if (!strncasecmp(mime, "video/", 6)) {
int32_t width, height;
CHECK(msg->findInt32("width", &width));
CHECK(msg->findInt32("height", &height));
CHECK_EQ(setupVideoDecoder(mime, width, height),
(status_t)OK);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
int32_t numChannels, sampleRate;
CHECK(msg->findInt32("channel-count", &numChannels));
CHECK(msg->findInt32("sample-rate", &sampleRate));
CHECK_EQ(setupAACDecoder(numChannels, sampleRate), (status_t)OK);
}
...
...
}
The above code will set the attributes of component like input/output ports' parameters
void ACodec::LoadedToIdleState::stateEntered() {
LOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());
status_t err;
err = allocateBuffers()) != OK
}
// Allocates buffers on the input port first; the output port is only
// attempted if the input allocation succeeded. Returns the first failure.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    const status_t inputErr = mCodec->allocateBuffersOnPort(kPortIndexInput);
    return (inputErr != OK)
        ? inputErr
        : mCodec->allocateBuffersOnPort(kPortIndexOutput);
}
After sendCommand(node, OMX_CommandStateSet, OMX_StateIdle) ,the component will send back response, and trigger onOMXEvent
// Waits for the component to confirm the Loaded->Idle transition, then
// immediately requests Idle->Executing and moves to the next transient state.
// Fix: the excerpt fell off the end of the function (undefined behavior)
// for any event other than OMX_EventCmdComplete; delegate those to BaseState.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);
            CHECK_EQ(mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting),
                     (status_t)OK);
            mCodec->changeState(mCodec->mIdleToExecutingState);
            return true;
        }
        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}
As soon as the component reaches OMX_StateIdle, the OMX_CommandStateSet command for OMX_StateExecuting is sent to the component right away.
Note that ACodec always waits for the component's response in a transient state, like LoadedToIdleState or IdleToExecutingState, even when that state does nothing else.
// Transient state entry: nothing to do but log. ACodec simply waits here
// for the component to report completion of the Idle->Executing command.
void ACodec::IdleToExecutingState::stateEntered() {
    LOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}
bool ACodec::IdleToExecutingState::onMessageReceived(const sp
switch (msg->what()) {
default:
return BaseState::onMessageReceived(msg);
}
}
Again, the component responds that the command has completed.
// On Idle->Executing completion, resume the executing state (which submits
// all output buffers and posts the first input request), then switch to it.
// Fix: the excerpt fell off the end without a return value for events other
// than OMX_EventCmdComplete; delegate those to BaseState.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
            CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting);
            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);
            return true;
        }
        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}
void ACodec::ExecutingState::resume() {
submitOutputBuffers();
// Post the first input buffer.
CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u);
BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(0);
postFillThisBuffer(info);
mActive = true;
}
void ACodec::ExecutingState::submitOutputBuffers() {
for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);
LOGV("[%s] calling fillBuffer %p",
mCodec->mComponentName.c_str(), info->mBufferID);
CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
(status_t)OK);
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
}
}
// Asks upstream (NuPlayerDecoder) to fill the given input buffer: posts
// kWhatFillThisBuffer carrying the buffer, plus a reply message
// (kWhatInputBufferFilled) through which the filled data comes back.
// Fix: restores the HTML-stripped declarations of 'notify' and 'reply'.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        // No more input is requested once input EOS has been seen.
        return;
    }
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", ACodec::kWhatFillThisBuffer);
    notify->setPointer("buffer-id", info->mBufferID);
    info->mData->meta()->clear();
    notify->setObject("buffer", info->mData);
    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id());
    reply->setPointer("buffer-id", info->mBufferID);
    notify->setMessage("reply", reply);
    notify->post();
    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}
Here, in executing state, postFillThisBuffer() is called in ExecutingState::resume(), used to carry PPS SPS information.
Now use the notify to send message(ACodec::kWhatFillThisBuffer) back to NuPlayerDecoder with a reply message(
In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDecoder.cpp
void NuPlayer::Decoder::onMessageReceived(const sp
switch (msg->what()) {
case kWhatCodecNotify:
{
int32_t what;
CHECK(msg->findInt32("what", &what));
if (what == ACodec::kWhatFillThisBuffer) {
LOGD("NuPlayer::Decoder::onMessageReceived : onFillThisBuffer(msg);");
onFillThisBuffer(msg);
} else {
sp
notify->setMessage("codec-request", msg);
notify->post();
}
break;
}
}
}
void NuPlayer::Decoder::onFillThisBuffer(const sp
sp
CHECK(msg->findMessage("reply", &reply));
sp
if (mCSDIndex < mCSD.size()) {
outBuffer = mCSD.editItemAt(mCSDIndex++);
outBuffer->meta()->setInt64("timeUs", 0);
reply->setObject("buffer", outBuffer);
reply->post();
return;
}
sp
//wrapped message ACodec::kWhatFillThisBuffer to NuPlayer
notify->setMessage("codec-request", msg);
notify->post();
}
As mentioned, the first few buffers are used to carry the PPS/SPS information (stored in mCSD), so the condition (mCSDIndex < mCSD.size()) is true.
Here use the reply message to reply ACodec.
void ACodec::BaseState::onInputBufferFilled(const sp
IOMX::buffer_id bufferID;
CHECK(msg->findPointer("buffer-id", &bufferID));
switch (mode) {
case RESUBMIT_BUFFERS:
{
CHECK_EQ(mCodec->mOMX->emptyBuffer(
mCodec->mNode,
bufferID,
0,
buffer->size(),
flags,
timeUs),
(status_t)OK);
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
getMoreInputDataIfPossible();
}
}
}
}
ACodec tries to get more input buffers filled
void ACodec::BaseState::getMoreInputDataIfPossible() {
BufferInfo *eligible = NULL;
for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
if (info->mStatus == BufferInfo::OWNED_BY_US) {
eligible = info;
}
}
if (eligible == NULL) {
return;
}
postFillThisBuffer(eligible);
}
postFillThisBuffer() called again until the condition (mCSDIndex < mCSD.size()) is false, which means the codec specific data all have been sent to component
Then in NuPlayer::Decoder::onFillThisBuffer(const sp
In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp
void NuPlayer::onMessageReceived(const sp
switch (msg->what()) {
case kWhatVideoNotify:
case kWhatAudioNotify:
{
bool audio = msg->what() == kWhatAudioNotify;
sp
CHECK(msg->findMessage("codec-request", &codecRequest));
int32_t what;
CHECK(codecRequest->findInt32("what", &what));
if (what == ACodec::kWhatFillThisBuffer) {
status_t err = feedDecoderInputData(
audio, codecRequest);
}
}
}
}
// Dequeues one access unit from the source — dropping late, non-reference
// AVC video frames — and sends it to the decoder via the reply message.
// Fix: restores the HTML-stripped declarations of 'reply' and 'accessUnit'.
status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));
    sp<ABuffer> accessUnit;
    bool dropAccessUnit;
    do {
        status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);
        if (!audio) {
            ++mNumFramesTotal;
        }
        dropAccessUnit = false;
        if (!audio
                && mVideoLateByUs > 100000ll
                && mVideoIsAVC
                && !IsAVCReferenceFrame(accessUnit)) {
            // Video is >100ms behind the clock: drop droppable frames.
            dropAccessUnit = true;
            ++mNumFramesDropped;
        }
    } while (dropAccessUnit);
    // LOGV("returned a valid buffer of %s data", audio ? "audio" : "video");
    reply->setObject("buffer", accessUnit);
    reply->post();
    return OK;
}
In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\HTTPLiveSource.cpp
// Pulls the next audio or video access unit out of the TS parser's packet
// source for the requested stream type.
// Fix: restores the HTML-stripped template arguments and the static_cast
// of the parser's source.
status_t NuPlayer::HTTPLiveSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {
    ATSParser::SourceType type =
        audio ? ATSParser::AUDIO : ATSParser::VIDEO;
    sp<AnotherPacketSource> source =
        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
    return source->dequeueAccessUnit(accessUnit);
}
Here NuPlayer tries to dequeue an H264 access unit and send it to ACodec for decoding.
In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp
bool ACodec::BaseState::onMessageReceived(const sp
switch (msg->what()) {
case kWhatInputBufferFilled:
{
onInputBufferFilled(msg);
break;
}
return true;
}
onInputBufferFilled>
mOMX->emptyBuffer>
getMoreInputDataIfPossible>
postFillThisBuffer >
feedDecoderInputData>...
again and again for decoding
=======================================================================
Let's see the response of EMPTY_BUFFER_DONE message handling from OMX component
In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp
struct CodecObserver : public BnOMXObserver {
virtual void onMessage(const omx_message &omx_msg) {
sp
msg->setInt32("type", omx_msg.type);
msg->setPointer("node", omx_msg.node);
switch (omx_msg.type) {
case omx_message::EMPTY_BUFFER_DONE:
{
msg->setPointer("buffer", omx_msg.u.buffer_data.buffer);
break;
}
msg->post();
}
}
bool ACodec::BaseState::onOMXMessage(const sp
switch (type) {
case omx_message::EMPTY_BUFFER_DONE:
{
IOMX::buffer_id bufferID;
CHECK(msg->findPointer("buffer", &bufferID));
return onOMXEmptyBufferDone(bufferID);
}
}
}
// The component has finished consuming an input buffer: reclaim ownership
// and, when the port is in resubmit mode, immediately ask upstream to
// refill it.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) {
    LOGV("[%s] onOMXEmptyBufferDone %p",
         mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexInput, bufferID);
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
    info->mStatus = BufferInfo::OWNED_BY_US;

    PortMode mode = getPortMode(kPortIndexInput);
    if (mode == RESUBMIT_BUFFERS) {
        postFillThisBuffer(info);
    }

    return true;
}
As shown above, when EMPTY_BUFFER_DONE arrives, ACodec calls postFillThisBuffer() again.
=======================================================================
Let's see the response of FILL_BUFFER_DONE message handling from OMX component
In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp
struct CodecObserver : public BnOMXObserver {
virtual void onMessage(const omx_message &omx_msg) {
sp
msg->setInt32("type", omx_msg.type);
msg->setPointer("node", omx_msg.node);
switch (omx_msg.type) {
case omx_message::FILL_BUFFER_DONE:
{
msg->setPointer(
"buffer", omx_msg.u.extended_buffer_data.buffer);
msg->setInt32(
"range_offset",
omx_msg.u.extended_buffer_data.range_offset);
msg->setInt32(
"range_length",
omx_msg.u.extended_buffer_data.range_length);
msg->setInt32(
"flags",
omx_msg.u.extended_buffer_data.flags);
msg->setInt64(
"timestamp",
omx_msg.u.extended_buffer_data.timestamp);
msg->setPointer(
"platform_private",
omx_msg.u.extended_buffer_data.platform_private);
msg->setPointer(
"data_ptr",
omx_msg.u.extended_buffer_data.data_ptr);
break;
}
msg->post();
}
}
bool ACodec::BaseState::onOMXMessage(const sp
int32_t type;
CHECK(msg->findInt32("type", &type));
IOMX::node_id nodeID;
CHECK(msg->findPointer("node", &nodeID));
CHECK_EQ(nodeID, mCodec->mNode);
switch (type) {
case omx_message::FILL_BUFFER_DONE:
{
IOMX::buffer_id bufferID;
CHECK(msg->findPointer("buffer", &bufferID));
int32_t rangeOffset, rangeLength, flags;
int64_t timeUs;
void *platformPrivate;
void *dataPtr;
CHECK(msg->findInt32("range_offset", &rangeOffset));
CHECK(msg->findInt32("range_length", &rangeLength));
CHECK(msg->findInt32("flags", &flags));
CHECK(msg->findInt64("timestamp", &timeUs));
CHECK(msg->findPointer("platform_private", &platformPrivate));
CHECK(msg->findPointer("data_ptr", &dataPtr));
return onOMXFillBufferDone(
bufferID,
(size_t)rangeOffset, (size_t)rangeLength,
(OMX_U32)flags,
timeUs,
platformPrivate,
dataPtr);
}
}
}
// A decoded output buffer is ready: take ownership back from the component,
// then notify downstream (kWhatDrainThisBuffer) with a reply message
// (kWhatOutputBufferDrained) through which the buffer is returned. Signals
// EOS downstream when the buffer carries OMX_BUFFERFLAG_EOS.
// Fix: restores the HTML-stripped declarations of 'notify' and 'reply'.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        void *platformPrivate,
        void *dataPtr) {
    LOGV("[%s] onOMXFillBufferDone %p time %lld us",
         mCodec->mComponentName.c_str(), bufferID, timeUs);
    ssize_t index;
    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
    info->mStatus = BufferInfo::OWNED_BY_US;
    PortMode mode = getPortMode(kPortIndexOutput);
    switch (mode) {
        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mSentFormat) {
                // Announce the output format once before the first buffer.
                mCodec->sendFormatChange();
            }
            if (mCodec->mNativeWindow == NULL) {
                info->mData->setRange(rangeOffset, rangeLength);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
            notify->setPointer("buffer-id", info->mBufferID);
            notify->setObject("buffer", info->mData);
            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec->id());
            reply->setPointer("buffer-id", info->mBufferID);
            notify->setMessage("reply", reply);
            notify->post();
            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
            if (flags & OMX_BUFFERFLAG_EOS) {
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", ACodec::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();
                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }
    }
    return true;
}
In onOMXFillBufferDone ACodec send notify message(kWhatCodecNotify) back to NuPlayerDecoder with "what"(ACodec::kWhatDrainThisBuffer) and reply message(kWhatOutputBufferDrained)
In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerDecoder.cpp
void NuPlayer::Decoder::onMessageReceived(const sp
// LOGD("In NuPlayer::Decoder::onMessageReceived");
switch (msg->what()) {
case kWhatCodecNotify:
{
int32_t what;
CHECK(msg->findInt32("what", &what));
sp
notify->setMessage("codec-request", msg);
notify->post();
break;
}
}
}
Then NuPlayerDecoder hands the decision to NuPlayer; note that this message originates from ACodec and is wrapped inside the notify-back message.
In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayer.cpp
void NuPlayer::onMessageReceived(const sp
switch (msg->what()) {
case kWhatVideoNotify:
case kWhatAudioNotify:
{
sp
CHECK(msg->findMessage("codec-request", &codecRequest));
...
...
else {
CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);
renderBuffer(audio, codecRequest);
}
break;
}
}
}
Note that the message from ACodec is carried(now named codecRequest) .
void NuPlayer::renderBuffer(bool audio, const sp
// LOGV("renderBuffer %s", audio ? "audio" : "video");
sp
CHECK(msg->findMessage("reply", &reply));
sp
CHECK(msg->findObject("buffer", &obj));
sp
mRenderer->queueBuffer(audio, buffer, reply);
}
In renderBuffer, the reply message(kWhatOutputBufferDrained) is passed to NuPlayerRenderer
In Media Framework :android_src\framework\base\media\libmediaplayerservice\nuplayer\NuPlayerRenderer.cpp
void NuPlayer::Renderer::queueBuffer(
bool audio,
const sp
const sp
sp
msg->setInt32("audio", static_cast
msg->setObject("buffer", buffer);
msg->setMessage("notifyConsumed", notifyConsumed);
msg->post();
}
In NuPlayer::Renderer::queueBuffer , the reply message(kWhatOutputBufferDrained) to ACodec named notifyConsumed.
Here post kWhatQueueBuffer message
void NuPlayer::Renderer::onMessageReceived(const sp
LOGD("NuPlayer::Renderer::onMessageReceived %d",msg->what());
switch (msg->what()) {
case kWhatQueueBuffer:
{
onQueueBuffer(msg);
break;
}
}
}
void NuPlayer::Renderer::onQueueBuffer(const sp
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
sp
CHECK(msg->findObject("buffer", &obj));
sp
sp
CHECK(msg->findMessage("notifyConsumed", ¬ifyConsumed));
QueueEntry entry;
entry.mBuffer = buffer;
entry.mNotifyConsumed = notifyConsumed;
entry.mOffset = 0;
entry.mFinalResult = OK;
if (audio) {
mAudioQueue.push_back(entry);
postDrainAudioQueue();
} else {
mVideoQueue.push_back(entry);
postDrainVideoQueue();
}
if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
return;
}
sp
sp
if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
// EOS signalled on either queue.
syncQueuesDone();
return;
}
int64_t firstAudioTimeUs;
int64_t firstVideoTimeUs;
CHECK(firstAudioBuffer->meta()
->findInt64("timeUs", &firstAudioTimeUs));
CHECK(firstVideoBuffer->meta()
->findInt64("timeUs", &firstVideoTimeUs));
int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
LOGV("queueDiff = %.2f secs", diff / 1E6);
if (diff > 100000ll) {
// Audio data starts More than 0.1 secs before video.
// Drop some audio.
(*mAudioQueue.begin()).mNotifyConsumed->post();
mAudioQueue.erase(mAudioQueue.begin());
return;
}
syncQueuesDone();
}
// Ends the initial A/V synchronization phase and kicks both drains for
// whatever has already been queued.
void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return; // sync phase already finished
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }
    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
Here NuPlayerRenderer maintains mVideoQueue and mAudioQueue for storing the decoded buffers.
Now postDrainVideoQueue() is called
void NuPlayer::Renderer::postDrainVideoQueue() {
QueueEntry &entry = *mVideoQueue.begin();
sp
msg->setInt32("generation", mVideoQueueGeneration);
int64_t delayUs;
if (entry.mBuffer == NULL) {
// EOS doesn't carry a timestamp.
delayUs = 0;
} else {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
if (mAnchorTimeMediaUs < 0) {
delayUs = 0;
if (!mHasAudio) {
mAnchorTimeMediaUs = mediaTimeUs;
mAnchorTimeRealUs = ALooper::GetNowUs();
}
} else {
int64_t realTimeUs =
(mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
delayUs = realTimeUs - ALooper::GetNowUs();
}
}
msg->post(delayUs);
mDrainVideoQueuePending = true;
}
After calculating the media delay, the kWhatDrainVideoQueue message is posted.
void NuPlayer::Renderer::onMessageReceived(const sp
LOGD("NuPlayer::Renderer::onMessageReceived %d",msg->what());
switch (msg->what()) {
case kWhatDrainVideoQueue:
{
mDrainVideoQueuePending = false;
onDrainVideoQueue();
postDrainVideoQueue();
break;
}
}
}
// Pops the head of the video queue, decides whether it is too late to
// render (>40ms behind the anchor clock), and posts the consumed
// notification back to ACodec with the "render" verdict attached.
// Fix: the log statement referenced an undeclared 'lateByUs'; it should
// use the member mVideoLateByUs computed just above.
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }
    QueueEntry *entry = &*mVideoQueue.begin();
    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
    int64_t realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    bool tooLate = (mVideoLateByUs > 40000);
    if (tooLate) {
        LOGV("video late by %lld us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        LOGD("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
    }
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;
    notifyPosition();
}
entry->mNotifyConsumed is the message:kWhatOutputBufferDrained in ACodec
In Media Framework :android_src\framework\base\media\libstagefright\ACodec.cpp
bool ACodec::BaseState::onMessageReceived(const sp
switch (msg->what()) {
case kWhatOutputBufferDrained:
{
onOutputBufferDrained(msg);
break;
}
}
return true;
}
void ACodec::BaseState::onOutputBufferDrained(const sp
IOMX::buffer_id bufferID;
CHECK(msg->findPointer("buffer-id", &bufferID));
ssize_t index;
BufferInfo *info =
mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM);
int32_t render;
if (mCodec->mNativeWindow != NULL
&& msg->findInt32("render", &render) && render != 0) {
// The client wants this buffer to be rendered.
if (mCodec->mNativeWindow->queueBuffer(
mCodec->mNativeWindow.get(),
info->mGraphicBuffer.get()) == OK) {
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
} else {
mCodec->signalError();
info->mStatus = BufferInfo::OWNED_BY_US;
}
} else {
info->mStatus = BufferInfo::OWNED_BY_US;
}
PortMode mode = getPortMode(kPortIndexOutput);
switch (mode) {
case RESUBMIT_BUFFERS:
{
if (!mCodec->mPortEOS[kPortIndexOutput]) {
if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
// We cannot resubmit the buffer we just rendered, dequeue
// the spare instead.
info = mCodec->dequeueBufferFromNativeWindow();
}
if (info != NULL) {
LOGV("[%s] calling fillBuffer %p",
mCodec->mComponentName.c_str(), info->mBufferID);
CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
(status_t)OK);
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
}
}
break;
}
}
}
After sending the decoded buffer to the native window, ACodec requests the component to fill more decoded buffers via mOMX->fillBuffer(mCodec->mNode, info->mBufferID).
And
Again and again.
=======================================================================
To be continued ...... ?
沒有留言:
張貼留言