The Renderer's handling of video comes down to checking whether a frame is already too late; if it is, the frame is not rendered.
Rendering starts from ACodec's onOMXFillBufferDone: ACodec sends a message to MediaCodec, which updates its buffers and, in onOutputBufferAvailable, notifies the Decoder that an output buffer is available.
The Decoder receives the message and calls handleAnOutputBuffer. It then interacts with the Renderer, which decides whether to render and notifies the Decoder back through onRenderBuffer(msg); the Decoder in turn talks to MediaCodec.
MediaCodec decides between software and hardware rendering: software rendering goes to SoftwareRenderer, hardware rendering to ACodec. Whichever way MediaCodec renders, the flow ends up back in ACodec's onOutputBufferDrained(msg), which continues with fillBuffer, after which OMX calls back into ACodec's onOMXFillBufferDone again.
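Before diving into the sources, here is a minimal sketch of the reply-message round trip described above. The types are hypothetical stand-ins (a plain callback instead of AMessage/AHandler); only the 40 ms threshold and the render/release split mirror the real code shown later:
#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>

// Hypothetical stand-in for the kWhatRenderBuffer reply: instead of an
// AMessage posted between handlers, a plain callback carries the render
// decision back to the "Decoder".
struct Reply {
    size_t bufferIx;
    std::function<void(bool render, int64_t timestampNs)> deliver;
};

// "Renderer" side: decide whether the frame is too late, then consume the reply.
void rendererConsume(const Reply &reply, int64_t realTimeUs, int64_t nowUs) {
    bool tooLate = (nowUs - realTimeUs) > 40000; // same 40 ms rule as onDrainVideoQueue
    reply.deliver(!tooLate, realTimeUs * 1000);  // render flag + timestampNs
}

int main() {
    // "Decoder" side: create the reply for output buffer #3 (cf. handleAnOutputBuffer).
    Reply reply{3, [](bool render, int64_t timestampNs) {
        // cf. onRenderBuffer: render-and-release, or release without rendering
        std::cout << (render ? "renderOutputBufferAndRelease" : "releaseOutputBuffer")
                  << " ix=3 ts=" << timestampNs << "ns\n";
    }};
    rendererConsume(reply, 1000000 /* realTimeUs */, 1010000 /* nowUs */); // 10 ms late: render
    rendererConsume(reply, 1000000 /* realTimeUs */, 1050000 /* nowUs */); // 50 ms late: drop
}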
0. The Decoder receives the message from MediaCodec (MediaCodec::onOutputBufferAvailable)
On receiving MediaCodec's notification with cbID CB_OUTPUT_AVAILABLE, the Decoder calls handleAnOutputBuffer(index, offset, size, timeUs, flags).
void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
switch (msg->what()) {
case kWhatCodecNotify:
{
int32_t cbID;
CHECK(msg->findInt32("callbackID", &cbID));
ALOGV("[%s] kWhatCodecNotify: cbID = %d, paused = %d",
mIsAudio ? "audio" : "video", cbID, mPaused);
if (mPaused) {
break;
}
switch (cbID) {
case MediaCodec::CB_INPUT_AVAILABLE:
{
int32_t index;
CHECK(msg->findInt32("index", &index));
handleAnInputBuffer(index);
break;
}
case MediaCodec::CB_OUTPUT_AVAILABLE:
{
int32_t index;
size_t offset;
size_t size;
int64_t timeUs;
int32_t flags;
CHECK(msg->findInt32("index", &index));
CHECK(msg->findSize("offset", &offset));
CHECK(msg->findSize("size", &size));
CHECK(msg->findInt64("timeUs", &timeUs));
CHECK(msg->findInt32("flags", &flags));
handleAnOutputBuffer(index, offset, size, timeUs, flags); // handleAnOutputBuffer starts here
break;
}
case MediaCodec::CB_OUTPUT_FORMAT_CHANGED:
{
sp<AMessage> format;
CHECK(msg->findMessage("format", &format));
handleOutputFormatChange(format);
break;
}
case MediaCodec::CB_ERROR:
{
status_t err;
CHECK(msg->findInt32("err", &err));
ALOGE("Decoder (%s) reported error : 0x%x",
mIsAudio ? "audio" : "video", err);
handleError(err);
break;
}
default:
{
TRESPASS();
break;
}
}
break;
}
case kWhatRenderBuffer:
{
if (!isStaleReply(msg)) {
onRenderBuffer(msg);
}
break;
}
case kWhatSetVideoSurface:
{
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
sp<RefBase> obj;
CHECK(msg->findObject("surface", &obj));
sp<Surface> surface = static_cast<Surface *>(obj.get()); // non-null
int32_t err = INVALID_OPERATION;
// NOTE: in practice mSurface is always non-null, but checking here for completeness
if (mCodec != NULL && mSurface != NULL) {
// TODO: once AwesomePlayer is removed, remove this automatic connecting
// to the surface by MediaPlayerService.
//
// at this point MediaPlayerService::client has already connected to the
// surface, which MediaCodec does not expect
err = native_window_api_disconnect(surface.get(), NATIVE_WINDOW_API_MEDIA);
if (err == OK) {
err = mCodec->setSurface(surface);
ALOGI_IF(err, "codec setSurface returned: %d", err);
if (err == OK) {
// reconnect to the old surface as MPS::Client will expect to
// be able to disconnect from it.
(void)native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_MEDIA);
mSurface = surface;
}
}
if (err != OK) {
// reconnect to the new surface on error as MPS::Client will expect to
// be able to disconnect from it.
(void)native_window_api_connect(surface.get(), NATIVE_WINDOW_API_MEDIA);
}
}
sp<AMessage> response = new AMessage;
response->setInt32("err", err);
response->postReply(replyID);
break;
}
default:
DecoderBase::onMessageReceived(msg);
break;
}
}
1. NuPlayer::Decoder::handleAnOutputBuffer
bool NuPlayer::Decoder::handleAnOutputBuffer(
size_t index,
size_t offset,
size_t size,
int64_t timeUs,
int32_t flags) {
// CHECK_LT(bufferIx, mOutputBuffers.size());
sp<ABuffer> buffer;
mCodec->getOutputBuffer(index, &buffer); // look up the buffer (sp<ABuffer>)
if (index >= mOutputBuffers.size()) {
for (size_t i = mOutputBuffers.size(); i <= index; ++i) {
mOutputBuffers.add();
}
}
mOutputBuffers.editItemAt(index) = buffer;
buffer->setRange(offset, size);
buffer->meta()->clear();
buffer->meta()->setInt64("timeUs", timeUs); // record timeUs
setPcmFormat(buffer->meta());
bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;
// we do not expect CODECCONFIG or SYNCFRAME for decoder
sp<AMessage> reply = new AMessage(kWhatRenderBuffer, this); // the reply that will be handed to the Renderer
reply->setSize("buffer-ix", index); // record buffer-ix
reply->setInt32("generation", mBufferGeneration);
if (eos) {
ALOGI("[%s] saw output EOS", mIsAudio ? "audio" : "video");
buffer->meta()->setInt32("eos", true);
reply->setInt32("eos", true);
} else if (mSkipRenderingUntilMediaTimeUs >= 0) {
if (timeUs < mSkipRenderingUntilMediaTimeUs) {
ALOGV("[%s] dropping buffer at time %lld as requested.",
mComponentName.c_str(), (long long)timeUs);
reply->post();
return true;
}
mSkipRenderingUntilMediaTimeUs = -1;
}
mNumFramesTotal += !mIsAudio;
// wait until 1st frame comes out to signal resume complete
notifyResumeCompleteIfNecessary();
if (mRenderer != NULL) {
// send the buffer to renderer.
mRenderer->queueBuffer(mIsAudio, buffer, reply); // the Renderer handles A/V sync for the buffer and consumes the reply
if (eos && !isDiscontinuityPending()) {
mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
}
}
return true;
}
2. The Decoder queues the buffer to the Renderer
NuPlayer::Renderer::queueBuffer
void NuPlayer::Renderer::queueBuffer(
bool audio,
const sp<ABuffer> &buffer,
const sp<AMessage> &notifyConsumed) {
sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
msg->setInt32("queueGeneration", getQueueGeneration(audio));
msg->setInt32("audio", static_cast<int32_t>(audio));
msg->setBuffer("buffer", buffer);
msg->setMessage("notifyConsumed", notifyConsumed); // carry the reply as "notifyConsumed"
msg->post(); // post to the Renderer's looper
}
3. The Renderer handles the message and calls onQueueBuffer
case kWhatQueueBuffer:
{
onQueueBuffer(msg);
break;
}
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
if (dropBufferIfStale(audio, msg)) {
return;
}
if (audio) {
mHasAudio = true;
} else {
mHasVideo = true;
}
if (mHasVideo) {
if (mVideoScheduler == NULL) {
mVideoScheduler = new VideoFrameScheduler();
mVideoScheduler->init();
}
}
sp<ABuffer> buffer;
CHECK(msg->findBuffer("buffer", &buffer));
sp<AMessage> notifyConsumed;
CHECK(msg->findMessage("notifyConsumed", &notifyConsumed)); // the reply created in the Decoder (used by the Renderer to talk back to it)
QueueEntry entry; // the QueueEntry holds the buffer, the reply, and bookkeeping fields
entry.mBuffer = buffer;
entry.mNotifyConsumed = notifyConsumed;
entry.mOffset = 0;
entry.mFinalResult = OK;
entry.mBufferOrdinal = ++mTotalBuffersQueued;
if (audio) {
Mutex::Autolock autoLock(mLock);
mAudioQueue.push_back(entry);
postDrainAudioQueue_l(); // audio path
} else {
mVideoQueue.push_back(entry);
postDrainVideoQueue(); // video path, see step 4
}
Mutex::Autolock autoLock(mLock);
if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
return;
}
sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
// EOS signalled on either queue.
syncQueuesDone_l();
return;
}
int64_t firstAudioTimeUs;
int64_t firstVideoTimeUs;
CHECK(firstAudioBuffer->meta()
->findInt64("timeUs", &firstAudioTimeUs));
CHECK(firstVideoBuffer->meta()
->findInt64("timeUs", &firstVideoTimeUs));
int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
ALOGV("queueDiff = %.2f secs", diff / 1E6);
if (diff > 100000ll) {
// Audio data starts More than 0.1 secs before video.
// Drop some audio.
(*mAudioQueue.begin()).mNotifyConsumed->post();
mAudioQueue.erase(mAudioQueue.begin());
return;
}
syncQueuesDone_l();
}
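The startup A/V sync above is plain arithmetic: if the first video frame's timestamp is more than 100 ms ahead of the first audio frame's, leading audio entries are consumed (dropped) until the gap closes. A self-contained illustration with made-up timestamps:
#include <cstdint>
#include <iostream>

int main() {
    int64_t firstAudioTimeUs = 0;      // first queued audio frame
    int64_t firstVideoTimeUs = 250000; // first queued video frame, 250 ms later
    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
    if (diff > 100000ll) {
        // onQueueBuffer posts the audio entry's notifyConsumed and erases it,
        // i.e. drops leading audio until the two queues start within 100 ms.
        std::cout << "drop leading audio, gap = " << diff / 1000 << " ms\n";
    } else {
        std::cout << "queues considered synced\n"; // syncQueuesDone_l()
    }
}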
4. The Renderer's onQueueBuffer calls postDrainVideoQueue
// Called without mLock acquired.
void NuPlayer::Renderer::postDrainVideoQueue() {
if (mDrainVideoQueuePending
|| getSyncQueues()
|| (mPaused && mVideoSampleReceived)) {
return;
}
if (mVideoQueue.empty()) {
return;
}
QueueEntry &entry = *mVideoQueue.begin(); // peek at the first entry; a NULL buffer means EOS
sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
if (entry.mBuffer == NULL) {
// EOS doesn't carry a timestamp.
msg->post();
mDrainVideoQueuePending = true;
return;
}
int64_t delayUs;
int64_t nowUs = ALooper::GetNowUs();
int64_t realTimeUs;
if (mFlags & FLAG_REAL_TIME) {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
realTimeUs = mediaTimeUs;
} else {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
{
Mutex::Autolock autoLock(mLock);
if (mAnchorTimeMediaUs < 0) {
mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
mAnchorTimeMediaUs = mediaTimeUs;
realTimeUs = nowUs;
} else {
realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
}
}
if (!mHasAudio) {
// smooth out videos >= 10fps
mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
}
// Heuristics to handle situation when media time changed without a
// discontinuity. If we have not drained an audio buffer that was
// received after this buffer, repost in 10 msec. Otherwise repost
// in 500 msec.
delayUs = realTimeUs - nowUs;
if (delayUs > 500000) {
int64_t postDelayUs = 500000;
if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
postDelayUs = 10000;
}
msg->setWhat(kWhatPostDrainVideoQueue);
msg->post(postDelayUs);
mVideoScheduler->restart();
ALOGI("possible video time jump of %dms, retrying in %dms",
(int)(delayUs / 1000), (int)(postDelayUs / 1000));
mDrainVideoQueuePending = true;
return;
}
}
realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
delayUs = realTimeUs - nowUs;
ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
// post 2 display refreshes before rendering is due
msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0); // post kWhatDrainVideoQueue
mDrainVideoQueuePending = true;
}
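The final post() delay deserves a worked example. Assuming a 60 Hz display (vsync period about 16.67 ms), twoVsyncsUs is about 33 ms, so a frame due 50 ms from now is posted roughly 16.7 ms from now, two refreshes ahead of its display time:
#include <cstdint>
#include <iostream>

int main() {
    int64_t vsyncPeriodNs = 16666667;                 // assumed 60 Hz display
    int64_t twoVsyncsUs = 2 * (vsyncPeriodNs / 1000); // ~33333 us, as in postDrainVideoQueue
    int64_t delayUs = 50000;                          // frame is due 50 ms from now
    // post 2 display refreshes before rendering is due
    int64_t postDelayUs = delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0;
    std::cout << "post kWhatDrainVideoQueue in " << postDelayUs << " us\n"; // ~16.7 ms
}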
5. The Renderer handles the message posted in the previous step, drains an entry in onDrainVideoQueue(), and then calls postDrainVideoQueue again
case kWhatDrainVideoQueue:
{
int32_t generation;
CHECK(msg->findInt32("drainGeneration", &generation));
if (generation != getDrainGeneration(false /* audio */)) {
break;
}
mDrainVideoQueuePending = false;
onDrainVideoQueue(); // pull the data out of the VideoQueue's QueueEntry
postDrainVideoQueue(); // keep draining video: fetch the next entry and decide again whether to render, see step 4
break;
}
6. NuPlayer::Renderer::onDrainVideoQueue() actually takes the entry out and decides whether the video is so late that it should not be rendered
void NuPlayer::Renderer::onDrainVideoQueue() {
if (mVideoQueue.empty()) {
return;
}
QueueEntry *entry = &*mVideoQueue.begin();
if (entry->mBuffer == NULL) {
// EOS
notifyEOS(false /* audio */, entry->mFinalResult);
mVideoQueue.erase(mVideoQueue.begin());
entry = NULL;
setVideoLateByUs(0);
return;
}
int64_t nowUs = -1;
int64_t realTimeUs;
if (mFlags & FLAG_REAL_TIME) {
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
} else {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); // the media (track) timestamp
nowUs = ALooper::GetNowUs();
realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); // the wall-clock time at which this frame should be displayed
}
bool tooLate = false;
if (!mPaused) {
if (nowUs == -1) {
nowUs = ALooper::GetNowUs();
}
setVideoLateByUs(nowUs - realTimeUs);
tooLate = (mVideoLateByUs > 40000); // if the video is more than 40000 us (40 ms) late, don't render it
if (tooLate) {
ALOGV("video late by %lld us (%.2f secs)",
(long long)mVideoLateByUs, mVideoLateByUs / 1E6);
} else {
int64_t mediaUs = 0;
mMediaClock->getMediaTime(realTimeUs, &mediaUs);
ALOGV("rendering video at media time %.2f secs",
(mFlags & FLAG_REAL_TIME ? realTimeUs :
mediaUs) / 1E6); // track time in seconds (mediaUs in the non-real-time case)
}
} else {
setVideoLateByUs(0);
if (!mVideoSampleReceived && !mHasAudio) {
// This will ensure that the first frame after a flush won't be used as anchor
// when renderer is in paused state, because resume can happen any time after seek.
Mutex::Autolock autoLock(mLock);
clearAnchorTime_l();
}
}
entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll); // the Renderer attaches timestampNs (in nanoseconds) to the reply
entry->mNotifyConsumed->setInt32("render", !tooLate); // ...and the render decision
entry->mNotifyConsumed->post(); // this is the reply the Decoder created as new AMessage(kWhatRenderBuffer, this); posting it delivers it back to the Decoder
mVideoQueue.erase(mVideoQueue.begin());
entry = NULL;
mVideoSampleReceived = true;
if (!mPaused) {
if (!mVideoRenderingStarted) {
mVideoRenderingStarted = true;
notifyVideoRenderingStart();
}
Mutex::Autolock autoLock(mLock);
notifyIfMediaRenderingStarted_l();
}
}
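getRealTimeUs() maps a media (track) timestamp to the wall-clock time at which the frame should be displayed, using the MediaClock anchor. A simplified model of that mapping (assuming 1x playback rate; the real MediaClock also accounts for rate changes), feeding into the 40 ms lateness rule:
#include <cstdint>
#include <iostream>

// Simplified model of MediaClock's media-to-real mapping at 1x playback rate:
// realTime = anchorReal + (mediaTime - anchorMedia).
int64_t getRealTimeUsModel(int64_t mediaTimeUs, int64_t anchorMediaUs, int64_t anchorRealUs) {
    return anchorRealUs + (mediaTimeUs - anchorMediaUs);
}

int main() {
    int64_t anchorMediaUs = 0;       // media time when playback was anchored
    int64_t anchorRealUs = 5000000;  // wall clock at the anchor point
    int64_t mediaTimeUs = 200000;    // this frame's track timestamp (200 ms)
    int64_t realTimeUs = getRealTimeUsModel(mediaTimeUs, anchorMediaUs, anchorRealUs);
    int64_t nowUs = realTimeUs + 45000; // we only got to drain it 45 ms past its display time
    bool tooLate = (nowUs - realTimeUs) > 40000;
    std::cout << "videoLateByUs=" << (nowUs - realTimeUs)
              << " render=" << !tooLate << "\n"; // 45000 us late -> dropped
}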
7. The Renderer has decided whether to render and posts the reply back to the Decoder.
The Decoder receives the message sent back by the Renderer via entry->mNotifyConsumed->post() (the reply saved in mVideoQueue's QueueEntry).
case kWhatRenderBuffer:
{
if (!isStaleReply(msg)) {
onRenderBuffer(msg);
}
break;
}
8. The Decoder's onRenderBuffer
void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
status_t err;
int32_t render;
size_t bufferIx;
int32_t eos;
CHECK(msg->findSize("buffer-ix", &bufferIx)); // retrieve buffer-ix
if (!mIsAudio) {
int64_t timeUs;
sp<ABuffer> buffer = mOutputBuffers[bufferIx];
buffer->meta()->findInt64("timeUs", &timeUs);
if (mCCDecoder != NULL && mCCDecoder->isSelected()) {
mCCDecoder->display(timeUs);
}
}
if (msg->findInt32("render", &render) && render) {
int64_t timestampNs;
CHECK(msg->findInt64("timestampNs", &timestampNs));
err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs); // hand to MediaCodec: render, then release
} else {
mNumOutputFramesDropped += !mIsAudio;
err = mCodec->releaseOutputBuffer(bufferIx); // release without rendering
}
if (err != OK) {
ALOGE("failed to release output buffer for %s (err=%d)",
mComponentName.c_str(), err);
handleError(err);
}
if (msg->findInt32("eos", &eos) && eos
&& isDiscontinuityPending()) {
finishHandleDiscontinuity(true /* flushOnTimeChange */);
}
}
9. The Decoder interacts with MediaCodec for the rendering. MediaCodec is the key decision point for video rendering.
The buffer is handed to MediaCodec's renderOutputBufferAndRelease or releaseOutputBuffer; the former actually renders, the latter does not.
9.1
status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this); // post the message
msg->setSize("index", index);
msg->setInt32("render", true); // mark the buffer for rendering
msg->setInt64("timestampNs", timestampNs); // the timestampNs supplied by the Renderer
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
9.2
status_t MediaCodec::releaseOutputBuffer(size_t index) {
sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
msg->setSize("index", index);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
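The same render-or-drop pair is exposed to applications through the public NDK API. A hedged sketch (not NuPlayer code; assumes an already-configured AMediaCodec with an output surface, linked against libmediandk):
#include <cstdint>
#include <media/NdkMediaCodec.h>

// Release one dequeued output buffer, either rendering it at an explicit
// timestamp or dropping it, mirroring MediaCodec's two paths above.
void releaseOne(AMediaCodec *codec, size_t index, bool render, int64_t timestampNs) {
    if (render) {
        // renders to the configured surface at timestampNs, then releases
        AMediaCodec_releaseOutputBufferAtTime(codec, index, timestampNs);
    } else {
        // releases without rendering (the drop path)
        AMediaCodec_releaseOutputBuffer(codec, index, false /* render */);
    }
}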
10. MediaCodec handles this in onReleaseOutputBuffer(msg); this is the key point where software vs. hardware rendering is decided.
case kWhatReleaseOutputBuffer:
{
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (!isExecuting()) {
PostReplyWithError(replyID, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
PostReplyWithError(replyID, getStickyError());
break;
}
status_t err = onReleaseOutputBuffer(msg); // the real work happens in onReleaseOutputBuffer(msg)
PostReplyWithError(replyID, err);
break;
}
//onReleaseOutputBuffer
status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
size_t index;
CHECK(msg->findSize("index", &index));
int32_t render;
if (!msg->findInt32("render", &render)) { // if "render" was set to true we render; otherwise the buffer is just released without rendering
render = 0;
}
if (!isExecuting()) {
return -EINVAL;
}
if (index >= mPortBuffers[kPortIndexOutput].size()) {
return -ERANGE;
}
BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
if (info->mNotify == NULL || !info->mOwnedByClient) {
return -EACCES;
}
// synchronization boundary for getBufferAndFormat
{
Mutex::Autolock al(mBufferLock);
info->mOwnedByClient = false;
}
if (render && info->mData != NULL && info->mData->size() != 0) { // render requested and the buffer has data
info->mNotify->setInt32("render", true); // propagate render=true into the reply
int64_t mediaTimeUs = -1;
info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
int64_t renderTimeNs = 0;
if (!msg->findInt64("timestampNs", &renderTimeNs)) { // the timestampNs supplied by the Renderer
// use media timestamp if client did not request a specific render timestamp
ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
renderTimeNs = mediaTimeUs * 1000;
}
info->mNotify->setInt64("timestampNs", renderTimeNs);
if (mSoftRenderer != NULL) { // if a SoftwareRenderer was set up, render in software here; otherwise ACodec will do the hardware rendering
std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
info->mData->data(), info->mData->size(),
mediaTimeUs, renderTimeNs, NULL, info->mFormat);
// if we are running, notify rendered frames
if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
sp<AMessage> notify = mOnFrameRenderedNotification->dup();
sp<AMessage> data = new AMessage;
if (CreateFramesRenderedMessage(doneFrames, data)) {
notify->setMessage("data", data);
notify->post();
}
}
}
}
info->mNotify->post(); // info->mNotify holds the reply message that ACodec passed over
info->mNotify = NULL;
return OK;
}
11. info->mNotify is the reply that ACodec handed to MediaCodec; for what happens after it is posted, see "MediaCodec与ACodec的通知分析".
This is where the real hardware rendering happens and where ACodec continues the fillBuffer cycle.
The reply message created in ACodec's onOMXFillBufferDone is shown below; it is given to MediaCodec, which uses it to call back into ACodec.
sp<AMessage> reply =
new AMessage(kWhatOutputBufferDrained, mCodec); // ACodec creates the reply message, which is eventually handed to MediaCodec
12. info->mNotify->post() reaches ACodec.
After MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) consumes the reply message, ACodec receives and handles it:
ACodec:
case kWhatOutputBufferDrained:
{
onOutputBufferDrained(msg);
break;
}
13. ACodec's onOutputBufferDrained(msg): the real hardware rendering.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
IOMX::buffer_id bufferID;
CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); // retrieve the bufferID
ssize_t index;
BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); // look up ACodec's BufferInfo by bufferID
BufferInfo::Status status = BufferInfo::getSafeStatus(info);
if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
mCodec->dumpBuffers(kPortIndexOutput);
mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
return;
}
android_native_rect_t crop;
if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) {
status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
}
bool skip = mCodec->getDSModeHint(msg);
int32_t render;
if (!skip && mCodec->mNativeWindow != NULL
&& msg->findInt32("render", &render) && render != 0
&& info->mData != NULL && info->mData->size() != 0) {
ATRACE_NAME("render");
// The client wants this buffer to be rendered.
// save buffers sent to the surface so we can get render time when they return
int64_t mediaTimeUs = -1;
info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
if (mediaTimeUs >= 0) {
mCodec->mRenderTracker.onFrameQueued(
mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
}
int64_t timestampNs = 0;
if (!msg->findInt64("timestampNs", ×tampNs)) {
// use media timestamp if client did not request a specific render timestamp
if (info->mData->meta()->findInt64("timeUs", ×tampNs)) {
ALOGV("using buffer PTS of %lld", (long long)timestampNs);
timestampNs *= 1000;
}
}
status_t err;
err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
info->checkReadFence("onOutputBufferDrained before queueBuffer");
err = mCodec->mNativeWindow->queueBuffer(
mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); // queue into mNativeWindow (the Surface) for display
info->mFenceFd = -1;
if (err == OK) {
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
} else {
ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
info->mStatus = BufferInfo::OWNED_BY_US;
// keeping read fence as write fence to avoid clobbering
info->mIsReadFence = false;
}
} else {
if (mCodec->mNativeWindow != NULL &&
(info->mData == NULL || info->mData->size() != 0)) {
// move read fence into write fence to avoid clobbering
info->mIsReadFence = false;
ATRACE_NAME("frame-drop");
}
info->mStatus = BufferInfo::OWNED_BY_US;
}
PortMode mode = getPortMode(kPortIndexOutput);
switch (mode) {
case KEEP_BUFFERS:
{
// XXX fishy, revisit!!! What about the FREE_BUFFERS case below?
if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
// We cannot resubmit the buffer we just rendered, dequeue
// the spare instead.
info = mCodec->dequeueBufferFromNativeWindow();
}
break;
}
case RESUBMIT_BUFFERS:
{
if (!mCodec->mPortEOS[kPortIndexOutput]) {
if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
// We cannot resubmit the buffer we just rendered, dequeue
// the spare instead.
info = mCodec->dequeueBufferFromNativeWindow();
}
if (info != NULL) {
ALOGV("[%s] calling fillBuffer %u",
mCodec->mComponentName.c_str(), info->mBufferID);
info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
status_t err = mCodec->mOMX->fillBuffer(
mCodec->mNode, info->mBufferID, info->mFenceFd); // mOMX fills the buffer again; when filled, it calls back ACodec::BaseState::onOMXFillBufferDone
info->mFenceFd = -1;
if (err == OK) {
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
} else {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
}
}
}
break;
}
case FREE_BUFFERS:
{
status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
if (err != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
}
break;
}
default:
ALOGE("Invalid port mode: %d", mode);
return;
}
}
14. After hardware rendering, ACodec calls fillBuffer and the OMX component later calls back ACodec::BaseState::onOMXFillBufferDone. In the software-rendering case, MediaCodec's info->mNotify->post() likewise keeps the fillBuffer cycle going,
after which the component calls back ACodec::BaseState::onOMXFillBufferDone; see "MediaCodec与ACodec通知分析" for the details.
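Putting steps 13-14 together, an output buffer cycles through a small set of ownership states, visible in the BufferInfo::mStatus assignments above. A compact summary of the cycle (state names from the ACodec code):
// OWNED_BY_COMPONENT      the OMX component is filling the buffer
//        | FillBufferDone -> ACodec::BaseState::onOMXFillBufferDone
// OWNED_BY_DOWNSTREAM     MediaCodec/Decoder/Renderer hold it; render decision is made
//        | onOutputBufferDrained:
//        |-- render: mNativeWindow->queueBuffer() -> OWNED_BY_NATIVE_WINDOW
//        |           (a spare buffer is dequeued from the window to refill)
//        `-- drop:   buffer stays with ACodec    -> OWNED_BY_US
//        | mOMX->fillBuffer(bufferID)
// OWNED_BY_COMPONENT      and the cycle repeats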
15. How the Decoder is wired up to MediaCodec
NuPlayer::Decoder::onConfigure sets up the callback from MediaCodec back to the Decoder.
void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
CHECK(mCodec == NULL);
mFormatChangePending = false;
mTimeChangePending = false;
++mBufferGeneration;
AString mime;
CHECK(format->findString("mime", &mime));
mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);
mIsVideoAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());
mComponentName = mime;
mComponentName.append(" decoder");
ALOGV("[%s] onConfigure (surface=%p)", mComponentName.c_str(), mSurface.get());
mCodec = AVUtils::get()->createCustomComponentByName(mCodecLooper, mime.c_str(), false /* encoder */, format);
if (mCodec == NULL) {
mCodec = MediaCodec::CreateByType(mCodecLooper, mime.c_str(), false /* encoder */);
}
int32_t secure = 0;
if (format->findInt32("secure", &secure) && secure != 0) {
if (mCodec != NULL) {
mCodec->getName(&mComponentName);
mComponentName.append(".secure");
mCodec->release();
ALOGI("[%s] creating", mComponentName.c_str());
mCodec = MediaCodec::CreateByComponentName(
mCodecLooper, mComponentName.c_str(), NULL /* err */, mPid);
}
}
if (mCodec == NULL) {
ALOGE("Failed to create %s%s decoder",
(secure ? "secure " : ""), mime.c_str());
handleError(UNKNOWN_ERROR);
return;
}
mIsSecure = secure;
mCodec->getName(&mComponentName);
status_t err;
if (mSurface != NULL) {
// disconnect from surface as MediaCodec will reconnect
err = native_window_api_disconnect(
mSurface.get(), NATIVE_WINDOW_API_MEDIA);
// We treat this as a warning, as this is a preparatory step.
// Codec will try to connect to the surface, which is where
// any error signaling will occur.
ALOGW_IF(err != OK, "failed to disconnect from surface: %d", err);
}
err = mCodec->configure(
format, mSurface, NULL /* crypto */, 0 /* flags */);
if (err != OK) {
ALOGE("Failed to configure %s decoder (err=%d)", mComponentName.c_str(), err);
mCodec->release();
mCodec.clear();
handleError(err);
return;
}
rememberCodecSpecificData(format);
// the following should work in configured state
CHECK_EQ((status_t)OK, mCodec->getOutputFormat(&mOutputFormat));
CHECK_EQ((status_t)OK, mCodec->getInputFormat(&mInputFormat));
mStats->setString("mime", mime.c_str());
mStats->setString("component-name", mComponentName.c_str());
if (!mIsAudio) {
int32_t width, height;
if (mOutputFormat->findInt32("width", &width)
&& mOutputFormat->findInt32("height", &height)) {
mStats->setInt32("width", width);
mStats->setInt32("height", height);
}
}
sp<AMessage> reply = new AMessage(kWhatCodecNotify, this); // the message MediaCodec will post back to the Decoder
mCodec->setCallback(reply); // register the Decoder callback
err = mCodec->start();
if (err != OK) {
ALOGE("Failed to start %s decoder (err=%d)", mComponentName.c_str(), err);
mCodec->release();
mCodec.clear();
handleError(err);
return;
}
releaseAndResetMediaBuffers();
mPaused = false;
mResumePending = false;
}
The callback from the Decoder into MediaCodec is set via
MediaCodec::setCallback(const sp<AMessage> &callback)
status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
sp<AMessage> msg = new AMessage(kWhatSetCallback, this);
msg->setMessage("callback", callback);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
case kWhatSetCallback:
{
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState == UNINITIALIZED
|| mState == INITIALIZING
|| isExecuting()) {
// callback can't be set after codec is executing,
// or before it's initialized (as the callback
// will be cleared when it goes to INITIALIZED)
PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
sp<AMessage> callback;
CHECK(msg->findMessage("callback", &callback));
mCallback = callback; // stored as MediaCodec's mCallback; it is dup'ed and posted to notify the Decoder
if (mCallback != NULL) {
ALOGI("MediaCodec will operate in async mode");
mFlags |= kFlagIsAsync; // kFlagIsAsync marks that MediaCodec will operate in async mode
} else {
mFlags &= ~kFlagIsAsync;
}
sp<AMessage> response = new AMessage;
response->postReply(replyID);
break;
}
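The async-mode contract that setCallback() establishes inside NuPlayer has a public counterpart in the NDK (API 28+). A hedged sketch, assuming an already-created AMediaCodec; as with the kWhatSetCallback state check above, it must be installed before start():
#include <cstdio>
#include <media/NdkMediaCodec.h>

// NDK analogue of MediaCodec::setCallback's CB_INPUT_AVAILABLE /
// CB_OUTPUT_AVAILABLE / format-changed / error notifications.
static void onInput(AMediaCodec *codec, void *userdata, int32_t index) {
    printf("input buffer available: %d\n", index);
}
static void onOutput(AMediaCodec *codec, void *userdata, int32_t index,
                     AMediaCodecBufferInfo *info) {
    printf("output buffer available: %d pts=%lld\n", index, (long long)info->presentationTimeUs);
}
static void onFormat(AMediaCodec *codec, void *userdata, AMediaFormat *format) {
    printf("output format changed\n");
}
static void onError(AMediaCodec *codec, void *userdata, media_status_t err,
                    int32_t actionCode, const char *detail) {
    printf("codec error %d: %s\n", err, detail);
}

void enableAsyncMode(AMediaCodec *codec) {
    AMediaCodecOnAsyncNotifyCallback cb = {onInput, onOutput, onFormat, onError};
    AMediaCodec_setAsyncNotifyCallback(codec, cb, nullptr /* userdata */);
}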