Preface:
Android offers two main interfaces for playing video: MediaPlayer and MediaCodec. MediaPlayer wraps MediaCodec / GenericSource / Renderer and is therefore the more convenient choice for an app, while MediaCodec leaves far more room for the app to implement functionality itself. At present only MXPlayer uses the MediaPlayer interface; other apps generally use MediaCodec. This article walks through the concrete implementation of MediaCodec.
Application-level usage:
The app drives MediaCodec through the MediaCodec.java API. A typical decode loop looks like this:
// Adapted from cts/tests/tests/media/src/android/media/cts/MediaCodecTest.java
MediaExtractor mediaExtractor = null;
MediaCodec mediaCodec = null;
try{
mediaExtractor = getMediaExtractorForMimeType(inputResourceId, "video/");
MediaFormat mediaFormat =
mediaExtractor.getTrackFormat(mediaExtractor.getSampleTrackIndex()); // format of the video track
String mimeType = mediaFormat.getString(MediaFormat.KEY_MIME); // e.g. "video/avc"
mediaCodec = MediaCodec.createDecoderByType(mimeType); // create the decoder
mediaCodec.configure(mediaFormat, null, null, 0); // configure
mediaCodec.start(); // start
// eos, signaledEos, outputBufferIndex, outputBufferInfo, lastBufferTimestampUs,
// NO_BUFFER_INDEX and outputSurface are fields/parameters of the surrounding CTS test.
while (!eos && !Thread.interrupted()) {
int bufferIndex = mediaCodec.dequeueInputBuffer(0); // dequeueInputBuffer
if (bufferIndex != NO_BUFFER_INDEX) {
ByteBuffer buffer = mediaCodec.getInputBuffers()[bufferIndex]; //getInputBuffers
int size = mediaExtractor.readSampleData(buffer, 0);
long timestampUs = mediaExtractor.getSampleTime();
mediaExtractor.advance();
signaledEos = mediaExtractor.getSampleTrackIndex() == -1
|| timestampUs == lastBufferTimestampUs;
mediaCodec.queueInputBuffer(bufferIndex, // queueInputBuffer
0,
size,
timestampUs,
signaledEos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
}
// If we don't have an output buffer, try to get one now.
if (outputBufferIndex == NO_BUFFER_INDEX) {
outputBufferIndex = mediaCodec.dequeueOutputBuffer(outputBufferInfo, 0); // dequeueOutputBuffer
}
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
|| outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED
|| outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
outputBufferIndex = NO_BUFFER_INDEX;
} else if (outputBufferIndex != NO_BUFFER_INDEX) {
eos = (outputBufferInfo.flags &
MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
boolean render = outputBufferInfo.size > 0;
mediaCodec.releaseOutputBuffer(outputBufferIndex, render); //releaseOutputBuffer
if (render) {
outputSurface.awaitNewImage();
}
outputBufferIndex = NO_BUFFER_INDEX;
}
}
return eos;
} catch (IOException e) {
throw new RuntimeException("error reading input resource", e);
} finally {
if (mediaCodec != null) {
mediaCodec.stop(); // stop
mediaCodec.release(); // release
}
if (mediaExtractor != null) {
mediaExtractor.release();
}
if (outputSurface != null) {
outputSurface.release();
}
}
The rest of this article traces the MediaCodec calls used above:
createDecoderByType
setCallback
configure
start
dequeueInputBuffer
getInputBuffers
queueInputBuffer
dequeueOutputBuffer
releaseOutputBuffer
stop
release
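The CTS snippet above drives the codec synchronously by polling. setCallback, listed above but not used in that snippet, switches MediaCodec into asynchronous mode, in which the codec pushes buffer indices to the app. A minimal sketch of that mode, assuming the same mediaExtractor plus an output surface from the surrounding code:
// Async-mode sketch: setCallback must be installed before configure(); in this
// mode the legacy getInputBuffers()/getOutputBuffers() arrays must not be used.
mediaCodec.setCallback(new MediaCodec.Callback() {
    @Override
    public void onInputBufferAvailable(MediaCodec codec, int index) {
        ByteBuffer buffer = codec.getInputBuffer(index);
        int size = mediaExtractor.readSampleData(buffer, 0);
        long ptsUs = mediaExtractor.getSampleTime();
        boolean eos = size < 0 || !mediaExtractor.advance();
        codec.queueInputBuffer(index, 0, Math.max(size, 0), Math.max(ptsUs, 0),
                eos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
    }
    @Override
    public void onOutputBufferAvailable(MediaCodec codec, int index,
            MediaCodec.BufferInfo info) {
        codec.releaseOutputBuffer(index, info.size > 0 /* render to the surface */);
    }
    @Override
    public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
        // width/height/crop/color-format become reliable here
    }
    @Override
    public void onError(MediaCodec codec, MediaCodec.CodecException e) {
        // handle via e.isTransient()/e.isRecoverable(), see the start section
    }
});
mediaCodec.configure(mediaFormat, surface, null /* crypto */, 0 /* flags */);
mediaCodec.start();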
1. createDecoderByType
public static MediaCodec createDecoderByType(@NonNull String type)
throws IOException {
return new MediaCodec(type, true /* nameIsType */, false /* encoder */);
}
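createDecoderByType lets the framework choose a component for the MIME type. When an app needs one specific component, it can instead resolve a name through MediaCodecList and call createByCodecName; a small sketch (mediaFormat as in the example above; both factory methods throw IOException):
// Sketch: pick an explicit component instead of letting the framework decide.
MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
// Documented workaround: clear the frame rate before querying on older releases.
mediaFormat.setString(MediaFormat.KEY_FRAME_RATE, null);
String codecName = list.findDecoderForFormat(mediaFormat); // e.g. "OMX.google.h264.decoder", or null
MediaCodec codec = (codecName != null)
        ? MediaCodec.createByCodecName(codecName)
        : MediaCodec.createDecoderByType(mediaFormat.getString(MediaFormat.KEY_MIME));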
This creates a decoder for the given MIME type. The MediaCodec constructor used here:
private MediaCodec(
@NonNull String name, boolean nameIsType, boolean encoder) {
Looper looper;
if ((looper = Looper.myLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
} else if ((looper = Looper.getMainLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
} else {
mEventHandler = null;
}
mCallbackHandler = mEventHandler;
mOnFrameRenderedHandler = mEventHandler;
mBufferLock = new Object();
native_setup(name, nameIsType, encoder);
}
The constructor first sets up the callback and render handlers (bound to the caller's Looper, falling back to the main Looper), then calls native_setup. native_setup is implemented in frameworks/base/media/jni/android_media_MediaCodec.cpp:
static void android_media_MediaCodec_native_setup(
JNIEnv *env, jobject thiz,
jstring name, jboolean nameIsType, jboolean encoder) {
if (name == NULL) {
jniThrowException(env, "java/lang/NullPointerException", NULL);
return;
}
const char *tmp = env->GetStringUTFChars(name, NULL);
if (tmp == NULL) {
return;
}
sp<JMediaCodec> codec = new JMediaCodec(env, thiz, tmp, nameIsType, encoder);
const status_t err = codec->initCheck();
if (err == NAME_NOT_FOUND) {
// fail and do not try again.
jniThrowException(env, "java/lang/IllegalArgumentException",
String8::format("Failed to initialize %s, error %#x", tmp, err));
env->ReleaseStringUTFChars(name, tmp);
return;
} else if (err == NO_MEMORY) {
throwCodecException(env, err, ACTION_CODE_TRANSIENT,
String8::format("Failed to initialize %s, error %#x", tmp, err));
env->ReleaseStringUTFChars(name, tmp);
return;
} else if (err != OK) {
// believed possible to try again
jniThrowException(env, "java/io/IOException",
String8::format("Failed to find matching codec %s, error %#x", tmp, err));
env->ReleaseStringUTFChars(name, tmp);
return;
}
env->ReleaseStringUTFChars(name, tmp);
codec->registerSelf();
setMediaCodec(env,thiz, codec);
}
The JMediaCodec constructor in turn calls the native MediaCodec::CreateByType to create the decoder.
2. configure
In MediaCodec.java, configure proceeds as follows:
private void configure(
@Nullable MediaFormat format, @Nullable Surface surface,
@Nullable MediaCrypto crypto, @Nullable IHwBinder descramblerBinder,
@ConfigureFlag int flags) {
if (crypto != null && descramblerBinder != null) {
throw new IllegalArgumentException("Can't use crypto and descrambler together!");
}
String[] keys = null;
Object[] values = null;
if (format != null) {
Map<String, Object> formatMap = format.getMap();
keys = new String[formatMap.size()];
values = new Object[formatMap.size()];
int i = 0;
for (Map.Entry<String, Object> entry: formatMap.entrySet()) {
if (entry.getKey().equals(MediaFormat.KEY_AUDIO_SESSION_ID)) {
int sessionId = 0;
try {
sessionId = (Integer)entry.getValue();
}
catch (Exception e) {
throw new IllegalArgumentException("Wrong Session ID Parameter!");
}
keys[i] = "audio-hw-sync";
values[i] = AudioSystem.getAudioHwSyncForSession(sessionId);
} else {
keys[i] = entry.getKey();
values[i] = entry.getValue();
}
++i;
}
}
mHasSurface = surface != null;
native_configure(keys, values, surface, crypto, descramblerBinder, flags);
}
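For reference, what the app typically hands to configure() is either the track format straight from MediaExtractor (which already carries the csd-0/csd-1 codec-specific data), or a hand-built format. A sketch; spsBuffer and ppsBuffer are placeholders for data that would come from the container:
// Sketch: a hand-built AVC decoder format. With MediaExtractor,
// getTrackFormat() returns an equivalent, fully populated MediaFormat.
MediaFormat fmt = MediaFormat.createVideoFormat(
        MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080);
fmt.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 1 << 20); // optional hint
fmt.setByteBuffer("csd-0", spsBuffer); // SPS (placeholder)
fmt.setByteBuffer("csd-1", ppsBuffer); // PPS (placeholder)
mediaCodec.configure(fmt, surface, null /* crypto */, 0 /* flags */);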
configure() flattens the MediaFormat into parallel key/value arrays that the native layer can parse, then calls native_configure, which lands in android_media_MediaCodec_native_configure:
static void android_media_MediaCodec_native_configure(
JNIEnv *env,
jobject thiz,
jobjectArray keys, jobjectArray values,
jobject jsurface,
jobject jcrypto,
jobject descramblerBinderObj,
jint flags) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
if (codec == NULL) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
sp<AMessage> format;
status_t err = ConvertKeyValueArraysToMessage(env, keys, values, &format);
if (err != OK) {
jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
return;
}
sp<IGraphicBufferProducer> bufferProducer;
if (jsurface != NULL) {
sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
if (surface != NULL) {
bufferProducer = surface->getIGraphicBufferProducer();
} else {
jniThrowException(
env,
"java/lang/IllegalArgumentException",
"The surface has been released");
return;
}
}
sp<ICrypto> crypto;
if (jcrypto != NULL) {
crypto = JCrypto::GetCrypto(env, jcrypto);
}
sp<IDescrambler> descrambler;
if (descramblerBinderObj != NULL) {
descrambler = GetDescrambler(env, descramblerBinderObj);
}
err = codec->configure(format, bufferProducer, crypto, descrambler, flags);
throwExceptionAsNecessary(env, err);
}
status_t JMediaCodec::configure(
const sp<AMessage> &format,
const sp<IGraphicBufferProducer> &bufferProducer,
const sp<ICrypto> &crypto,
const sp<IDescrambler> &descrambler,
int flags) {
sp<Surface> client;
if (bufferProducer != NULL) {
mSurfaceTextureClient =
new Surface(bufferProducer, true /* controlledByApp */);
} else {
mSurfaceTextureClient.clear();
}
return mCodec->configure(
format, mSurfaceTextureClient, crypto, descrambler, flags);
}
From there the call reaches the native MediaCodec::configure, which parses the format contents and applies them to the layers below; the surface is handed down here as well.
MediaCodec::configure posts a kWhatConfigure message; when it is handled, ACodec's configureCodec runs, which calls setupVideoEncoder or setupVideoDecoder. setupVideoDecoder works with two parameters: the msg passed down from the app, and the outputFormat it must fill in. It reads colorFormat from the msg and calls setVideoPortFormatType to apply it; if that returns an error, the format is unsupported and setSupportedOutputFormat is called instead (the same path taken when the app sets no color-format at all). setSupportedOutputFormat loops over the formats the component reports and applies the first one it finds.
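The practical consequence for the app: when decoding to ByteBuffers, the color format actually negotiated by this logic is only reliable once the codec reports its output format. A sketch of reading it back:
// Sketch: the negotiated color format becomes authoritative only after
// INFO_OUTPUT_FORMAT_CHANGED (or onOutputFormatChanged in async mode).
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int result = mediaCodec.dequeueOutputBuffer(info, 10_000 /* timeoutUs */);
if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    MediaFormat outFormat = mediaCodec.getOutputFormat();
    int colorFormat = outFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
    // Many devices report COLOR_FormatYUV420Flexible here; prefer
    // getOutputImage(index) over assuming a fixed planar layout.
}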
3. start
public final void start() {
native_start(); // calls into native start, i.e. android_media_MediaCodec_start
synchronized(mBufferLock) {
cacheBuffers(true /* input */);
cacheBuffers(false /* input */);
}
}
static void android_media_MediaCodec_start(JNIEnv *env, jobject thiz) {
ALOGV("android_media_MediaCodec_start");
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
if (codec == NULL) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
status_t err = codec->start(); // JMediaCodec::start
throwExceptionAsNecessary(env, err, ACTION_CODE_FATAL, "start failed");
}
status_t JMediaCodec::start() {
return mCodec->start(); // MediaCodec::start
}
Through Java -> JNI -> native, the call finally reaches MediaCodec::start().
status_t MediaCodec::start() {
sp<AMessage> msg = new AMessage(kWhatStart, this);
status_t err;
Vector<MediaResource> resources;
MediaResource::Type type = (mFlags & kFlagIsSecure) ?
MediaResource::kSecureCodec : MediaResource::kNonSecureCodec;
MediaResource::SubType subtype =
mIsVideo ? MediaResource::kVideoCodec : MediaResource::kAudioCodec;
resources.push_back(MediaResource(type, subtype, 1));
// Don't know the buffer size at this point, but it's fine to use 1 because
// the reclaimResource call doesn't consider the requester's buffer size for now.
resources.push_back(MediaResource(MediaResource::kGraphicMemory, 1));
for (int i = 0; i <= kMaxRetry; ++i) {
if (i > 0) {
// Don't try to reclaim resource for the first time.
if (!mResourceManagerService->reclaimResource(resources)) {
break;
}
// Recover codec from previous error before retry start.
err = reset();
if (err != OK) {
ALOGE("retrying start: failed to reset codec");
break;
}
sp<AMessage> response;
err = PostAndAwaitResponse(mConfigureMsg, &response);
if (err != OK) {
ALOGE("retrying start: failed to configure codec");
break;
}
}
sp<AMessage> response;
err = PostAndAwaitResponse(msg, &response);
if (!isResourceError(err)) {
break;
}
}
return err;
}
If resources are short, MediaCodec reclaims them via the resource manager, resets, reconfigures and retries; otherwise it simply posts kWhatStart. Handling that message calls ACodec::initiateStart(), which posts its own kWhatStart and ends up in ACodec::LoadedState::onStart(). onStart() sends OMX the command to move to the Idle state and switches ACodec's state to mLoadedToIdleState.
From mLoadedToIdleState, receiving OMX_EventCmdComplete moves the codec on to the Executing state.
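On the app side, failures on this path surface as MediaCodec.CodecException, whose flags mirror the reclaim/retry logic above. A handling sketch:
// Sketch: isTransient() ~ resources busy, retry later; isRecoverable() ~
// stop() then configure()/start() again; otherwise reset() or release().
try {
    mediaCodec.start();
} catch (MediaCodec.CodecException e) {
    if (e.isTransient()) {
        // resource shortage; retry start() later
    } else if (e.isRecoverable()) {
        mediaCodec.stop(); // then configure() and start() again
    } else {
        mediaCodec.reset(); // back to Uninitialized; reconfigure from scratch
    }
}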
4. dequeueInputBuffer / getInputBuffers / queueInputBuffer / dequeueOutputBuffer / releaseOutputBuffer
// step 1: request the index of a free input buffer
int bufferIndex = mediaCodec.dequeueInputBuffer(TIME_OUT_US /* timeoutUs */);
// step 2: look up the ByteBuffer for that index
ByteBuffer buffer = mediaCodec.getInputBuffer(bufferIndex);
// step 3: write the sample data into the buffer
int size = mediaExtractor.readSampleData(buffer, 0 /* offset */);
// step 4: queue the buffer back to the codec
mediaCodec.queueInputBuffer(bufferIndex, 0 /* offset */, size,
timestampUs,
signaledEos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
Once MediaCodec is in the Executing state it can process data. First, dequeueInputBuffer(long timeoutUs) requests an input buffer; timeoutUs is how long to wait, with -1 meaning wait indefinitely. getInputBuffers() returns the input buffer array, and indexing it with the returned index yields the requested buffer; clear it before use so stale data cannot leak into the current sample. Then copy the data in and call queueInputBuffer(...) to queue it for decoding.
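Putting those steps together, a minimal input-feeding helper might look like this (a sketch; assumes the extractor is already positioned on the selected track):
// Sketch: one round of feeding the decoder; returns true once EOS is queued.
static boolean feedInputBuffer(MediaCodec codec, MediaExtractor extractor) {
    int index = codec.dequeueInputBuffer(10_000 /* timeoutUs */);
    if (index < 0) {
        return false; // no free input buffer yet; try again later
    }
    // getInputBuffer(index) returns an already-cleared buffer; the legacy
    // getInputBuffers() array must be clear()ed manually instead.
    ByteBuffer buffer = codec.getInputBuffer(index);
    int size = extractor.readSampleData(buffer, 0);
    if (size < 0) { // input exhausted: queue an empty buffer flagged EOS
        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        return true;
    }
    codec.queueInputBuffer(index, 0, size, extractor.getSampleTime(), 0);
    extractor.advance();
    return false;
}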
MediaCodec.BufferInfo outputBufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(outputBufferInfo, TIMEOUT_USEC);
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
configData.put(outputBuffers[outputBufferIndex]); // configData: a caller-owned ByteBuffer
mediaCodec.releaseOutputBuffer(outputBufferIndex, false /* render */);
Fetching an output buffer is similar: dequeueOutputBuffer(BufferInfo info, long timeoutUs) requests one, taking a BufferInfo object that receives the ByteBuffer's metadata. The returned index selects the output buffer, the BufferInfo describes the valid region, and once the data has been consumed the buffer is released back to the codec.
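dequeueOutputBuffer can also return status codes rather than a buffer index; a robust drain loop has to distinguish them. A sketch:
// Sketch: drain one output buffer, handling the special return codes.
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int index = mediaCodec.dequeueOutputBuffer(info, 10_000 /* timeoutUs */);
if (index >= 0) {
    mediaCodec.releaseOutputBuffer(index, info.size > 0 /* render */);
    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        // stream finished
    }
} else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    MediaFormat newFormat = mediaCodec.getOutputFormat(); // crop, color format, ...
} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
    // only matters for the legacy getOutputBuffers() array
} else {
    // INFO_TRY_AGAIN_LATER: timeoutUs elapsed with no output
}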
4.1 dequeueInputBuffer (only usable when there is no input surface)
status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
msg->setInt64("timeoutUs", timeoutUs);
......
if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
return err;
}
......
}
bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
......
ssize_t index = dequeuePortBuffer(kPortIndexInput);
......
sp<AMessage> response = new AMessage;
response->setSize("index", index);
response->postReply(replyID);
return true;
}
ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
if (availBuffers->empty()) {
return -EAGAIN;
}
size_t index = *availBuffers->begin();
availBuffers->erase(availBuffers->begin());
BufferInfo *info = &mPortBuffers[portIndex][index];
CHECK(!info->mOwnedByClient);
{
Mutex::Autolock al(mBufferLock);
info->mOwnedByClient = true;
// set image-data
if (info->mData->format() != NULL) {
sp<ABuffer> imageData;
if (info->mData->format()->findBuffer("image-data", &imageData)) {
info->mData->meta()->setBuffer("image-data", imageData);
}
int32_t left, top, right, bottom;
if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
}
}
}
return index;
}
So the index dequeueInputBuffer ultimately returns is the front of mAvailPortBuffers[kPortIndexInput].
Question: where do kPortIndexInput and mAvailPortBuffers come from?
kPortIndexInput is 0; kPortIndexOutput is 1.
mAvailPortBuffers is appended to in updateBuffers, which runs when MediaCodec receives kWhatDrainThisBuffer or kWhatFillThisBuffer.
kWhatFillThisBuffer is posted by BufferCallback::onInputBufferAvailable.
ACodecBufferChannel::fillThisBuffer calls onInputBufferAvailable.
ACodec::BaseState::postFillThisBuffer calls fillThisBuffer.
ACodec::ExecutingState::resume() calls submitOutputBuffers() and then walks every available input buffer, calling postFillThisBuffer on each.
When ACodec is in IdleToExecutingState and receives OMX_EventCmdComplete, it calls resume() and enters the Executing state.
In detail:
bool ACodec::IdleToExecutingState::onOMXEvent(
OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
switch (event) {
case OMX_EventCmdComplete:
{ .....
mCodec->mExecutingState->resume();
mCodec->changeState(mCodec->mExecutingState);
return true;
.....
}
}
}
void ACodec::ExecutingState::resume() {
ALOGV("ZYang: resume");
.....
submitOutputBuffers();
.....
// Post all available input buffers
for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
if (info->mStatus == BufferInfo::OWNED_BY_US) {
postFillThisBuffer(info);
}
}
mActive = true;
}
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
.....
info->mData->setFormat(mCodec->mInputFormat);
mCodec->mBufferChannel->fillThisBuffer(info->mBufferID);
info->mData.clear();
info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}
void ACodecBufferChannel::fillThisBuffer(IOMX::buffer_id bufferId) {
.....
mCallback->onInputBufferAvailable(
std::distance(array->begin(), it),
it->mClientBuffer);
}
void BufferCallback::onInputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) {
// posts kWhatFillThisBuffer to MediaCodec
}
void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatCodecNotify:
{
int32_t what;
CHECK(msg->findInt32("what", &what));
switch (what) {
case kWhatFillThisBuffer:
{
/* size_t index = */updateBuffers(kPortIndexInput, msg);
.....
}
}
}
size_t MediaCodec::updateBuffers(
int32_t portIndex, const sp<AMessage> &msg) {
.....
sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
{
.....
mPortBuffers[portIndex][index].mData = buffer;
}
mAvailPortBuffers[portIndex].push_back(index);
return index;
}
Question: where does mCodec->mBuffers come from?
void ACodec::LoadedToIdleState::stateEntered() {
....
if ((err = allocateBuffers()) != OK) {
.....
mCodec->changeState(mCodec->mLoadedState);
}
}
status_t ACodec::LoadedToIdleState::allocateBuffers() {
status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);
if (err != OK) {
return err;
}
err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
if (err != OK) {
return err;
}
err = mCodec->allocateBuffersOnPort(kPortIndexInputExtradata);
err = mCodec->allocateBuffersOnPort(kPortIndexOutputExtradata);
if (err != OK) {
err = OK; // Ignore Extradata buffer allocation failure
}
mCodec->mCallback->onStartCompleted();
return OK;
}
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
// pushes a BufferInfo into mBuffers[portIndex] for each allocated buffer
.....
}
5. stop
public final void stop() {
native_stop();
freeAllTrackedBuffers();
synchronized (mListenerLock) {
if (mCallbackHandler != null) {
mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
mCallbackHandler.removeMessages(EVENT_CALLBACK);
}
if (mOnFrameRenderedHandler != null) {
mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
}
}
}
static void android_media_MediaCodec_stop(JNIEnv *env, jobject thiz) {
ALOGV("android_media_MediaCodec_stop");
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
if (codec == NULL) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
status_t err = codec->stop();
throwExceptionAsNecessary(env, err);
}
Data exchange between MediaCodec and the app:
1. dequeueOutputBuffer
When the app needs to pull decoded YUV out of the lower layers for rendering, it calls dequeueOutputBuffer. The native-side handling:
status_t MediaCodec::dequeueOutputBuffer( // the native entry point reached from the app
size_t *index,
size_t *offset,
size_t *size,
int64_t *presentationTimeUs,
uint32_t *flags,
int64_t timeoutUs) {
sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this); // post the message; timeoutUs is attached below
msg->setInt64("timeoutUs", timeoutUs);
sp<AMessage> response;
status_t err;
if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
return err;
}
CHECK(response->findSize("index", index));
CHECK(response->findSize("offset", offset));
CHECK(response->findSize("size", size));
CHECK(response->findInt64("timeUs", presentationTimeUs));
CHECK(response->findInt32("flags", (int32_t *)flags));
return OK;
}
void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatDequeueOutputBuffer: // handling the posted message
{
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (handleDequeueOutputBuffer(replyID, true /* new request */)) {
break;
}
int64_t timeoutUs;
CHECK(msg->findInt64("timeoutUs", &timeoutUs));
if (timeoutUs == 0ll) {
PostReplyWithError(replyID, -EAGAIN);
break;
}
mFlags |= kFlagDequeueOutputPending;
mDequeueOutputReplyID = replyID;
if (timeoutUs > 0ll) {
sp<AMessage> timeoutMsg =
new AMessage(kWhatDequeueOutputTimedOut, this);
timeoutMsg->setInt32(
"generation", ++mDequeueOutputTimeoutGeneration);
timeoutMsg->post(timeoutUs);
}
break;
}
}
bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
if (/* flushing, EOS, format change, no available buffer, ... */) {
// extensive error handling elided
} else {
sp<AMessage> response = new AMessage;
ssize_t index = dequeuePortBuffer(kPortIndexOutput); // take an output buffer to hand to the app
const sp<MediaCodecBuffer> &buffer =
mPortBuffers[kPortIndexOutput][index].mData; // this is the buffer handed out
// pack the buffer's metadata into the response returned to the app
response->setSize("index", index);
response->setSize("offset", buffer->offset());
response->setSize("size", buffer->size());
int64_t timeUs;
CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
statsBufferReceived(timeUs);
response->setInt64("timeUs", timeUs);
int32_t flags;
CHECK(buffer->meta()->findInt32("flags", &flags));
response->setInt32("flags", flags);
response->postReply(replyID);
}
return true;
}
ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
List<size_t> *availBuffers = &mAvailPortBuffers[portIndex]; // the port's free list
size_t index = *availBuffers->begin(); // take the first free index
availBuffers->erase(availBuffers->begin()); // and remove it from the free list
BufferInfo *info = &mPortBuffers[portIndex][index]; // look up its BufferInfo
CHECK(!info->mOwnedByClient);
{
Mutex::Autolock al(mBufferLock);
info->mOwnedByClient = true;
// set image-data
if (info->mData->format() != NULL) {
sp<ABuffer> imageData;
if (info->mData->format()->findBuffer("image-data", &imageData)) {
info->mData->meta()->setBuffer("image-data", imageData);
}
int32_t left, top, right, bottom;
if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
}
}
}
return index;
}
// Question: where do mPortBuffers and mAvailPortBuffers come from?
When MediaCodec receives kWhatDrainThisBuffer or kWhatFillThisBuffer it updates the output and input buffers respectively, storing the buffer carried by the message into mPortBuffers and mAvailPortBuffers.
size_t MediaCodec::updateBuffers(
int32_t portIndex, const sp<AMessage> &msg) {
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
size_t index;
CHECK(msg->findSize("index", &index));
sp<RefBase> obj;
CHECK(msg->findObject("buffer", &obj)); // fetch the buffer carried by the msg
sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get()); // cast to MediaCodecBuffer
{
Mutex::Autolock al(mBufferLock);
if (mPortBuffers[portIndex].size() <= index) { // grow the vector if it is too small
mPortBuffers[portIndex].resize(align(index + 1, kNumBuffersAlign));
}
mPortBuffers[portIndex][index].mData = buffer; // store the buffer in mPortBuffers
}
mAvailPortBuffers[portIndex].push_back(index); // append the index to the free list
return index;
}
Where kWhatDrainThisBuffer and kWhatFillThisBuffer come from:
// origin of kWhatDrainThisBuffer
ACodec::BaseState::onOMXFillBufferDone(......) { // ACodec receives the FBD (FillBufferDone) callback from OMX
mCodec->mBufferChannel->drainThisBuffer(info->mBufferID, flags);
// which calls ACodecBufferChannel::drainThisBuffer
}
void ACodecBufferChannel::drainThisBuffer(
IOMX::buffer_id bufferId,
OMX_U32 omxFlags) {
mCallback->onOutputBufferAvailable( // calls onOutputBufferAvailable
std::distance(array->begin(), it),
it->mClientBuffer);
}
void BufferCallback::onOutputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatDrainThisBuffer); // post kWhatDrainThisBuffer
notify->setSize("index", index);
notify->setObject("buffer", buffer);
notify->post();
}
// In short: when ACodec receives an FBD from OMX it tells MediaCodec an output buffer is available, and MediaCodec updates its output buffer list.
// origin of kWhatFillThisBuffer
// ACodec receives an EBD (EmptyBufferDone) or kWhatInputBufferFilled
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
if (mCodec->mPortEOS[kPortIndexInput]) {
return;
}
CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
info->mData->setFormat(mCodec->mInputFormat);
mCodec->mBufferChannel->fillThisBuffer(info->mBufferID);
info->mData.clear();
info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}
void ACodecBufferChannel::fillThisBuffer(IOMX::buffer_id bufferId) {
mCallback->onInputBufferAvailable( // calls onInputBufferAvailable
std::distance(array->begin(), it),
it->mClientBuffer);
}
void BufferCallback::onInputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatFillThisBuffer); // post kWhatFillThisBuffer
notify->setSize("index", index);
notify->setObject("buffer", buffer);
notify->post();
}
2. releaseOutputBuffer
Once the app has finished rendering a frame, it calls this function to hand the buffer back to MediaCodec.
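Besides the boolean render flag used above, there is a timestamped variant of releaseOutputBuffer for A/V sync; a sketch, where renderDelayNs is an assumed, app-computed delay and the codec was configured with an output Surface:
// Sketch: request rendering at an absolute time on the System.nanoTime()
// clock; frames whose timestamp is far in the past may simply be dropped.
long renderAtNs = System.nanoTime() + renderDelayNs; // renderDelayNs: assumed, app-computed
mediaCodec.releaseOutputBuffer(outputBufferIndex, renderAtNs);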
The flow in detail:
status_t MediaCodec::releaseOutputBuffer(size_t index) {
sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this); // post the message
msg->setSize("index", index);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case kWhatReleaseOutputBuffer:
{
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
status_t err = onReleaseOutputBuffer(msg); // onReleaseOutputBuffer
PostReplyWithError(replyID, err);
break;
}
}
status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
size_t index;
CHECK(msg->findSize("index", &index));
int32_t render;
if (!msg->findInt32("render", &render)) {
render = 0;
}
BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
// synchronization boundary for getBufferAndFormat
sp<MediaCodecBuffer> buffer;
{
Mutex::Autolock al(mBufferLock);
info->mOwnedByClient = false;
buffer = info->mData;
info->mData.clear();
}
if (render && buffer->size() != 0) { // the app asked to render this buffer
int64_t mediaTimeUs = -1;
buffer->meta()->findInt64("timeUs", &mediaTimeUs);
int64_t renderTimeNs = 0;
if (!msg->findInt64("timestampNs", &renderTimeNs)) {
// use media timestamp if client did not request a specific render timestamp
ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
renderTimeNs = mediaTimeUs * 1000;
}
if (mSoftRenderer != NULL) {
std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
buffer->data(), buffer->size(), mediaTimeUs, renderTimeNs,
mPortBuffers[kPortIndexOutput].size(), buffer->format());
// if we are running, notify rendered frames
if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
sp<AMessage> notify = mOnFrameRenderedNotification->dup();
sp<AMessage> data = new AMessage;
if (CreateFramesRenderedMessage(doneFrames, data)) {
notify->setMessage("data", data);
notify->post();
}
}
}
mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
} else { // no render requested
mBufferChannel->discardBuffer(buffer);
}
return OK;
}
// the render path
status_t ACodecBufferChannel::renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
std::shared_ptr<const std::vector<const BufferInfo>> array(
std::atomic_load(&mOutputBuffers));
BufferInfoIterator it = findClientBuffer(array, buffer);
ALOGV("renderOutputBuffer #%d", it->mBufferId);
sp<AMessage> msg = mOutputBufferDrained->dup();
msg->setObject("buffer", buffer);
msg->setInt32("buffer-id", it->mBufferId);
msg->setInt32("render", true);
msg->setInt64("timestampNs", timestampNs);
msg->post(); // posts kWhatOutputBufferDrained
return OK;
}
// the no-render path
status_t ACodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
std::shared_ptr<const std::vector<const BufferInfo>> array(
std::atomic_load(&mInputBuffers));
BufferInfoIterator it = findClientBuffer(array, buffer);
ALOGV("discardBuffer #%d", it->mBufferId);
sp<AMessage> msg = mOutputBufferDrained->dup();
msg->setObject("buffer", it->mCodecBuffer);
msg->setInt32("buffer-id", it->mBufferId);
msg->setInt32("discarded", true);
msg->post(); // posts kWhatOutputBufferDrained
return OK;
}
// handling kWhatOutputBufferDrained
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
IOMX::buffer_id bufferID;
CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
sp<RefBase> obj;
CHECK(msg->findObject("buffer", &obj));
sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get()); // the drained buffer
int32_t discarded = 0;
msg->findInt32("discarded", &discarded);
ssize_t index;
BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); // look up its BufferInfo
BufferInfo::Status status = BufferInfo::getSafeStatus(info);
if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
mCodec->dumpBuffers(kPortIndexOutput);
mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
return;
}
int64_t timeUs = -1;
buffer->meta()->findInt64("timeUs", &timeUs);
bool skip = mCodec->getDSModeHint(msg, timeUs);
info->mData = buffer; // reattach the buffer to info->mData
int32_t render;
if (!skip && mCodec->mNativeWindow != NULL
&& msg->findInt32("render", &render) && render != 0
&& !discarded && buffer->size() != 0) {
// the buffer should actually be rendered
android_native_rect_t crop;
if (buffer->format()->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) {
// NOTE: native window uses extended right-bottom coordinate
++crop.right;
++crop.bottom;
if (memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
mCodec->mLastNativeWindowCrop = crop;
status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
}
}
int32_t dataSpace;
if (buffer->format()->findInt32("android._dataspace", &dataSpace)
&& dataSpace != mCodec->mLastNativeWindowDataSpace) {
status_t err = native_window_set_buffers_data_space(
mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
mCodec->mLastNativeWindowDataSpace = dataSpace;
ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
}
if (buffer->format()->contains("hdr-static-info")) {
HDRStaticInfo info;
if (ColorUtils::getHDRStaticInfoFromFormat(buffer->format(), &info)
&& memcmp(&mCodec->mLastHDRStaticInfo, &info, sizeof(info))) {
setNativeWindowHdrMetadata(mCodec->mNativeWindow.get(), &info);
mCodec->mLastHDRStaticInfo = info;
}
}
// save buffers sent to the surface so we can get render time when they return
int64_t mediaTimeUs = -1;
buffer->meta()->findInt64("timeUs", &mediaTimeUs);
if (mediaTimeUs >= 0) {
mCodec->mRenderTracker.onFrameQueued(
mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
}
int64_t timestampNs = 0;
if (!msg->findInt64("timestampNs", ×tampNs)) {
// use media timestamp if client did not request a specific render timestamp
if (buffer->meta()->findInt64("timeUs", ×tampNs)) {
ALOGV("using buffer PTS of %lld", (long long)timestampNs);
timestampNs *= 1000;
}
}
status_t err;
err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
info->checkReadFence("onOutputBufferDrained before queueBuffer");
err = mCodec->mNativeWindow->queueBuffer(
mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
info->mFenceFd = -1;
if (err == OK) {
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
} else {
ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
info->mStatus = BufferInfo::OWNED_BY_US;
// keeping read fence as write fence to avoid clobbering
info->mIsReadFence = false;
}
} else {
if (mCodec->mNativeWindow != NULL && (discarded || buffer->size() != 0)) {
// move read fence into write fence to avoid clobbering
info->mIsReadFence = false;
ATRACE_NAME("frame-drop");
}
info->mStatus = BufferInfo::OWNED_BY_US; // key point: ownership returns to ACodec (OWNED_BY_US)
}
PortMode mode = getPortMode(kPortIndexOutput);
switch (mode) {
case KEEP_BUFFERS:
{
if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
info = mCodec->dequeueBufferFromNativeWindow();
}
break;
}
case RESUBMIT_BUFFERS:
{
if (!mCodec->mPortEOS[kPortIndexOutput]) {
if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
info = mCodec->dequeueBufferFromNativeWindow(); // dequeue a replacement buffer from the native window
}
if (info != NULL) {
ALOGV("[%s] calling fillBuffer %u",
mCodec->mComponentName.c_str(), info->mBufferID);
info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
status_t err = mCodec->fillBuffer(info);
}
}
break;
}
case FREE_BUFFERS:
{
status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
if (err != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
}
break;
}
default:
ALOGE("Invalid port mode: %d", mode);
return;
}
}
}