status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        // Rendering goes through a Surface: allocate GraphicBuffers for the
        // output port directly from the native window.
        return allocateOutputBuffersFromNativeWindow();
    }
As the check above shows, when a native window is present the output buffers will be rendered directly, so they must be allocated from the native window's buffers rather than from ordinary memory:
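To make the calls that follow easier to read, here is a minimal sketch of the producer-side lifecycle every window-backed output buffer goes through (the function and variable names are illustrative, not code from OMXCodec):

#include <system/window.h>

// Sketch: the dequeue -> fill -> queue cycle of a window-backed buffer.
static void renderOneFrame(ANativeWindow *win) {
    ANativeWindowBuffer *buf;
    // 1. Borrow a free buffer from the window's BufferQueue
    //    (gralloc memory shared with SurfaceFlinger).
    if (native_window_dequeue_buffer_and_wait(win, &buf) != 0) {
        return;
    }
    // 2. The decoder writes the frame straight into buf->handle;
    //    this is what mOMX->useGraphicBuffer() arranges below.
    // 3. Queue the filled buffer for composition (-1: no acquire fence).
    win->queueBuffer(win, buf, -1);
}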
status_t OMXCodec::allocateOutputBuffersFromNativeWindow()
{
    // Ask the component how many output buffers it needs and how large they are.
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
CODEC_LOGE("getParameter failed: %d", err);
return err;
}
    // Tell the window the geometry and pixel format the decoder will produce.
    err = native_window_set_buffers_geometry(
            mNativeWindow.get(),
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat);
// Set up the native window.
OMX_U32 usage = 0;
err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
ALOGV("native_window_set_usage usage=0x%lx", usage);
err = native_window_set_usage(
mNativeWindow.get(), usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
if (err != 0) {
ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
return err;
}
err = native_window_set_buffer_count(
mNativeWindow.get(), def.nBufferCountActual);
if (err != 0) {
ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
-err);
return err;
}
// Dequeue buffers and send them to OMX
for (OMX_U32 i = 0; i < def.nBufferCountActual; i++) {
ANativeWindowBuffer* buf;
err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
if (err != 0) {
ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
break;
}
        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
BufferInfo info;
info.mData = NULL;
info.mSize = def.nBufferSize;
info.mStatus = OWNED_BY_US;
info.mMem = NULL;
info.mMediaBuffer = new MediaBuffer(graphicBuffer);
info.mMediaBuffer->setObserver(this);
mPortBuffers[kPortIndexOutput].push(info);
IOMX::buffer_id bufferId;
err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
&bufferId);
if (err != 0) {
CODEC_LOGE("registering GraphicBuffer with OMX IL component "
"failed: %d", err);
break;
}
}
}
Step 4: create a GraphicBuffer object and register it with OMX as the decoder's output buffer
// Dequeue buffers and send them to OMX
for (OMX_U32 i = 0; i < def.nBufferCountActual; i++) {
ANativeWindowBuffer* buf;
        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf); // ask the window's BufferQueue (SurfaceFlinger side) for a free buffer
if (err != 0) {
ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
break;
}
        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); // wrap the ANativeWindowBuffer in a client-side GraphicBuffer object
BufferInfo info;
info.mData = NULL;
info.mSize = def.nBufferSize;
info.mStatus = OWNED_BY_US;
info.mMem = NULL;
info.mMediaBuffer = new MediaBuffer(graphicBuffer);
info.mMediaBuffer->setObserver(this);
        mPortBuffers[kPortIndexOutput].push(info); // record it in the output-port buffer list
IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId); // tell the underlying decoder to use this graphic buffer as an output buffer
After the buffers have been allocated, the underlying decoder must be told to use them on its output port; that is what the mOMX->useGraphicBuffer() call does, and the buffer_id it returns is kept with the corresponding entry in mPortBuffers[kPortIndexOutput].
status_t OMXNodeInstance::useGraphicBuffer(
        OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
        OMX::buffer_id *buffer) {
    Mutex::Autolock autoLock(mLock);

    // Look up the vendor extension index for Android native buffers.
    OMX_INDEXTYPE index;
    OMX_STRING name = const_cast<OMX_STRING>(
            "OMX.google.android.index.useAndroidNativeBuffer");
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);

    BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);

    OMX_BUFFERHEADERTYPE *header;

    OMX_VERSIONTYPE ver;
    ver.s.nVersionMajor = 1;
    ver.s.nVersionMinor = 0;
    ver.s.nRevision = 0;
    ver.s.nStep = 0;
    UseAndroidNativeBufferParams params = {
        sizeof(UseAndroidNativeBufferParams), ver, portIndex, bufferMeta,
        &header, graphicBuffer,
    };

    err = OMX_SetParameter(mHandle, index, &params); // hand the buffer down to the component
}
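Two details are easy to miss here. First, OMX_SetParameter is not a real function: OMX_Core.h defines it as a macro that simply dispatches through the component handle's function table. Second, UseAndroidNativeBufferParams is the extension payload declared in HardwareAPI.h; the layout below is reconstructed from the same AOSP generation, so treat it as a close sketch rather than gospel:

/* OMX_Core.h: the "hook" is plain dispatch through the function table. */
#define OMX_SetParameter(hComponent, nParamIndex, pComponentParameterStructure) \
    ((OMX_COMPONENTTYPE *)hComponent)->SetParameter(hComponent, nParamIndex,    \
                                                    pComponentParameterStructure)

/* HardwareAPI.h: the payload carried through that dispatch. */
struct UseAndroidNativeBufferParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_PTR pAppPrivate;                  // here: the BufferMeta created above
    OMX_BUFFERHEADERTYPE **bufferHeader;  // out-param, filled in by the component
    const sp<ANativeWindowBuffer> &nativeBuffer;
};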
OMX_SetParameter therefore lands in the component's SetParameter entry point. For the Samsung decoder the call chain is SEC_OMX_VideoDecodeSetParameter -> SEC_OSAL_SetANBParameter -> useAndroidNativeBuffer, which is HAL-layer code. Note that this path is Exynos-specific; Qualcomm's OMX components implement the same extension in a different form.

OMX_ERRORTYPE useAndroidNativeBuffer(
SEC_OMX_BASEPORT *pSECPort,
OMX_BUFFERHEADERTYPE **ppBufferHdr,
OMX_U32 nPortIndex,
OMX_PTR pAppPrivate,
OMX_U32 nSizeBytes,
OMX_U8 *pBuffer)
{
    OMX_BUFFERHEADERTYPE *temp_bufferHeader = NULL;
    OMX_U32 i;

    // Allocate a fresh OMX buffer header for the caller-supplied memory.
    temp_bufferHeader = (OMX_BUFFERHEADERTYPE *)SEC_OSAL_Malloc(sizeof(OMX_BUFFERHEADERTYPE));

    // Attach the header to the first free slot on the port.
    for (i = 0; i < pSECPort->portDefinition.nBufferCountActual; i++) {
        if (pSECPort->bufferStateAllocate[i] == BUFFER_STATE_FREE) {
            pSECPort->bufferHeader[i] = temp_bufferHeader;
            pSECPort->bufferStateAllocate[i] = (BUFFER_STATE_ASSIGNED | HEADER_STATE_ALLOCATED);
            INIT_SET_SIZE_VERSION(temp_bufferHeader, OMX_BUFFERHEADERTYPE);
            temp_bufferHeader->pBuffer = pBuffer; // points at the GraphicBuffer's memory
            *ppBufferHdr = temp_bufferHeader;
            goto EXIT;
        }
    }
}
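Back on the framework side, the OMX_BUFFERHEADERTYPE allocated above returns through params.bufferHeader, and the tail of OMXNodeInstance::useGraphicBuffer records it, roughly like this (a sketch with error handling elided):

    // header was filled in by the component via params.bufferHeader.
    addActiveBuffer(portIndex, header);  // bookkeeping: this buffer is now live on the port
    *buffer = header;                    // IOMX::buffer_id is an opaque pointer; in practice
                                         // it is simply the OMX buffer header handed back to OMXCodec
    return OK;

With the buffers registered, the component's buffer-processing thread takes over, shuttling bitstream in and decoded frames out: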
OMX_ERRORTYPE SEC_OMX_BufferProcess(OMX_HANDLETYPE hComponent)
{
    while ((SEC_Check_BufferProcess_State(pSECComponent)) && (!pSECComponent->bExitBufferProcessThread)) {
        SEC_OSAL_SleepMillisec(0);

        // Refill the output side: if we do not hold a valid output buffer,
        // pop the next one the client handed over via FillThisBuffer.
        SEC_OSAL_MutexLock(outputUseBuffer->bufferMutex);
        if ((outputUseBuffer->dataValid != OMX_TRUE) &&
            (!CHECK_PORT_BEING_FLUSHED(secOutputPort))) {
            SEC_OSAL_MutexUnlock(outputUseBuffer->bufferMutex);
            ret = SEC_OutputBufferGetQueue(pSECComponent);
            if ((ret == OMX_ErrorUndefined) ||
                (secInputPort->portState != OMX_StateIdle) ||
                (secOutputPort->portState != OMX_StateIdle)) {
                break;
            }
        } else {
            SEC_OSAL_MutexUnlock(outputUseBuffer->bufferMutex);
        }

        if (pSECComponent->remainOutputData == OMX_FALSE) {
            // Likewise pull the next input (bitstream) buffer when needed.
            if (pSECComponent->reInputData == OMX_FALSE) {
                SEC_OSAL_MutexLock(inputUseBuffer->bufferMutex);
                if ((SEC_Preprocessor_InputData(pOMXComponent) == OMX_FALSE) &&
                    (!CHECK_PORT_BEING_FLUSHED(secInputPort))) {
                    SEC_OSAL_MutexUnlock(inputUseBuffer->bufferMutex);
                    ret = SEC_InputBufferGetQueue(pSECComponent);
                    break;
                }
                SEC_OSAL_MutexUnlock(inputUseBuffer->bufferMutex);
            }

            SEC_OSAL_MutexLock(inputUseBuffer->bufferMutex);
            SEC_OSAL_MutexLock(outputUseBuffer->bufferMutex);
            // Hand one input/output pair to the codec-specific routine; for
            // H.264 this ends up in SEC_MFC_H264_Decode_Nonblock, shown next.
            ret = pSECComponent->sec_mfc_bufferProcess(pOMXComponent, inputData, outputData);
        }
    }
}
OMX_ERRORTYPE SEC_MFC_H264_Decode_Nonblock(OMX_COMPONENTTYPE *pOMXComponent, SEC_OMX_DATA *pInputData, SEC_OMX_DATA *pOutputData)
{
    void *pOutputBuf = (void *)pOutputData->dataBuffer;
    void *pSrcBuf[3] = {NULL, };
    void *pYUVBuf[3] = {NULL, };

    if ((pSECOutputPort->bIsANBEnabled == OMX_TRUE) && (csc_method == CSC_METHOD_HW)) {
        // Destination: physical addresses of the Android native buffer
        // (the GraphicBuffer registered earlier).
        SEC_OSAL_GetPhysANB(pOutputData->dataBuffer, pYUVBuf);
        // Source: physical addresses of MFC's decoded (tiled) frame.
        pSrcBuf[0] = outputInfo.YPhyAddr;
        pSrcBuf[1] = outputInfo.CPhyAddr;
    }
    // Program the hardware color-space converter to write straight into
    // the window buffer, then kick off the conversion.
    csc_set_dst_buffer(
        pVideoDec->csc_handle,  /* handle */
        pYUVBuf[0],             /* y addr */
        pYUVBuf[1],             /* u addr or uv addr */
        pYUVBuf[2],             /* v addr or none */
        0);                     /* ion fd */
    csc_convert(pVideoDec->csc_handle);
}
typedef enum _SEC_OMX_COLOR_FORMATTYPE {
OMX_SEC_COLOR_FormatNV12TPhysicalAddress = 0x7F000001, /**< Reserved region for introducing Vendor Extensions */
OMX_SEC_COLOR_FormatNV12LPhysicalAddress = 0x7F000002,
OMX_SEC_COLOR_FormatNV12LVirtualAddress = 0x7F000003,
OMX_SEC_COLOR_FormatNV12Tiled = 0x7FC00002, /* 0x7FC00002 */
#ifdef S3D_SUPPORT
OMX_SEC_COLOR_FormatNV12Tiled_SBS_LR = 0x7FC00003, /* 0x7FC00003 */
OMX_SEC_COLOR_FormatNV12Tiled_SBS_RL = 0x7FC00004, /* 0x7FC00004 */
OMX_SEC_COLOR_FormatNV12Tiled_TB_LR = 0x7FC00005, /* 0x7FC00005 */
OMX_SEC_COLOR_FormatNV12Tiled_TB_RL = 0x7FC00006, /* 0x7FC00006 */
OMX_SEC_COLOR_FormatYUV420SemiPlanar_SBS_LR = 0x7FC00007, /* 0x7FC00007 */
OMX_SEC_COLOR_FormatYUV420SemiPlanar_SBS_RL = 0x7FC00008, /* 0x7FC00008 */
OMX_SEC_COLOR_FormatYUV420SemiPlanar_TB_LR = 0x7FC00009, /* 0x7FC00009 */
OMX_SEC_COLOR_FormatYUV420SemiPlanar_TB_RL = 0x7FC0000A, /* 0x7FC0000A */
OMX_SEC_COLOR_FormatYUV420Planar_SBS_LR = 0x7FC0000B, /* 0x7FC0000B */
OMX_SEC_COLOR_FormatYUV420Planar_SBS_RL = 0x7FC0000C, /* 0x7FC0000C */
OMX_SEC_COLOR_FormatYUV420Planar_TB_LR = 0x7FC0000D, /* 0x7FC0000D */
OMX_SEC_COLOR_FormatYUV420Planar_TB_RL = 0x7FC0000E, /* 0x7FC0000E */
#endif
OMX_SEC_COLOR_FormatNV21LPhysicalAddress = 0x7F000010,
OMX_SEC_COLOR_FormatNV21Linear = 0x7F000011,
/* for Android Native Window */
OMX_SEC_COLOR_FormatANBYUV420SemiPlanar = 0x100,
/* for Android SurfaceMediaSource*/
OMX_COLOR_FormatAndroidOpaque = 0x7F000789
} SEC_OMX_COLOR_FORMATTYPE;
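These vendor formats are the reason the csc_convert() step above exists: the Exynos MFC decoder emits frames in its native OMX_SEC_COLOR_FormatNV12Tiled layout, and the hardware color-space converter unfolds that tiled NV12 into a linear format the display pipeline can scan out.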
virtual void render(MediaBuffer *buffer) {
    ATRACE_CALL();
    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    // queueBuffer timestamps are in nanoseconds, hence the * 1000.
    native_window_set_buffers_timestamp(mNativeWindow.get(), timeUs * 1000);
    status_t err = mNativeWindow->queueBuffer(
            mNativeWindow.get(), buffer->graphicBuffer().get(), -1); // rendering is just queueBuffer()
    if (err != 0) {
        ALOGE("queueBuffer failed with error %s (%d)", strerror(-err),
                -err);
        return;
    }
}
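queueBuffer() hands the decoded GraphicBuffer back to the BufferQueue, where SurfaceFlinger latches and composites it; once the consumer releases the buffer, it returns to the decoder on a later dequeue, closing the zero-copy render loop.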