AwesomePlayer constructs an OMX client, OMXClient, declared as the member variable:
OMXClient mClient;
Let's look at the structure of OMXClient:
class OMXClient {
public:
    OMXClient();

    status_t connect();
    void disconnect();

    sp<IOMX> interface() {
        return mOMX;
    }

private:
    sp<IOMX> mOMX;

    OMXClient(const OMXClient &);
    OMXClient &operator=(const OMXClient &);
};
The IOMX member mOMX is the handle through which OMXClient talks to the OMX service over Binder.
The AwesomePlayer constructor calls:
CHECK_EQ(mClient.connect(), (status_t)OK);
status_t OMXClient::connect() {
    sp<IServiceManager> sm = defaultServiceManager();
    sp<IBinder> binder = sm->getService(String16("media.player"));
    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);

    CHECK(service.get() != NULL);

    mOMX = service->getOMX();
    CHECK(mOMX.get() != NULL);

    if (!mOMX->livesLocally(NULL /* node */, getpid())) {
        ALOGI("Using client-side OMX mux.");
        mOMX = new MuxOMX(mOMX);
    }

    return OK;
}
sp<IOMX> MediaPlayerService::getOMX() {
    Mutex::Autolock autoLock(mLock);

    if (mOMX.get() == NULL) {
        mOMX = new OMX;
    }

    return mOMX;
}
OMXClient::connect() obtains MediaPlayerService through the Binder mechanism and then asks MediaPlayerService for the OMX instance (created on first use, as getOMX() above shows). With that, OMXClient holds the entry point to OMX and can request OMX services over Binder from then on. In other words, OMXClient is the entry point to OpenMAX in Android. (Note the MuxOMX branch in connect(): when the OMX service does not live in the caller's process, the remote IOMX is wrapped so that software components can be instantiated locally rather than in mediaserver.)
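Put together, a player's use of OMXClient boils down to the following minimal sketch; it uses only the members shown above, and the ordering and error handling are illustrative:
// Sketch: typical OMXClient lifecycle inside a Stagefright player.
OMXClient client;
CHECK_EQ(client.connect(), (status_t)OK); // Binder lookup of "media.player", then getOMX()

sp<IOMX> omx = client.interface();        // the single IOMX handle shared by all codecs of this player

// ... hand `omx` to OMXCodec::Create() for each audio/video track ...

client.disconnect();                      // release the IOMX reference when the player is torn down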
When the audio and video decoders mVideoSource and mAudioSource are created, the sp<IOMX> mOMX held by OMXClient is passed to them, so they share this single OMX entry point.
In other words, each AwesomePlayer owns exactly one IOMX handle, and all of its audio and video decoders use that one handle to obtain OMX services.
sp<IOMX> interface() {
    return mOMX;
}
mAudioSource = OMXCodec::Create(
        mClient.interface(), mAudioTrack->getFormat(),
        false, // createEncoder
        mAudioTrack);

mVideoSource = OMXCodec::Create(
        mClient.interface(), mVideoTrack->getFormat(),
        false, // createEncoder
        mVideoTrack,
        NULL, flags, USE_SURFACE_ALLOC ? mNativeWindow : NULL);
Each AwesomePlayer has exactly one entry point to the OMX service, but it rarely needs just one decoder: there is usually both audio and video, and possibly several formats. All of these decoders need service from OMX, which means the OMX side has to create a separate decoder component for each codec in AwesomePlayer. Two members of OMX are central here: OMXMaster and OMXNodeInstance. OMX uses them to create and manage the various OpenMAX decoder components that serve the different decoders in AwesomePlayer. Let's look at how they do this.
1. OMXNodeInstance: OMX creates one OMXNodeInstance per requested decoder, identified by a node id. As a result, every OMXCodec in the player has its own OMXNodeInstance on the OMX service side.
2. OMXMaster: manages the underlying software and hardware codec libraries and creates the concrete decoder component that an OMXNodeInstance asks for (see the sketch after this list).
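To make the relationship concrete, here is a minimal client-side sketch: one IOMX connection per player, one node per codec. It only uses calls that appear later in this article; the component names and the omitted error handling are illustrative:
// One IOMX connection, two decoder nodes (error checking omitted for brevity).
sp<IOMX> omx = mClient.interface();

sp<OMXCodecObserver> videoObserver = new OMXCodecObserver;
sp<OMXCodecObserver> audioObserver = new OMXCodecObserver;

IOMX::node_id videoNode = 0, audioNode = 0;
omx->allocateNode("OMX.google.h264.decoder", videoObserver, &videoNode); // -> its own OMXNodeInstance
omx->allocateNode("OMX.google.aac.decoder",  audioObserver, &audioNode); // -> another OMXNodeInstance

// Every subsequent IOMX call carries its node_id, which is how the OMX side
// tells the two decoder instances apart.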
Next, let's assume the video decoder we need is AVC and walk through how the decoder gets created.
(By default the software decoder path is taken.)
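For orientation, here is a rough map of the call chain we are about to follow; every name in it appears in the code later in this section:
// AwesomePlayer::initVideoDecoder()
//   -> OMXCodec::Create(mClient.interface(), ...)             // picks "OMX.google.h264.decoder"
//     -> IOMX::allocateNode(componentName, observer, &node)   // Binder call into the OMX service
//       -> OMX::allocateNode() -> OMXMaster::makeComponentInstance()
//         -> SoftOMXPlugin::makeComponentInstance()
//           -> dlopen("libstagefright_soft_h264dec.so")
//             -> createSoftOMXComponent() -> new SoftAVC(...)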
The OMX constructor does the initialization:
OMXMaster *mMaster;
OMX::OMX() : mMaster(new OMXMaster), mNodeCounter(0) { }
OMXMaster::OMXMaster()
    : mVendorLibHandle(NULL) {
    addVendorPlugin();
    addPlugin(new SoftOMXPlugin);
}
OMXMaster is responsible for managing the codec plugins inside OMX. Software and hardware codecs both follow the OpenMAX standard and are managed by mounting them as plugins.
The software components known to SoftOMXPlugin are listed in its kComponents table:
static const struct {
    const char *mName;
    const char *mLibNameSuffix;
    const char *mRole;
} kComponents[] = {
    { "OMX.google.aac.decoder", "aacdec", "audio_decoder.aac" },
    { "OMX.google.aac.encoder", "aacenc", "audio_encoder.aac" },
    { "OMX.google.amrnb.decoder", "amrdec", "audio_decoder.amrnb" },
    { "OMX.google.amrnb.encoder", "amrnbenc", "audio_encoder.amrnb" },
    { "OMX.google.amrwb.decoder", "amrdec", "audio_decoder.amrwb" },
    { "OMX.google.amrwb.encoder", "amrwbenc", "audio_encoder.amrwb" },
    { "OMX.google.h264.decoder", "h264dec", "video_decoder.avc" },
    { "OMX.google.h264.encoder", "h264enc", "video_encoder.avc" },
    { "OMX.google.g711.alaw.decoder", "g711dec", "audio_decoder.g711alaw" },
    { "OMX.google.g711.mlaw.decoder", "g711dec", "audio_decoder.g711mlaw" },
    { "OMX.google.h263.decoder", "mpeg4dec", "video_decoder.h263" },
    { "OMX.google.h263.encoder", "mpeg4enc", "video_encoder.h263" },
    { "OMX.google.mpeg4.decoder", "mpeg4dec", "video_decoder.mpeg4" },
    { "OMX.google.mpeg4.encoder", "mpeg4enc", "video_encoder.mpeg4" },
    { "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" },
    { "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" },
    { "OMX.google.vpx.decoder", "vpxdec", "video_decoder.vpx" },
    { "OMX.google.raw.decoder", "rawdec", "audio_decoder.raw" },
    { "OMX.google.flac.encoder", "flacenc", "audio_encoder.flac" },
};
Hardware codecs are loaded through addVendorPlugin(), which loads libstagefrighthw.so. Each chip vendor can follow the OpenMAX standard and provide its own libstagefrighthw.so to expose its hardware codecs to Android.
void OMXMaster::addVendorPlugin() {
    addPlugin("libstagefrighthw.so");
}
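For reference, the addPlugin(const char *) overload essentially dlopen()s the vendor library and looks for its plugin factory function. A simplified sketch of what it does (based on the AOSP 4.1 OMXMaster; treat the details as approximate):
// Sketch of OMXMaster::addPlugin(const char *libname), simplified.
void OMXMaster::addPlugin(const char *libname) {
    mVendorLibHandle = dlopen(libname, RTLD_NOW);
    if (mVendorLibHandle == NULL) {
        return; // no vendor library on this device: software codecs only
    }

    typedef OMXPluginBase *(*CreateOMXPluginFunc)();
    CreateOMXPluginFunc createOMXPlugin =
        (CreateOMXPluginFunc)dlsym(mVendorLibHandle, "createOMXPlugin");

    if (createOMXPlugin) {
        addPlugin((*createOMXPlugin)()); // register the vendor's OMXPluginBase
    }
}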
status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
    ATRACE_CALL();

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false, // createEncoder
            mVideoTrack,
            NULL, flags, USE_SURFACE_ALLOC ? mNativeWindow : NULL);

    status_t err = mVideoSource->start();

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}
(Only the main part is kept here; encoder-related details have been removed.)
sp<MediaSource> OMXCodec::Create(
        const sp<IOMX> &omx,
        const sp<MetaData> &meta, bool createEncoder,
        const sp<MediaSource> &source,
        const char *matchComponentName,
        uint32_t flags,
        const sp<ANativeWindow> &nativeWindow) {
    int32_t requiresSecureBuffers;

    const char *mime;
    bool success = meta->findCString(kKeyMIMEType, &mime);
    CHECK(success);

    Vector<String8> matchingCodecs;
    Vector<uint32_t> matchingCodecQuirks;
    findMatchingCodecs(
            mime, createEncoder, matchComponentName, flags,
            &matchingCodecs, &matchingCodecQuirks);

    sp<OMXCodecObserver> observer = new OMXCodecObserver;
    IOMX::node_id node = 0;

    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        const char *componentNameBase = matchingCodecs[i].string();
        uint32_t quirks = matchingCodecQuirks[i];
        const char *componentName = componentNameBase;

        AString tmp;

        status_t err = omx->allocateNode(componentName, observer, &node);

        if (err == OK) {
            ALOGV("Successfully allocated OMX node '%s'", componentName);

            sp<OMXCodec> codec = new OMXCodec(
                    omx, node, quirks, flags,
                    createEncoder, mime, componentName,
                    source, nativeWindow);

            observer->setCodec(codec);

            err = codec->configureCodec(meta);

            if (err == OK) {
                if (!strcmp("OMX.Nvidia.mpeg2v.decode", componentName)) {
                    codec->mFlags |= kOnlySubmitOneInputBufferAtOneTime;
                }

                return codec;
            }

            ALOGV("Failed to configure codec '%s'", componentName);
        }
    }

    return NULL;
}
1. Based on the format information carried by mVideoTrack, find the matching decoders. For "video/avc" this typically yields the platform's hardware decoder (if any) followed by the software fallback "OMX.google.h264.decoder"; the loop in Create() then tries them in order.
bool success = meta->findCString(kKeyMIMEType, &mime);

findMatchingCodecs(
        mime, createEncoder, matchComponentName, flags,
        &matchingCodecs, &matchingCodecQuirks);
sp<OMXCodecObserver> observer = new OMXCodecObserver;
IOMX::node_id node = 0;
status_t err = omx->allocateNode(componentName, observer, &node);
This allocateNode() call is the step described at the very beginning of the article: on the OMX side it creates a decoder instance matching mVideoSource, with the returned node value as its unique identifier.
status_t OMX::allocateNode(
        const char *name, const sp<IOMXObserver> &observer, node_id *node) {
    Mutex::Autolock autoLock(mLock);

    *node = 0;

    OMXNodeInstance *instance = new OMXNodeInstance(this, observer);

    OMX_COMPONENTTYPE *handle;
    OMX_ERRORTYPE err = mMaster->makeComponentInstance(
            name, &OMXNodeInstance::kCallbacks,
            instance, &handle);

    if (err != OMX_ErrorNone) {
        ALOGV("FAILED to allocate omx component '%s'", name);

        instance->onGetHandleFailed();

        return UNKNOWN_ERROR;
    }

    *node = makeNodeID(instance);
    mDispatchers.add(*node, new CallbackDispatcher(instance));

    instance->setHandle(*node, handle);

    mLiveNodes.add(observer->asBinder(), instance);
    observer->asBinder()->linkToDeath(this);

    return OK;
}
OMX_ERRORTYPE OMXMaster::makeComponentInstance(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
    Mutex::Autolock autoLock(mLock);

    *component = NULL;

    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));

    if (index < 0) {
        return OMX_ErrorInvalidComponentName;
    }

    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
    OMX_ERRORTYPE err =
        plugin->makeComponentInstance(name, callbacks, appData, component);

    if (err != OMX_ErrorNone) {
        return err;
    }

    mPluginByInstance.add(*component, plugin);

    return err;
}
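The mPluginByComponentName map used above is filled in when plugins register themselves: the OMXPluginBase overload of addPlugin() asks each plugin to enumerate its component names. A simplified sketch of that step (again based on the AOSP 4.1 OMXMaster; details approximate):
// Sketch of OMXMaster::addPlugin(OMXPluginBase *plugin), simplified.
void OMXMaster::addPlugin(OMXPluginBase *plugin) {
    Mutex::Autolock autoLock(mLock);

    mPlugins.push_back(plugin);

    OMX_U32 index = 0;
    char name[128];
    OMX_ERRORTYPE err;
    while ((err = plugin->enumerateComponents(
                    name, sizeof(name), index++)) == OMX_ErrorNone) {
        // e.g. "OMX.google.h264.decoder" -> SoftOMXPlugin
        mPluginByComponentName.add(String8(name), plugin);
    }
}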
OMX_ERRORTYPE SoftOMXPlugin::makeComponentInstance(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
    ALOGV("makeComponentInstance '%s'", name);

    for (size_t i = 0; i < kNumComponents; ++i) {
        if (strcmp(name, kComponents[i].mName)) {
            continue;
        }

        AString libName = "libstagefright_soft_";
        libName.append(kComponents[i].mLibNameSuffix);
        libName.append(".so");

        void *libHandle = dlopen(libName.c_str(), RTLD_NOW);

        if (libHandle == NULL) {
            ALOGE("unable to dlopen %s", libName.c_str());

            return OMX_ErrorComponentNotFound;
        }

        typedef SoftOMXComponent *(*CreateSoftOMXComponentFunc)(
                const char *, const OMX_CALLBACKTYPE *,
                OMX_PTR, OMX_COMPONENTTYPE **);

        CreateSoftOMXComponentFunc createSoftOMXComponent =
            (CreateSoftOMXComponentFunc)dlsym(
                    libHandle,
                    "_Z22createSoftOMXComponentPKcPK16OMX_CALLBACKTYPE"
                    "PvPP17OMX_COMPONENTTYPE");

        if (createSoftOMXComponent == NULL) {
            dlclose(libHandle);
            libHandle = NULL;

            return OMX_ErrorComponentNotFound;
        }

        sp<SoftOMXComponent> codec =
            (*createSoftOMXComponent)(name, callbacks, appData, component);

        if (codec == NULL) {
            dlclose(libHandle);
            libHandle = NULL;

            return OMX_ErrorInsufficientResources;
        }

        OMX_ERRORTYPE err = codec->initCheck();
        if (err != OMX_ErrorNone) {
            dlclose(libHandle);
            libHandle = NULL;

            return err;
        }

        codec->incStrong(this);
        codec->setLibHandle(libHandle);

        return OMX_ErrorNone;
    }

    return OMX_ErrorInvalidComponentName;
}
Using the component name passed down from the layers above, this finds the library to load. For H.264 the library is libstagefright_soft_h264dec.so; in other words, when the upper layer needs H.264 decoding, the OMX software decoding component loads libstagefright_soft_h264dec.so. The corresponding software decoder source lives under Android4.1.1\frameworks\av\media\libstagefright\codecs\on2\h264dec.
android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVC(name, callbacks, appData, component);
}