// Sets the player's data source to a URL, with optional HTTP request headers.
// Obtains the media player service over binder, asks it to create a player
// for this client / audio session, and hands the URL to that player.
// Returns BAD_VALUE for a NULL url, otherwise the result of attachNewPlayer().
// NOTE(review): template arguments (KeyedVector<String8, String8>,
// sp<IMediaPlayerService>, sp<IMediaPlayer>) were lost in extraction and are
// restored here to match the AOSP declaration.
status_t MediaPlayer::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers)
{
    ALOGV("setDataSource(%s)", url);
    status_t err = BAD_VALUE;
    if (url != NULL) {
        const sp<IMediaPlayerService>& service(getMediaPlayerService());
        if (service != 0) {
            sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
            // If either the retransmit endpoint or the data source cannot be
            // set, drop the half-initialized player so attachNewPlayer() sees
            // a NULL player and reports the failure.
            if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
                (NO_ERROR != player->setDataSource(url, headers))) {
                player.clear();
            }
            err = attachNewPlayer(player);
        }
    }
    return err;
}
// Excerpt: track selection inside AwesomePlayer::setDataSource_l().
// Iterates over the extractor's tracks and binds the first "video/" track
// as mVideoTrack and the first "audio/" track as mAudioTrack (by MIME-type
// prefix). The "…"/"....." markers are elisions from the original article.
status_t AwesomePlayer::setDataSource_l(const sp &extractor) {…
// Take the first track whose MIME type starts with "video/".
if (!haveVideo && !strncasecmp(mime.string(), "video/", 6)) {
setVideoSource(extractor->getTrack(i));}
// Otherwise take the first track whose MIME type starts with "audio/".
else if (!haveAudio && !strncasecmp(mime.string(), "audio/", 6)) {
setAudioSource(extractor->getTrack(i));
.....
}
}
void AwesomePlayer::setVideoSource(sp source) {
CHECK(source != NULL);
mVideoTrack = source;
}
void AwesomePlayer::setAudioSource(sp source) {
CHECK(source != NULL);
mAudioTrack = source;
}
AwesomePlayer初始化的时候,在prepare中获得了mVideoSource、mAudioSource。
会调用到下面的prepareAsync_l
// Kicks off asynchronous prepare: makes sure the timed event queue is
// running, marks the player as PREPARING, and posts an event that will
// invoke onPrepareAsyncEvent() on the queue's thread.
status_t AwesomePlayer::prepareAsync_l() {
    // Lazily start the event queue the first time prepare is requested.
    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    // Publish the PREPARING state before the async work is queued.
    modifyFlags(PREPARING, SET);

    // The posted event fires on the queue thread and runs
    // onPrepareAsyncEvent(), which carries out the actual preparation.
    mAsyncPrepareEvent = new AwesomeEvent(
            this, &AwesomePlayer::onPrepareAsyncEvent);
    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}
mAsyncPrepareEvent被触发的时候,调用onPrepareAsyncEvent函数,继而调用beginPrepareAsync_l。
void AwesomePlayer::beginPrepareAsync_l() {
if (mVideoTrack != NULL && mVideoSource == NULL) {
status_t err = initVideoDecoder();
if (err != OK) {
abortPrepare(err);
return;
}
}
if (mAudioTrack != NULL && mAudioSource == NULL) {
status_t err = initAudioDecoder();
if (err != OK) {
abortPrepare(err);
return;
}
}
}
// Excerpt: creation of the video decoder. OMXCodec::Create() selects and
// configures an OMX component matching the video track's format; the
// "....." markers are elisions from the original article.
status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
.....
// Decoder chosen from the track's format; surface-backed output when
// USE_SURFACE_ALLOC is enabled and a native window is available.
mVideoSource = OMXCodec::Create(
mClient.interface(), mVideoTrack->getFormat(),
false, // createEncoder
mVideoTrack,
NULL, flags, USE_SURFACE_ALLOC ? mNativeWindow : NULL);
.....
}
// Excerpt: creation of the audio decoder.
// NOTE(review): the original snippet was mislabeled initVideoDecoder()
// (duplicating the previous excerpt); the body assigns mAudioSource from
// mAudioTrack, so this is AwesomePlayer::initAudioDecoder(), which takes
// no arguments in AOSP.
status_t AwesomePlayer::initAudioDecoder() {
    // ...
    mAudioSource = OMXCodec::Create(
            mClient.interface(), mAudioTrack->getFormat(),
            false, // createEncoder
            mAudioTrack);
    // ...
}
android系统中用openmax来做编解码,所以android向上抽象了一层OMXCodec,提供给上层播放器AwesomePlayer用。
类定义是:
// Client-side handle to the OMX service. Owns the IOMX binder proxy (mOMX)
// that all codecs created by one player share.
// NOTE(review): restored the <IOMX> template arguments lost in extraction.
class OMXClient {
public:
    OMXClient();

    status_t connect();
    void disconnect();

    // Accessor for the IOMX interface obtained by connect().
    sp<IOMX> interface() {
        return mOMX;
    }

private:
    sp<IOMX> mOMX;

    // Non-copyable.
    OMXClient(const OMXClient &);
    OMXClient &operator=(const OMXClient &);
};
OMXClient 有个IOMX 的变量 mOMX ,这个就是和OMX服务进行binder通讯的。
// Connects to the OMX service: looks up "media.player" in the service
// manager, obtains the IOMX from MediaPlayerService, and wraps it in a
// client-side MuxOMX when the node would not live in this process.
// NOTE(review): restored the template arguments (sp<IServiceManager>,
// sp<IBinder>, interface_cast<IMediaPlayerService>) lost in extraction.
status_t OMXClient::connect() {
    sp<IServiceManager> sm = defaultServiceManager();
    sp<IBinder> binder = sm->getService(String16("media.player"));
    sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

    CHECK(service.get() != NULL);

    mOMX = service->getOMX();
    CHECK(mOMX.get() != NULL);

    // When the remote IOMX reports the node would not live locally,
    // interpose a client-side mux — presumably so software components can
    // run in the client process; confirm against MuxOMX.
    if (!mOMX->livesLocally(NULL /* node */, getpid())) {
        ALOGI("Using client-side OMX mux.");
        mOMX = new MuxOMX(mOMX);
    }

    return OK;
}
// Returns the service-wide OMX instance, creating it on first use.
// Guarded by mLock so concurrent binder calls only create one OMX.
// NOTE(review): restored the sp<IOMX> return type lost in extraction.
sp<IOMX> MediaPlayerService::getOMX() {
    Mutex::Autolock autoLock(mLock);

    if (mOMX.get() == NULL) {
        mOMX = new OMX;  // lazily instantiate the single OMX object
    }

    return mOMX;
}
OMXClient::connect函数是通过binder机制 获得到MediaPlayerService,然后通过MediaPlayerService来创建OMX的实例。
也就是说一个AwesomePlayer对应着 一个IOMX 变量,AwesomePlayer中的音视频解码器共用这个IOMX变量来获得OMX服务。
// (Repeat of OMXClient::interface) accessor for the IOMX binder proxy.
// NOTE(review): restored the <IOMX> template argument lost in extraction.
sp<IOMX> interface() {
    return mOMX;
}
// Excerpt (repeat): video decoder creation and startup inside
// AwesomePlayer::initVideoDecoder(); OMXCodec::Create() is the key call.
// The "…………" markers are elisions from the original article.
status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
…………
mVideoSource = OMXCodec::Create(
mClient.interface(), mVideoTrack->getFormat(),
false, // createEncoder
mVideoTrack,
NULL, flags, USE_SURFACE_ALLOC ? mNativeWindow : NULL);
…………..
// Start the decoder once it has been created and configured.
status_t err = mVideoSource->start();
}
其中重点是OMXCodec::Create函数
// Factory for a codec wrapper. Finds OMX components that match the track's
// MIME type, allocates a node for the first one that configures
// successfully, and returns the resulting OMXCodec (NULL if none works).
// NOTE(review): all sp<...>/Vector<...> template arguments were lost in
// extraction and are restored here; Chinese comments translated.
sp<MediaSource> OMXCodec::Create(
        const sp<IOMX> &omx,
        const sp<MetaData> &meta, bool createEncoder,
        const sp<MediaSource> &source,
        const char *matchComponentName,
        uint32_t flags,
        const sp<ANativeWindow> &nativeWindow) {
    int32_t requiresSecureBuffers;

    const char *mime;
    bool success = meta->findCString(kKeyMIMEType, &mime);
    CHECK(success);

    // Look up decoder components matching the format information carried in
    // from the track (mVideoTrack / mAudioTrack).
    Vector<String8> matchingCodecs;
    Vector<uint32_t> matchingCodecQuirks;
    findMatchingCodecs(
            mime, createEncoder, matchComponentName, flags,
            &matchingCodecs, &matchingCodecQuirks);

    // Create the OMXCodecObserver instance that will receive the codec's
    // OMX callback messages.
    sp<OMXCodecObserver> observer = new OMXCodecObserver;
    IOMX::node_id node = 0;

    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        const char *componentNameBase = matchingCodecs[i].string();
        uint32_t quirks = matchingCodecQuirks[i];
        const char *componentName = componentNameBase;

        AString tmp;

        // Binder call into the OMX service: allocate a control node for this
        // component, passing the matched component name, the observer, and
        // the node id (initialized to 0). All subsequent operations on the
        // decoder go through this node.
        status_t err = omx->allocateNode(componentName, observer, &node);
        if (err == OK) {
            ALOGV("Successfully allocated OMX node '%s'", componentName);

            sp<OMXCodec> codec = new OMXCodec(
                    omx, node, quirks, flags,
                    createEncoder, mime, componentName,
                    source, nativeWindow);

            observer->setCodec(codec);

            // Configure and initialize the decoder for this format.
            err = codec->configureCodec(meta);
            if (err == OK) {
                if (!strcmp("OMX.Nvidia.mpeg2v.decode", componentName)) {
                    codec->mFlags |= kOnlySubmitOneInputBufferAtOneTime;
                }

                return codec;
            }

            ALOGV("Failed to configure codec '%s'", componentName);
        }
    }

    return NULL;
}
每个AwesomePlayer实例 只有一个OMX服务的入口,但是AwesomePlayer不一定就只需要1种解码器。音视频都要有,部分场景下还有多路音频,或者多路视频。
OMX构造函数中会进行初始化。
// The OMX service holds a single OMXMaster that loads and indexes all
// codec plugins (vendor hardware plugin plus the software plugin).
OMXMaster *mMaster;
// OMX constructor: plugin discovery happens inside the OMXMaster ctor.
OMX::OMX()
: mMaster(new OMXMaster),
mNodeCounter(0) {
}
// Registers the vendor (hardware) plugin first, then the software codec
// plugin, so both kinds of components are available for matching.
OMXMaster::OMXMaster()
: mVendorLibHandle(NULL) {
addVendorPlugin();
addPlugin(new SoftOMXPlugin);
}
// The vendor's OMX plugin is, by convention, shipped as libstagefrighthw.so.
void OMXMaster::addVendorPlugin() {
addPlugin("libstagefrighthw.so");
}
void OMXMaster::addPlugin(const char *libname) {
mVendorLibHandle = dlopen(libname, RTLD_NOW);
…………………………….
if (createOMXPlugin) {
addPlugin((*createOMXPlugin)());-----创建OMXPlugin,并添加进我们的列表里
}
}
// Vendor factory exported by libstagefrighthw.so; on this platform it
// returns the Exynos plugin.
OMXPluginBase *createOMXPlugin() {
return new ExynosOMXPlugin;
}
// Loads the Exynos vendor OMX core (libExynosOMX_Core.so), resolves its
// standard entry points via dlsym, and initializes the core.
ExynosOMXPlugin::ExynosOMXPlugin()
: mLibHandle(dlopen("libExynosOMX_Core.so", RTLD_NOW)),
mInit(NULL),
mDeinit(NULL),
mComponentNameEnum(NULL),
mGetHandle(NULL),
mFreeHandle(NULL),
mGetRolesOfComponentHandle(NULL) {
if (mLibHandle != NULL) {
// Resolve the OMX core entry points exported by the vendor library.
mInit = (InitFunc)dlsym(mLibHandle, "Exynos_OMX_Init");
mDeinit = (DeinitFunc)dlsym(mLibHandle, "Exynos_OMX_Deinit");
mComponentNameEnum =
(ComponentNameEnumFunc)dlsym(mLibHandle, "Exynos_OMX_ComponentNameEnum");
mGetHandle = (GetHandleFunc)dlsym(mLibHandle, "Exynos_OMX_GetHandle");
mFreeHandle = (FreeHandleFunc)dlsym(mLibHandle, "Exynos_OMX_FreeHandle");
mGetRolesOfComponentHandle =
(GetRolesOfComponentFunc)dlsym(
mLibHandle, "Exynos_OMX_GetRolesOfComponent");
// NOTE(review): mInit is invoked without checking the dlsym result; a
// library that lacks Exynos_OMX_Init would crash here — verify.
(*mInit)();
}
}
这样,就可以使用exynos 处理器的解码器了。
// Allocates one OMX component instance ("node") for a client. Creates the
// server-side OMXNodeInstance, asks the OMXMaster to instantiate the named
// component with OMXNodeInstance::kCallbacks as its callback table, and
// registers the node id, its callback dispatcher, and the observer's
// death link.
// NOTE(review): restored the sp<IOMXObserver> template argument lost in
// extraction.
status_t OMX::allocateNode(
        const char *name, const sp<IOMXObserver> &observer, node_id *node) {
    Mutex::Autolock autoLock(mLock);

    *node = 0;

    OMXNodeInstance *instance = new OMXNodeInstance(this, observer);

    OMX_COMPONENTTYPE *handle;
    OMX_ERRORTYPE err = mMaster->makeComponentInstance(
            name, &OMXNodeInstance::kCallbacks,
            instance, &handle);

    if (err != OMX_ErrorNone) {
        ALOGV("FAILED to allocate omx component '%s'", name);

        instance->onGetHandleFailed();

        return UNKNOWN_ERROR;
    }

    *node = makeNodeID(instance);
    // Each node gets its own dispatcher for OMX callback messages.
    mDispatchers.add(*node, new CallbackDispatcher(instance));

    instance->setHandle(*node, handle);

    // Track the observer so the node can be cleaned up if the client dies.
    mLiveNodes.add(observer->asBinder(), instance);
    observer->asBinder()->linkToDeath(this);

    return OK;
}
// Routes component creation to the plugin that registered the component
// name. The name is the one configured in /etc/media_codecs.xml (built from
// the device tree, e.g. device/samsung/exynos4412/).
// NOTE(review): a stray prose line inside the original body was converted
// to this comment, and the "index < 0" guard (present in AOSP) was
// restored — without it, valueAt() on a lookup miss would abort.
OMX_ERRORTYPE OMXMaster::makeComponentInstance(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
    Mutex::Autolock autoLock(mLock);

    *component = NULL;

    // Look the component name up in the plugin index built at registration.
    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));

    if (index < 0) {
        return OMX_ErrorInvalidComponentName;
    }

    // The plugin (e.g. ExynosOMXPlugin) that owns this component.
    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
    OMX_ERRORTYPE err =
        plugin->makeComponentInstance(name, callbacks, appData, component);

    mPluginByInstance.add(*component, plugin);

    return err;
}
// Instantiates a component through the vendor core's OMX_GetHandle-style
// entry point resolved from libExynosOMX_Core.so.
// NOTE(review): restored the cast template arguments
// (reinterpret_cast<OMX_HANDLETYPE *>, const_cast<char *>,
// const_cast<OMX_CALLBACKTYPE *>) lost in extraction.
OMX_ERRORTYPE ExynosOMXPlugin::makeComponentInstance(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
    if (mLibHandle == NULL) {
        return OMX_ErrorUndefined;  // vendor core failed to load in the ctor
    }

    return (*mGetHandle)(
            reinterpret_cast<OMX_HANDLETYPE *>(component),
            const_cast<char *>(name),
            appData, const_cast<OMX_CALLBACKTYPE *>(callbacks));
}
struct OMXNodeInstance {
OMXNodeInstance(
OMX *owner, const sp &observer);
void setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle);
status_t freeNode(OMXMaster *master);
status_t sendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param);
status_t getParameter(OMX_INDEXTYPE index, void *params, size_t size);
status_t setParameter(OMX_INDEXTYPE index, const void *params, size_t size);
status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size);
status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size);
status_t getState(OMX_STATETYPE* state);
status_t enableGraphicBuffers(OMX_U32 portIndex, OMX_BOOL enable);
status_t getGraphicBufferUsage(OMX_U32 portIndex, OMX_U32* usage);
status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
status_t prepareForAdaptivePlayback(
OMX_U32 portIndex, OMX_BOOL enable,
OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
status_t useBuffer(
OMX_U32 portIndex, const sp ¶ms,
OMX::buffer_id *buffer);//Client通过此函数将已分配好的Buffer传给Component,让其使用。
status_t useGraphicBuffer(
OMX_U32 portIndex, const sp &graphicBuffer,
OMX::buffer_id *buffer);
status_t updateGraphicBufferInMeta(
OMX_U32 portIndex, const sp &graphicBuffer,
OMX::buffer_id buffer);
status_t createInputSurface(
OMX_U32 portIndex, sp *bufferProducer);
status_t signalEndOfInputStream();
status_t allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data);//Client通过调用此函数让Component分配Buffer。
status_t allocateBufferWithBackup(
OMX_U32 portIndex, const sp ¶ms,
OMX::buffer_id *buffer);
status_t freeBuffer(OMX_U32 portIndex, OMX::buffer_id buffer);//Client通过调用此函数让Component释放allocateBuffer()分配的Buffer。
status_t fillBuffer(OMX::buffer_id buffer);//Client通过调用此函数传递空的Buffer给Component,让其将处理好的数据填入其中。此函数会调用OMX标准接口OMX_FillThisBuffer()。
status_t emptyBuffer(
OMX::buffer_id buffer,
OMX_U32 rangeOffset, OMX_U32 rangeLength,
OMX_U32 flags, OMX_TICKS timestamp);//Client通过调用此函数传递输入Buffer给Component,让其读取其中的数据进行编解码等处理。此函数会调用OMX标准接口OMX_
status_t emptyDirectBuffer(
OMX_BUFFERHEADERTYPE *header,
OMX_U32 rangeOffset, OMX_U32 rangeLength,
OMX_U32 flags, OMX_TICKS timestamp);
status_t getExtensionIndex(
const char *parameterName, OMX_INDEXTYPE *index);
status_t setInternalOption(
OMX_U32 portIndex,
IOMX::InternalOptionType type,
const void *data,
size_t size);
void onMessage(const omx_message &msg);
void onObserverDied(OMXMaster *master);
void onGetHandleFailed();
void onEvent(OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2);
static OMX_CALLBACKTYPE kCallbacks;
status_t useGraphicBuffer2_l(
OMX_U32 portIndex, const sp &graphicBuffer,
OMX::buffer_id *buffer);
static OMX_ERRORTYPE OnEvent(
OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
OMX_IN OMX_PTR pEventData);
static OMX_ERRORTYPE OnEmptyBufferDone(
OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);// Component完成对输入buffer的读取后,调用此回调函数,向Client发送EmptyBufferDone消息。
static OMX_ERRORTYPE OnFillBufferDone(
OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);//Component完成相应处理将输出数据填入输出Buffer后,调用此回调函数,向Client发送FillBufferDone消息。
status_t storeMetaDataInBuffers_l(OMX_U32 portIndex, OMX_BOOL enable);
sp getGraphicBufferSource();
void setGraphicBufferSource(const sp& bufferSource);
OMXNodeInstance(const OMXNodeInstance &);
OMXNodeInstance &operator=(const OMXNodeInstance &);
};
这些方法执行时,都是先通过findInstance在mNodeIDToInstance列表中找到对应的NodeInstance,然后调用NodeInstance对应的方法。
// Registers the three static OMXNodeInstance methods as the OMX IL callback
// table (event / empty-buffer-done / fill-buffer-done).
OMX_CALLBACKTYPE OMXNodeInstance::kCallbacks = {
&OnEvent, &OnEmptyBufferDone, &OnFillBufferDone
};
它把三个OMXNodeInstance类的静态方法注册给了kCallbacks。
OMX_ERRORTYPE err = mMaster->makeComponentInstance(
name, &OMXNodeInstance::kCallbacks,
instance, &handle);
事件处理函数传给了组件ComponentInstance。也就是传给了具体芯片平台相关的OMX IL 层。
OMX_ERRORTYPE OMXNodeInstance::OnEmptyBufferDone
OMX_ERRORTYPE OMXNodeInstance::OnFillBufferDone
OMX_ERRORTYPE OMXNodeInstance::OnEvent
而这几个函数又会去调用OMX中对应的函数,也就是下面这三个:
OMX_ERRORTYPE OMX::OnEmptyBufferDone
OMX_ERRORTYPE OMX::OnFillBufferDone
OMX_ERRORTYPE OMX::OnEvent
总结一下, 这几个方法都采用相同的路子:
bool OMX::CallbackDispatcher::loop() {
for (;;) {
omx_message msg;
{
Mutex::Autolock autoLock(mLock);
while (!mDone && mQueue.empty()) {
mQueueChanged.wait(mLock);
}
if (mDone) {
break;
}
msg = *mQueue.begin();
mQueue.erase(mQueue.begin());
}
dispatch(msg);
}
return false;
}