The camera callbacks are registered in CameraClient::initialize(), which is invoked from the upper layers when the camera is initialized.
status_t CameraClient::initialize(camera_module_t *module) {
    int callingPid = getCallingPid();
    status_t res;

    LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);

    // Verify ops permissions
    res = startCameraOps();
    if (res != OK) {
        return res;
    }

    char camera_device_name[10];
    snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);

    mHardware = new CameraHardwareInterface(camera_device_name);
    res = mHardware->initialize(&module->common);
    if (res != OK) {
        ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
        mHardware.clear();
        return NO_INIT;
    }

    mHardware->setCallbacks(notifyCallback,
            dataCallback,
            dataCallbackTimestamp,
            (void *)mCameraId);

    // Enable zoom, error, focus, and metadata messages by default
    enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
                  CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);

    LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
    return OK;
}
res = mHardware->initialize(&module->common);
This call goes through the interface header, which serves as the bridge for talking to the HAL; it is essentially just a layer of interface wrapping:
CameraHardwareInterface.h
status_t initialize(hw_module_t *module)
{
    ALOGI("Opening camera %s", mName.string());
    int rc = module->methods->open(module, mName.string(),
                                   (hw_device_t **)&mDevice);
    if (rc != OK) {
        ALOGE("Could not open camera %s: %d", mName.string(), rc);
        return rc;
    }
    initHalPreviewWindow();
    return rc;
}
Here the open() function is called to open the camera. This open entry point has already been registered by the HAL:
static int camera_device_open(const hw_module_t* module, const char* name,
                              hw_device_t** device);

static struct hw_module_methods_t camera_module_methods = {
    open: camera_device_open
};
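For context (this is not code from the article): before any device is opened, the framework side locates this HAL library by its module ID. A minimal sketch of how the camera service typically obtains the camera_module_t that later reaches CameraClient::initialize():

// Sketch only: loading the camera HAL module on the framework side.
// hw_get_module() resolves the HAL .so by CAMERA_HARDWARE_MODULE_ID ("camera");
// its hw_module_methods_t then exposes camera_device_open shown above.
camera_module_t *module;
if (hw_get_module(CAMERA_HARDWARE_MODULE_ID,
                  (const hw_module_t **)&module) < 0) {
    ALOGE("Could not load camera HAL module");
    // without the HAL module the camera service cannot serve clients
}
// &module->common is what CameraClient::initialize() later passes to
// CameraHardwareInterface::initialize(), whose open() call lands in camera_device_open.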
So once the matched camera HAL module is handed to open() together with the camera id passed down from above (as the name string), the call lands in the camera HAL entry point camera_device_open.
This function plays the key role: it builds and registers the camera operation table, so that the HAL-layer functions correspond one-to-one with the interfaces used by CameraClient in the framework.
int camera_device_open(const hw_module_t* module, const char* name,
                       hw_device_t** device)
{
    int rv = 0;
    int cameraid;
    rk_camera_device_t* camera_device = NULL;
    camera_device_ops_t* camera_ops = NULL;
    android::CameraHal* camera = NULL;

    android::Mutex::Autolock lock(gCameraHalDeviceLock);

    LOGI("camera_device open : name = %s", name);

    if (name != NULL) {
        cameraid = atoi(name);

        if(cameraid > gCamerasNumber) {
            LOGE("camera service provided cameraid out of bounds, "
                 "cameraid = %d, num supported = %d",
                 cameraid, gCamerasNumber);
            rv = -EINVAL;
            goto fail;
        }

        if(gCamerasOpen >= CAMERAS_SUPPORTED_SIMUL_MAX) {
            LOGE("maximum number(%d) of cameras already open", gCamerasOpen);
            rv = -ENOMEM;
            goto fail;
        }

        camera_device = (rk_camera_device_t*)malloc(sizeof(*camera_device));
        if(!camera_device) {
            LOGE("camera_device allocation fail");
            rv = -ENOMEM;
            goto fail;
        }

        camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
        if(!camera_ops) {
            LOGE("camera_ops allocation fail");
            rv = -ENOMEM;
            goto fail;
        }

        memset(camera_device, 0, sizeof(*camera_device));
        memset(camera_ops, 0, sizeof(*camera_ops));

        camera_device->base.common.tag = HARDWARE_DEVICE_TAG;
        camera_device->base.common.version = 0;
        camera_device->base.common.module = (hw_module_t *)(module);
        camera_device->base.common.close = camera_device_close;
        camera_device->base.ops = camera_ops;

        camera_ops->set_preview_window = camera_set_preview_window;
        camera_ops->set_callbacks = camera_set_callbacks;
        camera_ops->enable_msg_type = camera_enable_msg_type;
        camera_ops->disable_msg_type = camera_disable_msg_type;
        camera_ops->msg_type_enabled = camera_msg_type_enabled;
        camera_ops->start_preview = camera_start_preview;
        camera_ops->stop_preview = camera_stop_preview;
        camera_ops->preview_enabled = camera_preview_enabled;
        camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers;
        camera_ops->start_recording = camera_start_recording;
        camera_ops->stop_recording = camera_stop_recording;
        camera_ops->recording_enabled = camera_recording_enabled;
        camera_ops->release_recording_frame = camera_release_recording_frame;
        camera_ops->auto_focus = camera_auto_focus;
        camera_ops->cancel_auto_focus = camera_cancel_auto_focus;
        camera_ops->take_picture = camera_take_picture;
        camera_ops->cancel_picture = camera_cancel_picture;
        camera_ops->set_parameters = camera_set_parameters;
        camera_ops->get_parameters = camera_get_parameters;
        camera_ops->put_parameters = camera_put_parameters;
        camera_ops->send_command = camera_send_command;
        camera_ops->release = camera_release;
        camera_ops->dump = camera_dump;

        *device = &camera_device->base.common;

        // -------- RockChip specific stuff --------

        camera_device->cameraid = cameraid;

        camera = new android::CameraHal(cameraid);
        if(!camera) {
            LOGE("Couldn't create instance of CameraHal class");
            rv = -ENOMEM;
            goto fail;
        }

        gCameraHals[cameraid] = camera;
        gCamerasOpen++;
    }

    return rv;

fail:
    if(camera_device) {
        free(camera_device);
        camera_device = NULL;
    }
    if(camera_ops) {
        free(camera_ops);
        camera_ops = NULL;
    }
    if(camera) {
        delete camera;
        camera = NULL;
    }
    *device = NULL;
    return rv;
}
This open call also constructs and initializes the CameraHal class, at which point its worker threads have been started and are sitting in their message loops.
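As an illustration only (the RockChip CameraHal source is not shown in this article, so the names here are hypothetical), those worker threads usually follow the standard android::Thread pattern, with a threadLoop() that blocks on a message queue and dispatches commands until the HAL is released:

#include <utils/threads.h>

// Hypothetical sketch of the looping-thread pattern described above.
class CommandThread : public android::Thread {
public:
    // threadLoop() is called repeatedly for as long as it returns true,
    // which is what keeps the thread "in a loop state" after open().
    virtual bool threadLoop() {
        // block on a message queue, then dispatch preview/capture/focus
        // commands to the hardware (details are HAL-specific)
        return true;
    }
};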
Now return to CameraClient::initialize() (shown in full above).
The callback registration also happens there, during this same initialization:
mHardware->setCallbacks(notifyCallback,
        dataCallback,
        dataCallbackTimestamp,
        (void *)mCameraId);
The corresponding lower-level implementation again lives in the interface header (CameraHardwareInterface.h):
void setCallbacks(notify_callback notify_cb,
                  data_callback data_cb,
                  data_callback_timestamp data_cb_timestamp,
                  void* user)
{
    mNotifyCb = notify_cb;
    mDataCb = data_cb;
    mDataCbTimestamp = data_cb_timestamp;
    mCbUser = user;

    ALOGV("%s(%s)", __FUNCTION__, mName.string());

    if (mDevice->ops->set_callbacks) {
        mDevice->ops->set_callbacks(mDevice,
                               __notify_cb,
                               __data_cb,
                               __data_cb_timestamp,
                               __get_memory,
                               this);
    }
}
The main job here is to store the callbacks passed down from above into local members. Most importantly, it checks whether the set_callbacks hook has been installed, and that happened when the camera was opened:
camera_ops->set_callbacks = camera_set_callbacks;
So mDevice->ops->set_callbacks and camera_ops->set_callbacks refer to the same function pointer in the same ops structure. Note also that this wrapper call adds an extra function pointer, __get_memory, whose implementation is also in CameraHardwareInterface.h; it matters later for how camera memory is allocated. For now, just remember that the callbacks are registered here.
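For reference, the hook that __get_memory satisfies is the camera_request_memory type from the legacy camera HAL header (hardware/camera.h); the declaration below is quoted from memory and the usage line is only illustrative:

// camera_request_memory: the framework-provided allocator the HAL calls whenever
// it needs callback buffers (roughly this shape in hardware/camera.h):
typedef camera_memory_t* (*camera_request_memory)(int fd, size_t buf_size,
                                                  unsigned int num_bufs,
                                                  void *user);
// Illustrative HAL-side use: ask for one buffer backed by ashmem (fd = -1).
// camera_memory_t *mem = mRequestMemory(-1, frame_size, 1, mCallbackCookie);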
Next comes the callback that actually gets registered with the HAL:
static void __data_cb(int32_t msg_type,
                      const camera_memory_t *data, unsigned int index,
                      camera_frame_metadata_t *metadata,
                      void *user)
{
    ALOGV("%s", __FUNCTION__);
    CameraHardwareInterface *__this =
            static_cast<CameraHardwareInterface *>(user);
    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
    if (index >= mem->mNumBufs) {
        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
             index, mem->mNumBufs);
        return;
    }
    __this->mDataCb(msg_type, mem->mBuffers[index], metadata, __this->mCbUser);
}
Its job is simply to wrap the callback stored earlier, but there is more going on than it looks: the data type coming up from the HAL is camera_memory_t, and at this layer it is converted into mem->mBuffers[index], an IMemory, which is the parameter type expected by the callbacks in CameraClient. You can think of it as an adapter.
Concretely, data->handle is cast back (a plain C++ cast) to the CameraHeapMemory object that originally allocated it. The overall camera data flow and memory management will be covered in detail in a separate article.
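To make that cast readable, here is a simplified excerpt (reproduced from memory, not verbatim) of the members of CameraHeapMemory, defined in CameraHardwareInterface.h, that __data_cb touches:

// Simplified sketch of CameraHeapMemory (CameraHardwareInterface.h):
// the HAL only sees `handle` (a camera_memory_t), while the framework side keeps
// per-buffer IMemory views in mBuffers, which are what reach mDataCb.
class CameraHeapMemory : public RefBase {
public:
    size_t              mBufSize;   // size of each buffer
    uint_t              mNumBufs;   // checked against `index` in __data_cb
    sp<MemoryHeapBase>  mHeap;      // backing heap shared with the HAL
    sp<MemoryBase>     *mBuffers;   // mBuffers[index] is what reaches CameraClient
    camera_memory_t     handle;     // handle.handle points back at this object
};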
At this point, camera initialization is complete.
Now look at the setCallbacks implementation in the camera HAL layer:
void CameraHal::setCallbacks(camera_notify_callback notify_cb,
                             camera_data_callback data_cb,
                             camera_data_timestamp_callback data_cb_timestamp,
                             camera_request_memory get_memory,
                             void *user)
{
    LOG_FUNCTION_NAME
    Mutex::Autolock lock(mLock);
    mEventNotifier->setCallbacks(notify_cb, data_cb, data_cb_timestamp, get_memory, user);
    LOG_FUNCTION_NAME_EXIT
}
This in turn calls:
void AppMsgNotifier::setCallbacks(camera_notify_callback notify_cb,
                                  camera_data_callback data_cb,
                                  camera_data_timestamp_callback data_cb_timestamp,
                                  camera_request_memory get_memory,
                                  void *user)
{
    LOG_FUNCTION_NAME
    mNotifyCb = notify_cb;
    mDataCb = data_cb;
    mDataCbTimestamp = data_cb_timestamp;
    mRequestMemory = get_memory;
    mCallbackCookie = user;
    LOG_FUNCTION_NAME_EXIT
}
At this point the callbacks are successfully stored in the HAL layer's function pointers. Pay attention to one of these assignments:
mRequestMemory = get_memory;
This is the hook through which the HAL later requests memory.
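As an illustration (not code from the RockChip HAL; previewFrame and previewFrameSize are hypothetical names), a data callback on the HAL side typically allocates its buffer through this very hook before invoking mDataCb:

// Hypothetical sketch: allocate a callback buffer via the framework-provided
// allocator, copy the preview frame into it, deliver it, then release it.
camera_memory_t *tmpPreviewMemory =
        mRequestMemory(-1 /* no fd: backed by ashmem */, previewFrameSize, 1, mCallbackCookie);
if (tmpPreviewMemory != NULL) {
    memcpy(tmpPreviewMemory->data, previewFrame, previewFrameSize);
    mDataCb(CAMERA_MSG_PREVIEW_FRAME, tmpPreviewMemory, 0, NULL, mCallbackCookie);
    tmpPreviewMemory->release(tmpPreviewMemory);
}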
That covers the registration. Now look at the callback path in the opposite direction:
mDataCb(CAMERA_MSG_PREVIEW_FRAME, tmpPreviewMemory, 0, NULL, mCallbackCookie);
This is the callback invocation on the camera HAL side. It eventually reaches the data callback in CameraClient, but there is an implicit conversion step in between:
it first calls back into the wrapper in CameraHardwareInterface:
static void __data_cb(int32_t msg_type,
                      const camera_memory_t *data, unsigned int index,
                      camera_frame_metadata_t *metadata,
                      void *user)
{
    ALOGV("%s", __FUNCTION__);
    CameraHardwareInterface *__this =
            static_cast<CameraHardwareInterface *>(user);
    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
    if (index >= mem->mNumBufs) {
        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
             index, mem->mNumBufs);
        return;
    }
    __this->mDataCb(msg_type, mem->mBuffers[index], metadata, __this->mCbUser);
}
The code above converts the HAL-layer camera_memory_t data into a CameraHeapMemory, and what finally goes up to the upper layer is the IMemory object mem->mBuffers[index]. It then calls back into CameraClient::dataCallback:
void CameraClient::dataCallback(int32_t msgType,
        const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
    LOG2("dataCallback(%d)", msgType);

    Mutex* lock = getClientLockFromCookie(user);
    if (lock == NULL) return;
    Mutex::Autolock alock(*lock);

    CameraClient* client =
            static_cast<CameraClient*>(getClientFromCookie(user));
    if (client == NULL) return;
    if (!client->lockIfMessageWanted(msgType)) return;

    if (dataPtr == 0 && metadata == NULL) {
        ALOGE("Null data returned in data callback");
        client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
        return;
    }

    switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
        case CAMERA_MSG_PREVIEW_FRAME:
            client->handlePreviewData(msgType, dataPtr, metadata);
            break;
        case CAMERA_MSG_POSTVIEW_FRAME:
            client->handlePostview(dataPtr);
            break;
        case CAMERA_MSG_RAW_IMAGE:
            client->handleRawPicture(dataPtr);
            break;
        case CAMERA_MSG_COMPRESSED_IMAGE:
            client->handleCompressedPicture(dataPtr);
            break;
        default:
            client->handleGenericData(msgType, dataPtr, metadata);
            break;
    }
}
Note that fetching the client object at the top of this function is done with thread safety in mind; the source comments explain:
// Provide client pointer for callbacks. Client lock returned from getClientLockFromCookie
// should be acquired for this to be safe
For a preview frame the flow continues into:
case CAMERA_MSG_PREVIEW_FRAME:
    client->handlePreviewData(msgType, dataPtr, metadata);
The implementation:
void CameraClient::handlePreviewData(int32_t msgType,
                                     const sp<IMemory>& mem,
                                     camera_frame_metadata_t *metadata) {
    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);

    // local copy of the callback flags
    int flags = mPreviewCallbackFlag;

    // is callback enabled?
    if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
        // If the enable bit is off, the copy-out and one-shot bits are ignored
        LOG2("frame callback is disabled");
        mLock.unlock();
        return;
    }

    // hold a strong pointer to the client
    sp<ICameraClient> c = mRemoteCallback;

    // clear callback flags if no client or one-shot mode
    if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
        LOG2("Disable preview callback");
        mPreviewCallbackFlag &= ~(CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
                                  CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
                                  CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
    }

    if (c != 0) {
        // Is the received frame copied out or not?
        if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
            LOG2("frame is copied");
            copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata);
        } else {
            LOG2("frame is forwarded");
            mLock.unlock();
            c->dataCallback(msgType, mem, metadata);
        }
    } else {
        mLock.unlock();
    }
}
Here the data is unpacked once more:
sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
The whole trip up to the upper layers is essentially unpack, repackage, unpack again; the details will be analyzed in depth in the camera memory article.
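To make the "repackage" step concrete, here is a heavily simplified sketch of what the COPY_OUT branch (copyFrameAndPostCopiedFrame) does, abridged from the AOSP implementation, so treat the details as approximate:

// Simplified sketch: copy the frame out of the HAL-owned heap into a
// CameraService-owned heap, then forward the copy to the remote client.
void CameraClient::copyFrameAndPostCopiedFrame(
        int32_t msgType, const sp<ICameraClient>& client,
        const sp<IMemoryHeap>& heap, size_t offset, size_t size,
        camera_frame_metadata_t *metadata) {
    if (mPreviewBuffer == 0) {
        mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);   // lazily allocated copy buffer
    }
    memcpy(mPreviewBuffer->base(),
           (uint8_t *)heap->base() + offset, size);           // the actual copy-out
    sp<MemoryBase> frame = new MemoryBase(mPreviewBuffer, 0, size);
    mLock.unlock();
    client->dataCallback(msgType, frame, metadata);           // cross-process delivery via binder
}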
The subsequent dataCallback then crosses the process boundary via binder to reach the upper layer. In this transaction the sending side lives in libcameraservice and the receiving side is the app-side camera client. The binder classes involved in the callback are BpCameraClient, BnCameraClient and ICameraClient; their respective roles will not be repeated here.
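For orientation, the proxy side that sends this transaction (BpCameraClient::dataCallback in ICameraClient.cpp) looks roughly like the following, abridged from AOSP:

// Proxy side (runs in the cameraservice process): marshal the message type,
// the IMemory binder, and any face metadata, then fire a one-way transaction.
void BpCameraClient::dataCallback(int32_t msgType, const sp<IMemory>& imageData,
                                  camera_frame_metadata_t *metadata)
{
    ALOGV("dataCallback");
    Parcel data, reply;
    data.writeInterfaceToken(ICameraClient::getInterfaceDescriptor());
    data.writeInt32(msgType);
    data.writeStrongBinder(imageData->asBinder());
    if (metadata) {
        data.writeInt32(metadata->number_of_faces);
        data.write(metadata->faces, sizeof(camera_face_t) * metadata->number_of_faces);
    }
    remote()->transact(DATA_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY);
}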
Now look at the receiving side, in BnCameraClient::onTransact:
case DATA_CALLBACK: {
    ALOGV("DATA_CALLBACK");
    CHECK_INTERFACE(ICameraClient, data, reply);
    int32_t msgType = data.readInt32();
    sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
    camera_frame_metadata_t *metadata = NULL;
    if (data.dataAvail() > 0) {
        metadata = new camera_frame_metadata_t;
        metadata->number_of_faces = data.readInt32();
        metadata->faces = (camera_face_t *) data.readInplace(
                sizeof(camera_face_t) * metadata->number_of_faces);
    }
    dataCallback(msgType, imageData, metadata);
    if (metadata) delete metadata;
    return NO_ERROR;
} break;
First the data is read back out of the binder parcel:
sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
and then dataCallback() is called.
In this client/server model, the upper-layer Camera class inherits from BnCameraClient, so the corresponding implementation lives in the Camera class:
// callback from camera service when frame or image is ready
void Camera::dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
                          camera_frame_metadata_t *metadata)
{
    sp<CameraListener> listener;
    {
        Mutex::Autolock _l(mLock);
        listener = mListener;
    }
    if (listener != NULL) {
        listener->postData(msgType, dataPtr, metadata);
    }
}
Let us first check when this listener instance is installed; as you would expect, it happens during camera initialization, in the JNI setup routine:
// connect to camera service
static void android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
    jobject weak_this, jint cameraId, jstring clientPackageName)
{
    // Convert jstring to String16
    const char16_t *rawClientName = env->GetStringChars(clientPackageName, NULL);
    jsize rawClientNameLen = env->GetStringLength(clientPackageName);
    String16 clientName(rawClientName, rawClientNameLen);
    env->ReleaseStringChars(clientPackageName, rawClientName);

    sp<Camera> camera = Camera::connect(cameraId, clientName,
            Camera::USE_CALLING_UID);

    if (camera == NULL) {
        jniThrowRuntimeException(env, "Fail to connect to camera service");
        return;
    }

    // make sure camera hardware is alive
    if (camera->getStatus() != NO_ERROR) {
        jniThrowRuntimeException(env, "Camera initialization failed");
        return;
    }

    jclass clazz = env->GetObjectClass(thiz);
    if (clazz == NULL) {
        jniThrowRuntimeException(env, "Can't find android/hardware/Camera");
        return;
    }

    // We use a weak reference so the Camera object can be garbage collected.
    // The reference is only used as a proxy for callbacks.
    sp<JNICameraContext> context = new JNICameraContext(env, weak_this, clazz, camera);
    context->incStrong((void*)android_hardware_Camera_native_setup);
    camera->setListener(context);

    // save context in opaque field
    env->SetIntField(thiz, fields.context, (int)context.get());
}
camera->setListener(context); — this stores the newly created JNICameraContext into a member variable of the Camera object.
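For completeness, setListener itself is essentially just a locked assignment (shown here from memory, so treat it as a sketch):

// Camera.cpp: remember the listener that dataCallback()/postData() will use later.
void Camera::setListener(const sp<CameraListener>& listener)
{
    Mutex::Autolock _l(mLock);
    mListener = listener;
}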
So when a callback arrives, Camera::dataCallback() ends up in this JNI glue object, implemented in android_hardware_Camera.cpp:
class JNICameraContext: public CameraListener
void JNICameraContext::postData(int32_t msgType, const sp<IMemory>& dataPtr,
                                camera_frame_metadata_t *metadata)
{
    // VM pointer will be NULL if object is released
    Mutex::Autolock _l(mLock);
    JNIEnv *env = AndroidRuntime::getJNIEnv();
    if (mCameraJObjectWeak == NULL) {
        ALOGW("callback on dead camera object");
        return;
    }

    int32_t dataMsgType = msgType & ~CAMERA_MSG_PREVIEW_METADATA;

    // return data based on callback type
    switch (dataMsgType) {
        case CAMERA_MSG_VIDEO_FRAME:
            // should never happen
            break;

        // For backward-compatibility purpose, if there is no callback
        // buffer for raw image, the callback returns null.
        case CAMERA_MSG_RAW_IMAGE:
            ALOGV("rawCallback");
            if (mRawImageCallbackBuffers.isEmpty()) {
                env->CallStaticVoidMethod(mCameraJClass, fields.post_event,
                        mCameraJObjectWeak, dataMsgType, 0, 0, NULL);
            } else {
                copyAndPost(env, dataPtr, dataMsgType);
            }
            break;

        // There is no data.
        case 0:
            break;

        default:
            ALOGV("dataCallback(%d, %p)", dataMsgType, dataPtr.get());
            copyAndPost(env, dataPtr, dataMsgType);
            break;
    }

    // post frame metadata to Java
    if (metadata && (msgType & CAMERA_MSG_PREVIEW_METADATA)) {
        postMetadata(env, CAMERA_MSG_PREVIEW_METADATA, metadata);
    }
}
Based on the callback type it then calls copyAndPost(env, dataPtr, dataMsgType):
void JNICameraContext::copyAndPost(JNIEnv* env, const sp<IMemory>& dataPtr, int msgType)
{
    jbyteArray obj = NULL;

    // allocate Java byte array and copy data
    if (dataPtr != NULL) {
        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap = dataPtr->getMemory(&offset, &size);
        ALOGV("copyAndPost: off=%ld, size=%d", offset, size);
        uint8_t *heapBase = (uint8_t*)heap->base();

        if (heapBase != NULL) {
            const jbyte* data = reinterpret_cast<const jbyte*>(heapBase + offset);

            if (msgType == CAMERA_MSG_RAW_IMAGE) {
                obj = getCallbackBuffer(env, &mRawImageCallbackBuffers, size);
            } else if (msgType == CAMERA_MSG_PREVIEW_FRAME && mManualBufferMode) {
                obj = getCallbackBuffer(env, &mCallbackBuffers, size);

                if (mCallbackBuffers.isEmpty()) {
                    ALOGV("Out of buffers, clearing callback!");
                    mCamera->setPreviewCallbackFlags(CAMERA_FRAME_CALLBACK_FLAG_NOOP);
                    mManualCameraCallbackSet = false;

                    if (obj == NULL) {
                        return;
                    }
                }
            } else {
                ALOGV("Allocating callback buffer");
                obj = env->NewByteArray(size);
            }

            if (obj == NULL) {
                ALOGE("Couldn't allocate byte array for JPEG data");
                env->ExceptionClear();
            } else {
                env->SetByteArrayRegion(obj, 0, size, data);
            }
        } else {
            ALOGE("image heap is NULL");
        }
    }

    // post image data to Java
    env->CallStaticVoidMethod(mCameraJClass, fields.post_event,
            mCameraJObjectWeak, msgType, 0, 0, obj);

    if (obj) {
        env->DeleteLocalRef(obj);
    }
}
This is where the data is posted up to Java: fields.post_event points at Camera.java's static postEventFromNative(), which wraps the arguments in a Message and hands it to the camera's EventHandler. The handler's handleMessage() then dispatches it:
@Override
public void handleMessage(Message msg) {
    switch(msg.what) {
    case CAMERA_MSG_SHUTTER:
        if (mShutterCallback != null) {
            mShutterCallback.onShutter();
        }
        return;

    case CAMERA_MSG_RAW_IMAGE:
        if (mRawImageCallback != null) {
            mRawImageCallback.onPictureTaken((byte[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_COMPRESSED_IMAGE:
        if (mJpegCallback != null) {
            mJpegCallback.onPictureTaken((byte[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_PREVIEW_FRAME:
        PreviewCallback pCb = mPreviewCallback;
        if (pCb != null) {
            if (mOneShot) {
                // Clear the callback variable before the callback
                // in case the app calls setPreviewCallback from
                // the callback function
                mPreviewCallback = null;
            } else if (!mWithBuffer) {
                // We're faking the camera preview mode to prevent
                // the app from being flooded with preview frames.
                // Set to oneshot mode again.
                setHasPreviewCallback(true, false);
            }
            pCb.onPreviewFrame((byte[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_POSTVIEW_FRAME:
        if (mPostviewCallback != null) {
            mPostviewCallback.onPictureTaken((byte[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_FOCUS:
        AutoFocusCallback cb = null;
        synchronized (mAutoFocusCallbackLock) {
            cb = mAutoFocusCallback;
        }
        if (cb != null) {
            boolean success = msg.arg1 == 0 ? false : true;
            cb.onAutoFocus(success, mCamera);
        }
        return;

    case CAMERA_MSG_ZOOM:
        if (mZoomListener != null) {
            mZoomListener.onZoomChange(msg.arg1, msg.arg2 != 0, mCamera);
        }
        return;

    case CAMERA_MSG_PREVIEW_METADATA:
        if (mFaceListener != null) {
            mFaceListener.onFaceDetection((Face[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_ERROR:
        Log.e(TAG, "Error " + msg.arg1);
        if (mErrorCallback != null) {
            mErrorCallback.onError(msg.arg1, mCamera);
        }
        return;

    case CAMERA_MSG_FOCUS_MOVE:
        if (mAutoFocusMoveCallback != null) {
            mAutoFocusMoveCallback.onAutoFocusMoving(msg.arg1 == 0 ? false : true, mCamera);
        }
        return;

    default:
        Log.e(TAG, "Unknown message type " + msg.what);
        return;
    }
}
Execution has now reached the Java layer, so the analysis stops here.