I have recently been bringing up the camera on Android 4.4. The driver side went quickly because I had done it before: it mostly comes down to adding the device information to xxxxx.dts and matching it in the camera driver. Android 4.4 is normally paired with a 3.10 kernel, where devices are organized with a device tree; for the ARM architecture, device tree support seems to have appeared only after kernel 3.2. It is confusing at first, but once you learn it you will find that organizing and debugging device drivers with a dts is actually simpler.
Note: only static devices are described in the dts. Devices that can be detected dynamically, such as those attached over USB, are not organized through the dts.
What blocked me for a long time on the Android 4.4 camera was getting autoFocus to work, and there is very little material online about the Android 4.4 Camera HAL. When I first looked at the code I was completely lost and had to start over. The Camera HAL was upgraded in Android 4.4 to the V2 interface, and Google keeps reworking the camera HAL: a glance at the source tree shows that 4.4 uses libcamera2 in the hardware layer by default, and the camera APK source uses Camera2 as well. From Android 5.0 onward, Camera HAL V3 is recommended; I skimmed that source briefly and it changes again compared with V2.
Here is a short rundown of how autofocus is implemented on Android 4.4 and which functions are involved.
The first function is trigger_action() in CameraModule.cpp:
int trigger_action(const struct camera2_device *device,
                   uint32_t trigger_id,
                   int32_t ext1,
                   int32_t ext2)
The possible values of trigger_id are:
CAMERA2_TRIGGER_AUTOFOCUS、
CAMERA2_TRIGGER_CANCEL_AUTOFOCUS、
CAMERA2_TRIGGER_PRECAPTURE_METERING;
ext1 is the trigger ID that the framework passes down (it increases with every new trigger, and it has to be echoed back later through the notify callback). The AF state that the HAL reports back in that callback is one of the values defined in camera_metadata_tags.h:
typedef enum camera_metadata_enum_android_control_af_state {
ANDROID_CONTROL_AF_STATE_INACTIVE,
ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN,
ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED,
ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN,
ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED,
ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED,
ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED,
} camera_metadata_enum_android_control_af_state_t;
ext2 is usually passed down as 0 and has no real meaning here; however, the ext2 argument does matter when the HAL reports the completed focus back through mNotifyCb().
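For reference, this is roughly how the framework issues the trigger. The snippet below is a paraphrased sketch of Camera2Device.cpp, not verbatim source; in particular, the name of the member that holds the camera2_device_t may differ in your tree:

status_t Camera2Device::triggerAutofocus(uint32_t id) {
    ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id);
    // mHal2Device stands for the camera2_device_t* held by the wrapper;
    // the trigger ID goes down as ext1 and ext2 is simply 0.
    return mHal2Device->ops->trigger_action(mHal2Device,
            CAMERA2_TRIGGER_AUTOFOCUS, id, 0);
}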
The second function: camera2_notify_callback mNotifyCb.
mNotifyCb does not have to be implemented in the HAL: it is a callback implemented by the framework in Camera2Device.cpp and handed down to the Camera HAL through the set_notify_callback() device op. When autofocus finishes, the HAL must report back through it with meaningful ext1 and ext2 values: the callback's ext1 is the AfState after focusing has completed, and if that value is wrong the camera gets stuck focusing, the white focus circle never disappears, and so on. The callback's ext2 is the ID of the trigger that produced the new state (which is exactly the ext1 value the framework passed down in trigger_action). The framework side of mNotifyCb looks like this:
void Camera2Device::notificationCallback(int32_t msg_type,
        int32_t ext1,
        int32_t ext2,
        int32_t ext3,
        void *user) {
    ATRACE_CALL();
    NotificationListener *listener = reinterpret_cast<NotificationListener*>(user);
    ALOGV("%s: Notification %d, arguments %d, %d, %d", __FUNCTION__, msg_type,
            ext1, ext2, ext3);
    if (listener != NULL) {
        switch (msg_type) {
            case CAMERA2_MSG_ERROR:
                listener->notifyError(ext1, ext2, ext3);
                break;
            case CAMERA2_MSG_SHUTTER: {
                // TODO: Only needed for camera2 API, which is unsupported
                // by HAL2 directly.
                // nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 );
                // listener->notifyShutter(requestId, timestamp);
                break;
            }
            case CAMERA2_MSG_AUTOFOCUS:    // autofocus message
                listener->notifyAutoFocus(ext1, ext2); // (ext1 = newState, ext2 = triggerId)
                break;
            case CAMERA2_MSG_AUTOEXPOSURE: // auto exposure
                listener->notifyAutoExposure(ext1, ext2);
                break;
            case CAMERA2_MSG_AUTOWB:       // auto white balance
                listener->notifyAutoWhitebalance(ext1, ext2);
                break;
            default:
                ALOGE("%s: Unknown notification %d (arguments %d, %d, %d)!",
                        __FUNCTION__, msg_type, ext1, ext2, ext3);
        }
    }
}
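To see why a wrong ext1 freezes the focus UI, here is a hedged, purely illustrative sketch of what the listener (Camera2Client on 4.4) essentially does with that notification; the helper name below is made up and is not the real method:

// The app-visible one-shot CAMERA_MSG_FOCUS callback is derived from the
// reported AF state, so if the HAL never reports one of the *_LOCKED states
// the application's focus indicator keeps spinning forever.
static void onAutoFocusNotification(uint8_t newAfState, int32_t triggerId) {
    (void)triggerId; // kept only to mirror notifyAutoFocus(ext1, ext2)
    bool success = (newAfState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
    bool done    = success ||
                   (newAfState == ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
    if (done) {
        // the real listener forwards CAMERA_MSG_FOCUS with `success`
        // to the application here
    }
}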
The third function, autoFocusThread() in DeviceAdapter.cpp:
int DeviceAdapter::autoFocusThread()
{
    sp<CameraEvent> cameraEvt = new CameraEvent();
    cameraEvt->mEventType = CameraEvent::EVENT_FOCUS;
    dispatchEvent(cameraEvt);
    // This is where the V4L2 access to the underlying driver gets added.
    // Note that a few more functions are needed to get from trigger_action()
    // down to autoFocusThread(); they are all simple, just calls from one
    // layer to the next.

    // exit the thread.
    return UNKNOWN_ERROR;
}
autoFocusThread() is normally called from inside a threadLoop(){}. On 4.4 I could not figure out how to make that thread exit; what I saw while debugging was that once Camera2.apk took a picture or focused, the thread never exited again. In the end I stopped going through threadLoop() entirely and had autoFocus call my own DeviceAdapter::autoFocusThread() directly, which solved the problem.
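A minimal sketch of that shortcut, assuming DeviceAdapter exposes an autoFocus() entry point (the entry-point name is an assumption; adapt it to your tree):

status_t DeviceAdapter::autoFocus()
{
    // call the focus routine synchronously instead of run()-ing a thread
    // whose threadLoop() would invoke it, so there is nothing to tear down
    return autoFocusThread();
}

The modified autoFocusThread() itself then becomes: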
int DeviceAdapter::autoFocusThread()
{
    sp<CameraEvent> cameraEvt = new CameraEvent();
    cameraEvt->mEventType = CameraEvent::EVENT_FOCUS;
    dispatchEvent(cameraEvt);

    // Kick one-shot autofocus in the sensor driver through V4L2.
    int focusModule = 1;
    struct v4l2_control ctrl;
    ctrl.id = V4L2_CID_FOCUS_AUTO;
    ctrl.value = focusModule;
    if (ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctrl) < 0) {
        ALOGE("set ctrl auto focus failed\n");
        return BAD_VALUE;
    }
    return NO_ERROR;
}
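One optional hardening step (my own sketch, not something the stock HAL does): query the driver first to check whether V4L2_CID_FOCUS_AUTO exists at all, so a fixed-focus sensor fails gracefully instead of logging ioctl errors on every focus attempt. mCameraHandle is assumed to be the same opened V4L2 fd used above:

struct v4l2_queryctrl qctrl;
memset(&qctrl, 0, sizeof(qctrl));
qctrl.id = V4L2_CID_FOCUS_AUTO;
if (ioctl(mCameraHandle, VIDIOC_QUERYCTRL, &qctrl) < 0 ||
        (qctrl.flags & V4L2_CTRL_FLAG_DISABLED)) {
    ALOGW("V4L2_CID_FOCUS_AUTO is not supported by this sensor driver");
}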
Note 1:
Some Android 4.4 source trees do not support autoFocus at all. In that case you need to modify createStaticInfo() in MetadaManager.cpp (called constructStaticInfo() in some trees). The entries in availableAfModes[] are the AF modes the HAL advertises as supported; minFocusDistance = 1 effectively acts as an on/off switch, and together with max3aRegions = 0 it determines what getFocusMode() returns in the Camera2.apk source.
status_t MetadaManager::createStaticInfo(camera_metadata_t **info, bool sizeRequest)
{
    //static float minFocusDistance = 0;
    static float minFocusDistance = 1;
    static const uint8_t availableAfModes[] = {
        ANDROID_CONTROL_AF_MODE_OFF,                  // new add
        ANDROID_CONTROL_AF_MODE_AUTO,                 // new add
        ANDROID_CONTROL_AF_MODE_MACRO,                // new add
        //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   // new add
        //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE  // new add
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
            availableAfModes, sizeof(availableAfModes));
}
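The snippet above only shows the AF mode list; minFocusDistance itself also has to be written into the static metadata, otherwise the framework treats the lens as fixed-focus. A hedged one-liner for that (the tag name varies between releases: the older camera2 headers use ANDROID_LENS_MINIMUM_FOCUS_DISTANCE, newer trees use ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, so check your camera_metadata_tags.h):

// Publish the minimum focus distance; a non-zero value tells the framework
// that the module can actually focus. Verify the tag name in your tree.
ADD_OR_SIZE(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE, &minFocusDistance, 1);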
The relevant functions are:

status_t MetadaManager::createDefaultRequest(
        int request_template,
        camera_metadata_t **request,
        bool sizeRequest);
status_t MetadaManager::createStaticInfo(camera_metadata_t **info, bool sizeRequest);

which in some source trees are called:

status_t ExynosCamera2::constructDefaultRequest(
        int request_template,
        camera_metadata_t **request,
        bool sizeRequest);
status_t ExynosCamera2::constructStaticInfo(camera_metadata_t **info,
        int cameraId, bool sizeRequest);
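createDefaultRequest()/constructDefaultRequest() fills in the default capture request templates; if the template carries an AF mode that is not listed in availableAfModes, the app side never issues an AF trigger. A minimal sketch of the line to check there (same ADD_OR_SIZE macro as above; choosing AUTO here is only an example, not what every tree does):

// Make sure the default request uses an AF mode that the HAL actually
// advertises in availableAfModes (AUTO is just an illustrative choice).
static const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO;
ADD_OR_SIZE(ANDROID_CONTROL_AF_MODE, &afMode, 1);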
Note 2:
If, while debugging, the camera hangs after taking a picture or the small focus circle never disappears, it is because the ext1 and ext2 values reported upward are wrong, ext1 above all. ext1 is the awkward one: it has to be obtained from the framework and then reported back to it. EmulatedFakeCamera2.cpp does it like this:
bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
    if (mRequest == NULL) {
        Mutex::Autolock il(mInternalsMutex);
        ALOGV("Configure: Getting next request");
        // mRequestQueueSrc is also handed down from, and implemented in, the framework
        res = mParent->mRequestQueueSrc->dequeue_request(
            mParent->mRequestQueueSrc,
            &mRequest);
        if (res != NO_ERROR) {
            ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
            mParent->signalError();
            return false;
        }
        if (mRequest == NULL) {
            ALOGV("Configure: Request queue empty, going inactive");
            // No requests available, go into inactive mode
            Mutex::Autolock lock(mInputMutex);
            mActive = false;
            return true;
        } else {
            Mutex::Autolock lock(mInputMutex);
            mRequestCount++;
        }
    }
In threadLoop(), res = mParent->mRequestQueueSrc->dequeue_request(mParent->mRequestQueueSrc, &mRequest) fetches the next request. mRequestQueueSrc (a const camera2_request_queue_src_ops *) is likewise implemented in the framework; it reaches the HAL through set_request_queue_src_ops(), and dequeue_request() yields mRequest:
int EmulatedCamera2::set_request_queue_src_ops(const camera2_device_t *d,
        const camera2_request_queue_src_ops *queue_src_ops) {
    EmulatedCamera2* ec = getInstance(d);
    ec->mRequestQueueSrc = queue_src_ops; // the queue ops are supplied by the framework
    return NO_ERROR;
}
camera2_device_ops_t EmulatedCamera2::sDeviceOps = {
    EmulatedCamera2::set_request_queue_src_ops,       // the queue ops come from the framework
    EmulatedCamera2::notify_request_queue_not_empty,  // called from the framework
    EmulatedCamera2::set_frame_queue_dst_ops,
    EmulatedCamera2::get_in_progress_count,
    EmulatedCamera2::flush_captures_in_progress,
    EmulatedCamera2::construct_default_request,
    EmulatedCamera2::allocate_stream,
    EmulatedCamera2::register_stream_buffers,
    EmulatedCamera2::release_stream,
    EmulatedCamera2::allocate_reprocess_stream,
    EmulatedCamera2::allocate_reprocess_stream_from_stream,
    EmulatedCamera2::release_reprocess_stream,
    EmulatedCamera2::trigger_action,                  // autofocus: you implement this yourself
    EmulatedCamera2::set_notify_callback,             // receives the framework-implemented notify callback
    EmulatedCamera2::get_metadata_vendor_tag_ops,
    EmulatedCamera2::dump
};
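For completeness, the static trigger_action entry in this table just forwards to the C++ instance, following the same pattern as set_request_queue_src_ops above. A sketch of that trampoline (the emulated HAL's real wrapper is equivalent, but treat the exact method name triggerAction as an assumption):

int EmulatedCamera2::trigger_action(const camera2_device_t *d,
        uint32_t trigger_id, int32_t ext1, int32_t ext2) {
    // resolve the instance behind the C device struct and forward the trigger
    EmulatedCamera2* ec = getInstance(d);
    return ec->triggerAction(trigger_id, ext1, ext2);
}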
Then afMode, which the framework set in the request, is read out of mRequest:
status_t EmulatedFakeCamera2::ControlThread::processRequest(camera_metadata_t *request) {
    Mutex::Autolock lock(mInputMutex);
    // disable all 3A
    // 3A = AF (autofocus), AE (auto exposure), AWB (auto white balance)
    if (mControlMode == ANDROID_CONTROL_MODE_OFF) {
        mEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
        mSceneMode = ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
        mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
        mAeLock = ANDROID_CONTROL_AE_LOCK_ON;
        mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
        mAfModeChange = true;
        mStartAf = false;
        mCancelAf = true;
        mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
        mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
        return res;
    }

    // Read afMode from the request; the AfState reported back (the callback's
    // ext1) is decided according to this mode -- see the comments in camera2.h.
    res = find_camera_metadata_entry(request,
            ANDROID_CONTROL_AF_MODE,
            &mode);
    if (mAfMode != mode.data.u8[0]) {
        ALOGV("AF new mode: %d, old mode %d", mode.data.u8[0], mAfMode);
        mAfMode = mode.data.u8[0]; // e.g. ANDROID_CONTROL_AF_MODE_OFF
        mAfModeChange = true;
        mStartAf = false;
        mCancelAf = false;
    }
}
Once afMode is known, trigger_action() can decide the AfState from it and return that state to the framework as the ext1 argument of mNotifyCb():
if (mNotifyCb != NULL) {
    ALOGE("CameraHal_2=[%d]-%s-%d---trigger_id=%u:ext1=%d:ext2=%d\n",
            CameraHal_2, __func__, __LINE__, trigger_id, ext1, ext2);
    mNotifyCb(CAMERA2_MSG_AUTOFOCUS, mAfState, mAfTriggerId, 0, mNotifyUserPtr);
}
That, as far as I understand it, is the correct way to do it, but it is quite involved (or perhaps my understanding is off). What I actually did is shown below; it solves the problem, but I do not recommend it. Time pressure meant solving the issue first and optimizing later:
int CameraHal::triggerAction(uint32_t trigger_id, int ext1, int ext2)
{
    uint8_t mAfState = 0;
    int32_t mAfTriggerId = ext1;
    //ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
    switch (trigger_id) {
    case CAMERA2_TRIGGER_AUTOFOCUS:
        ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
        mRequestManager->RequestManagerAutoFocus(trigger_id);
        mAfState = processAfTrigger(ext1);
        if (mNotifyCb != NULL) {
            ALOGE("CameraHal_2=[%d]-%s-%d---trigger_id=%u:ext1=%d:ext2=%d\n",
                    CameraHal_2, __func__, __LINE__, trigger_id, ext1, ext2);
            mNotifyCb(CAMERA2_MSG_AUTOFOCUS, mAfState, mAfTriggerId, 0, mNotifyUserPtr);
        }
        break;
    case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
        ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
        mRequestManager->RequestManagerAutoFocus(trigger_id);
        mAfState = processAfTrigger(ext1);
        if (mNotifyCb != NULL) {
            ALOGE("CameraHal_2=[%d]-%s-%d---trigger_id=%u:ext1=%d:ext2=%d\n",
                    CameraHal_2, __func__, __LINE__, trigger_id, ext1, ext2);
            mNotifyCb(CAMERA2_MSG_AUTOFOCUS, mAfState, mAfTriggerId, 0, mNotifyUserPtr);
        }
        break;
    case CAMERA2_TRIGGER_PRECAPTURE_METERING:
        ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
        //return mRequestManager->RequestManagerAutoFocus(trigger_id);
        break;
    default:
        break;
    }
    return 0;
}
int CameraHal::processAfTrigger(uint8_t afState) {
    // These values were found by experiment. They work around the ext1
    // reporting problem, but the incoming value keeps increasing, so this
    // should eventually be reworked to follow the stock state machine.
    ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
    switch (afState) {
    case ANDROID_CONTROL_AF_STATE_INACTIVE:             // 0
        afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
        ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
        break;
    case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:         // 1
        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
        ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
        // fall through
    case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:      // 2
        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
        ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
        break;
    case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:          // 3
        afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
        ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
        // fall through
    case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:       // 4
        afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
        ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
        // fall through
    case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:   // 5
        afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
        ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
        // fall through
    case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:    // 6
        afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
        ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
        // fall through
    default:
        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
        ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
        break;
    }
    ALOGE("CameraHal_2=[%d]-%s-%d:afState=%d\n", CameraHal_2, __func__, __LINE__, afState);
    return afState;
}
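For the eventual cleanup, the direction the camera2.h comments point to is to track the HAL's own AF state rather than deriving it from the incoming ID. A minimal, self-contained sketch of that mapping for AF_MODE_AUTO/MACRO, written from my reading of those comments (not code taken from any tree; header paths follow the usual AOSP layout):

#include <stdint.h>
#include <hardware/camera2.h>                // CAMERA2_TRIGGER_* values
#include <system/camera_metadata_tags.h>     // ANDROID_CONTROL_AF_STATE_* values

// A trigger moves the state to ACTIVE_SCAN, the scan result moves it to
// FOCUSED_LOCKED or NOT_FOCUSED_LOCKED, and a cancel returns it to INACTIVE.
static uint8_t nextAfState(uint32_t trigger, bool scanDone, bool focused)
{
    if (trigger == CAMERA2_TRIGGER_CANCEL_AUTOFOCUS)
        return ANDROID_CONTROL_AF_STATE_INACTIVE;
    if (!scanDone)
        return ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
    return focused ? ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
                   : ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
}

The value this returns is what would go into mNotifyCb()'s ext1, with the trigger ID received in trigger_action() echoed back as ext2.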