《【高通SDM660平台】(1) — Camera 驱动 Bringup Guide》
《【高通SDM660平台】(2) — Camera Kernel 驱动层代码逻辑分析》
《【高通SDM660平台】(3) — Camera V4L2 驱动层分析 》
《【高通SDM660平台】(4) — Camera Init 初始化流程 》
《【高通SDM660平台】(5) — Camera Open 流程》
《【高通SDM660平台】(6) — Camera getParameters 及 setParameters 流程》
《【高通SDM660平台】(7) — Camera onPreview 代码流程》
《【高通SDM660平台】(8) — Camera MetaData介绍》
《【高通SDM660平台 Android 10.0】(9) — Qcom Camera Daemon 代码分析》
《【高通SDM660平台 Android 10.0】(10) — Camera Sensor lib 与 Kernel Camera Probe 代码分析》
《【高通SDM660平台】Camera Capture 流程》
《【高通SDM660平台】Camera mm-qcamera-app 代码分析》
@ frameworks/base/core/java/android/hardware/Camera.java
@Deprecated
public class Camera {
private static final String TAG = "Camera";
public native final void startPreview();
}
可以看出,代码是获取 native 层Camera 对象,调用 Camera::startPreview()
方法
@ frameworks/base/core/jni/android_hardware_Camera.cpp
// JNI bridge for Camera.startPreview(): resolves the native Camera object
// backing the Java instance and forwards the call; throws a Java
// RuntimeException when the native call fails.
static void android_hardware_Camera_startPreview(JNIEnv *env, jobject thiz)
{
    ALOGV("startPreview");
    sp<Camera> camera = get_native_camera(env, thiz, NULL);
    // get_native_camera() returns NULL (and has already thrown) when the
    // Java object was released; the excerpt dereferenced it unchecked,
    // which crashes the process.
    if (camera == 0) return;
    if (camera->startPreview() != NO_ERROR) {
        jniThrowRuntimeException(env, "startPreview failed");
        return;
    }
}
//-------------------------------------------------
// JNI registration table: maps the Java method "startPreview" (signature
// "()V") onto the native implementation above.
// Added the missing ';' terminating the array definition.
static const JNINativeMethod camMethods[] = {
    { "startPreview", "()V", (void *)android_hardware_Camera_startPreview },
};
@ frameworks/av/camera/Camera.cpp
// start preview mode
status_t Camera::startPreview()
{
ALOGV("startPreview");
sp <::android::hardware::ICamera> c = mCamera;
if (c == 0) return NO_INIT;
return c->startPreview();
========>
@ frameworks/av/camera/ICamera.cpp
// BpCamera-side proxy: marshals the START_PREVIEW command into a Parcel
// and ships it across binder to the service side (BnCamera::onTransact).
status_t startPreview()
{
ALOGV("startPreview");
// 'data' carries the outgoing request, 'reply' receives the remote result.
Parcel data, reply;
// The interface token lets the receiver verify the call targets ICamera.
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
// Synchronous binder transaction; the remote writes its status_t into
// the reply parcel as the first int32.
remote()->transact(START_PREVIEW, data, &reply);
return reply.readInt32();
}
}
在 ICamera.cpp 的 BpCamera 代理对象中,通过 binder 远程调用 BnCamera 的 onTransact()
方法
@ frameworks/av/camera/ICamera.cpp
status_t BnCamera::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
switch(code) {
case START_PREVIEW: {
ALOGV("START_PREVIEW");
CHECK_INTERFACE(ICamera, data, reply);
reply->writeInt32(startPreview());
return NO_ERROR;
} break;
可以看出 ,调用的是BnCamera 的中的 startPreview() 方法,但实际上 BnCamera 并没有 实现startPreview() 方法,它是继承于 ICamera 的虚方法。
因此,startPreview()
是由 BnCamera 的子类来实现。
@ frameworks/av/services/camera/libcameraservice/CameraService.h
// Camera service: binder service entry point for all camera clients.
class CameraService :
    public BinderService<CameraService>,
    public ::android::hardware::BnCameraService,
    public IBinder::DeathRecipient,
    public camera_module_callbacks_t
{
    // Per-connection client object. Client derives from BnCamera, i.e. it is
    // the binder-native implementation of ICamera; startPreview()/stopPreview()
    // are left pure virtual here and implemented by subclasses (CameraClient).
    class Client : public hardware::BnCamera, public BasicClient
    {
    public:
        virtual status_t startPreview() = 0;
        virtual void stopPreview() = 0;
    };  // fixed: class declarations must be terminated with ';'
};
@ frameworks/av/services/camera/libcameraservice/api1/CameraClient.h
// CameraClient: concrete HAL1 implementation of CameraService::Client; it
// supplies the real bodies for the pure-virtual ICamera operations.
class CameraClient : public CameraService::Client
{
public:
    virtual status_t startPreview();
    virtual void stopPreview();
};  // fixed: added the missing ';' terminating the class declaration
可以看出,CameraService 的内部类 Client 是继承了 BnCamera 的 startPreview()
方法,
接着,CameraClient 继承了 CameraService::Client ,最终 startPreview()
方法 是由 CameraClient 来实现了
@ frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp
// start preview mode
// Entry point for ICamera::startPreview(): funnels into the shared
// preview/recording startup path with the preview mode selected.
status_t CameraClient::startPreview() {
    LOG1("startPreview (pid %d)", getCallingPid());
    const camera_mode requestedMode = CAMERA_PREVIEW_MODE;
    return startCameraMode(requestedMode);
}
// start preview or recording
// Shared startup path for preview and recording.
// Returns the status of the selected mode, or BAD_VALUE for an unknown
// mode (the excerpt fell off the end of a non-void function, which is
// undefined behavior).
status_t CameraClient::startCameraMode(camera_mode mode) {
    LOG1("startCameraMode(%d)", mode);
    switch(mode) {
    case CAMERA_PREVIEW_MODE:
        return startPreviewMode();
    case CAMERA_RECORDING_MODE:
        return startRecordingMode();
    default:
        return BAD_VALUE;
    }
}
// Configures the preview window on the HAL and starts the preview stream;
// on success, reports the camera as active to the service proxy.
status_t CameraClient::startPreviewMode() {
    LOG1("startPreviewMode");
    // fixed: 'result' was used without a declaration in the excerpt.
    status_t result = NO_ERROR;
    if (mPreviewWindow != 0) {
        // Re-apply scaling mode and rotation so every buffer is displayed
        // with the current parameters.
        mHardware->setPreviewScalingMode( NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
        mHardware->setPreviewTransform(mOrientation);
    }
    // Hand the (possibly NULL) preview window down to the HAL.
    mHardware->setPreviewWindow(mPreviewWindow);
    result = mHardware->startPreview();
    if (result == NO_ERROR) {
        // Notify the service proxy that this camera id is now streaming.
        mCameraService->updateProxyDeviceState( ICameraServiceProxy::CAMERA_STATE_ACTIVE,
            String8::format("%d", mCameraId));
    }
    return result;
}
mHardware 对象定义为 sp&lt;CameraHardwareInterface&gt; mHardware;
可知 mHardware->startPreview()
实现如下:
@ frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h
// Thin C++ wrapper over the legacy HAL1 C interface (camera_device_t).
class CameraHardwareInterface : public virtual RefBase {
public:
/*** Start preview mode. */
status_t startPreview()
{
ALOGV("%s(%s)", __FUNCTION__, mName.string());
// Forward to the HAL's function table when the entry is populated;
// a NULL entry means this HAL does not implement preview.
if (mDevice->ops->start_preview)
return mDevice->ops->start_preview(mDevice);
return INVALID_OPERATION;
}
}
mDevice 定义为 camera_device_t *mDevice;
camera_device_t定义在:
@ hardware/libhardware/include/hardware/camera.h
// Forward declaration; the full definition follows below.
struct camera_device;
// HAL1 operation table: the vendor HAL fills these function pointers
// (see QCamera2HardwareInterface::mCameraOps below).
typedef struct camera_device_ops {
/**
 * Start preview mode.
 */
int (*start_preview)(struct camera_device *);
}camera_device_ops_t;
typedef struct camera_device {
/**
 * camera_device.common.version must be in the range
 * HARDWARE_DEVICE_API_VERSION(0,0)-(1,FF). CAMERA_DEVICE_API_VERSION_1_0 is
 * recommended.
 */
hw_device_t common;
// Operation table implemented by the vendor HAL.
camera_device_ops_t *ops;
// Opaque pointer to the HAL's private state (here QCamera2HardwareInterface*,
// see the reinterpret_cast in start_preview()).
void *priv;
} camera_device_t;
@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
// Vendor HAL1 function table: binds the generic camera_device_ops entry
// points to the QCamera2HardwareInterface static handlers.
camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {
    .start_preview = QCamera2HardwareInterface::start_preview,
    .stop_preview = QCamera2HardwareInterface::stop_preview,
};  // fixed: added the missing ';' terminating the definition
通过 processAPI()
往底层下发 QCAMERA_SM_EVT_START_PREVIEW
事件
@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
int QCamera2HardwareInterface::start_preview(struct camera_device *device)
{
KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_START_PREVIEW);
QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
LOGI("[KPI Perf]: E PROFILE_START_PREVIEW camera id %d", hw->getCameraId());
qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
uint32_t cam_type = CAM_TYPE_MAIN;
if (hw->isDualCamera()) {
cam_type = MM_CAMERA_DUAL_CAM;
}
if (hw->isNoDisplayMode(cam_type)) {
evt = QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW;
}
ret = hw->processAPI(evt, NULL);
=========>
@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
int QCamera2HardwareInterface::processAPI(qcamera_sm_evt_enum_t api, void *api_payload)
{
ret = m_stateMachine.procAPI(api, api_payload);
}
if (ret == NO_ERROR) {
hw->waitAPIResult(evt, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
LOGI("[KPI Perf]: X ret = %d", ret);
return ret;
}
经过层层调用,最终跑到了 QCameraStateMachine::procAPI()
中, evt=QCAMERA_SM_EVT_START_PREVIEW
将消息,保存在 class QCameraQueue
中。
node 中包括: QCAMERA_SM_EVT_START_PREVIEW 和 QCAMERA_SM_CMD_TYPE_API 两个消息
@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
QCameraStateMachine m_stateMachine;
// Packages an API call as a command node and posts it to the state-machine
// thread. Returns NO_ERROR on success, NO_MEMORY when allocation fails, or
// UNKNOWN_ERROR when the queue rejects the node.
int32_t QCameraStateMachine::procAPI(qcamera_sm_evt_enum_t evt, void *api_payload)
{
    qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
    // fixed: malloc result was dereferenced unchecked in the excerpt.
    if (NULL == node) {
        return NO_MEMORY;
    }
    memset(node, 0, sizeof(qcamera_sm_cmd_t));
    node->cmd = QCAMERA_SM_CMD_TYPE_API;
    node->evt = evt;
    node->evt_payload = api_payload;
    if (api_queue.enqueue((void *)node)) {
        // Wake smEvtProcRoutine(), which blocks on cmd_sem.
        cam_sem_post(&cmd_sem);
        return NO_ERROR;
    }
    // fixed: the excerpt fell off the end of the function here (UB) and
    // leaked the node when the queue was inactive.
    free(node);
    return UNKNOWN_ERROR;
}
在QCameraQueue::enqueue() 中添加队列
@ hardware/qcom/camera/QCamera2/util/QCameraQueue.cpp
bool QCameraQueue::enqueue(void *data)
{
bool rc;
camera_q_node *node = (camera_q_node *)malloc(sizeof(camera_q_node));
memset(node, 0, sizeof(camera_q_node));
node->data = data;
if (m_active) {
cam_list_add_tail_node(&node->list, &m_head.list);
m_size++;
rc = true;
}
return rc;
}
在QCameraStateMachine
中创建了一个线程用 于处理evt 事件
@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
// Constructor: spins up the dedicated state-machine thread that drains
// api_queue/evt_queue (see smEvtProcRoutine).
QCameraStateMachine::QCameraStateMachine(QCamera2HardwareInterface *ctrl) :
api_queue(),
evt_queue()
{
// The thread blocks on cmd_sem and dispatches queued commands.
pthread_create(&cmd_pid, NULL,smEvtProcRoutine,this);
// Thread name shows up in ps/systrace as "CAM_stMachine".
pthread_setname_np(cmd_pid, "CAM_stMachine");
}
在线程 *QCameraStateMachine::smEvtProcRoutine()
中,
因为下发的是 QCAMERA_SM_EVT_START_PREVIEW
和 QCAMERA_SM_CMD_TYPE_API
两个消息 ,
可知跑 case QCAMERA_SM_CMD_TYPE_API
@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
// State-machine thread body: waits on cmd_sem, then drains command nodes
// (API commands first, then internal events) and routes each through
// stateMachine() until an EXIT command stops the loop.
void *QCameraStateMachine::smEvtProcRoutine(void *data)
{
    int running = 1, ret;
    QCameraStateMachine *pme = (QCameraStateMachine *)data;
    LOGH("E");
    do {
        do {
            // Block until procAPI()/an event producer posts cmd_sem.
            ret = cam_sem_wait(&pme->cmd_sem);
        } while (ret != 0);
        // we got notified about new cmd avail in cmd queue
        // first check API cmd queue
        qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)pme->api_queue.dequeue();
        if (node == NULL) {
            // no API cmd, then check evt cmd queue
            node = (qcamera_sm_cmd_t *)pme->evt_queue.dequeue();
        }
        if (node != NULL) {
            switch (node->cmd) {
            case QCAMERA_SM_CMD_TYPE_API:
                pme->stateMachine(node->evt, node->evt_payload);
                // API is in a way sync call, so evt_payload is managed by HWI
                // no need to free payload for API
                break;
            case QCAMERA_SM_CMD_TYPE_EVT:
                pme->stateMachine(node->evt, node->evt_payload);
                // EVT is async call, so payload need to be free after use
                free(node->evt_payload);
                node->evt_payload = NULL;
                break;
            case QCAMERA_SM_CMD_TYPE_EXIT:
                running = 0;
                break;
            default:
                break;
            }
            // fixed: the node itself is malloc'd by the producer (procAPI);
            // the excerpt leaked it and also dropped this closing brace,
            // leaving the do/while unbalanced.
            free(node);
            node = NULL;
        }
    } while (running);
    LOGH("X");
    return NULL;
}
在 QCameraStateMachine::stateMachine()
中,会根据之前的 m_state 状态来决定当前代码怎么走的,
在初始化时 配置 m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
因此我们第一次preview 是走 procEvtPreviewStoppedState
@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
/*
 * Central dispatcher: routes an incoming event to the handler matching the
 * current state, then (dual-camera only) refreshes the FOV-control result
 * flag. States with no handler leave rc at NO_ERROR.
 */
int32_t QCameraStateMachine::stateMachine(qcamera_sm_evt_enum_t evt, void *payload)
{
    int32_t rc = NO_ERROR;
    LOGL("m_state %d, event (%d)", m_state, evt);

    if (m_state == QCAMERA_SM_STATE_PREVIEW_STOPPED) {
        rc = procEvtPreviewStoppedState(evt, payload);
    } else if (m_state == QCAMERA_SM_STATE_PREVIEW_READY) {
        rc = procEvtPreviewReadyState(evt, payload);
    } else if (m_state == QCAMERA_SM_STATE_PREVIEWING) {
        rc = procEvtPreviewingState(evt, payload);
    } else if (m_state == QCAMERA_SM_STATE_PREPARE_SNAPSHOT) {
        rc = procEvtPrepareSnapshotState(evt, payload);
    } else if (m_state == QCAMERA_SM_STATE_PIC_TAKING) {
        rc = procEvtPicTakingState(evt, payload);
    } else if (m_state == QCAMERA_SM_STATE_RECORDING) {
        rc = procEvtRecordingState(evt, payload);
    } else if (m_state == QCAMERA_SM_STATE_VIDEO_PIC_TAKING) {
        rc = procEvtVideoPicTakingState(evt, payload);
    } else if (m_state == QCAMERA_SM_STATE_PREVIEW_PIC_TAKING) {
        rc = procEvtPreviewPicTakingState(evt, payload);
    }

    if (m_parent->isDualCamera()) {
        /* Update the FOVControl dual camera result state based on the current state.
           Update the result only in previewing and recording states */
        const bool streaming = (m_state == QCAMERA_SM_STATE_PREVIEWING) ||
                (m_state == QCAMERA_SM_STATE_RECORDING);
        bool updateResultState = streaming && !m_parent->mActiveAF
                && !m_parent->m_bPreparingHardware;
        m_parent->m_pFovControl->UpdateFlag(FOVCONTROL_FLAG_UPDATE_RESULT_STATE,
                &updateResultState);
    }
    return rc;
}
由于,我们下发的 evt 事件为 QCAMERA_SM_EVT_START_PREVIEW
,
首先调用 m_parent->preparePreview()
准备preview,添加 QCAMERA_CH_TYPE_PREVIEW
完毕后,将状态机,切换为 QCAMERA_SM_STATE_PREVIEW_READY
.
@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
int32_t QCameraStateMachine::procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt,
void *payload)
{
int32_t rc = NO_ERROR;
qcamera_api_result_t result;
memset(&result, 0, sizeof(qcamera_api_result_t));
LOGL("event (%d)", evt);
switch (evt) {
case QCAMERA_SM_EVT_START_PREVIEW:
{
rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
if (m_parent->mPreviewWindow == NULL) {
rc = m_parent->preparePreview();
=======>
rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
========>
addPreviewChannel();
if(rc == NO_ERROR) {
// preview window is not set yet, move to previewReady state
m_state = QCAMERA_SM_STATE_PREVIEW_READY;
}
} else {
rc = m_parent->preparePreview();
if (rc == NO_ERROR) {
applyDelayedMsgs();
rc = m_parent->startPreview();
if (rc != NO_ERROR) {
m_parent->unpreparePreview();
} else {
// start preview success, move to previewing state
m_state = QCAMERA_SM_STATE_PREVIEWING;
}
}
}
result.status = rc;
result.request_api = evt;
result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
m_parent->signalAPIResult(&result);
}
break;
@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
int32_t QCamera2HardwareInterface::addChannel(qcamera_ch_type_enum_t ch_type)
{
switch (ch_type) {
case QCAMERA_CH_TYPE_PREVIEW:
rc = addPreviewChannel();
break;
在 addPreviewChannel() 中
调用 addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,preview_stream_cb_routine, this);
注册preview_stream 回调函数。
回调函数为 preview_stream_cb_routine()
同时设置同步信号回调函数: pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW, synchronous_stream_cb_routine);
回调函数为 synchronous_stream_cb_routine()
如果需要获取 raw yuv 的图,则调用rc = addStreamToChannel(pChannel,CAM_STREAM_TYPE_RAW, preview_raw_stream_cb_routine,this);
回调函数为 preview_raw_stream_cb_routine()
在回调函数 中会dump 为 yuv 文件
@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
// Builds the preview channel: creates a QCameraChannel and attaches the
// metadata stream plus a preview or RAW stream depending on RDI/secure/
// no-display mode; optionally adds analysis and raw-dump streams.
// NOTE(review): the "=====>" lines are the blog's call-chain annotations,
// not compilable code; 'rc' is declared in the real function but not in
// this excerpt.
int32_t QCamera2HardwareInterface::addPreviewChannel()
{
QCameraChannel *pChannel = NULL;
char value[PROPERTY_VALUE_MAX];
bool raw_yuv = false;
uint32_t handle = getCamHandleForChannel(QCAMERA_CH_TYPE_PREVIEW);
=====>
handle = mCameraHandle->camera_handle;
// Construct the QCameraChannel, passing the mCameraHandle->ops operation
// table; ops was initialized in the constructor.
pChannel = new QCameraChannel(handle, mCameraHandle->ops);
// Initialize the channel; preview-only channel, doesn't need bundle attr and cb.
rc = pChannel->init(NULL, NULL, NULL);
// meta data stream always coexists with preview if applicable
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA, metadata_stream_cb_routine, this);
if (isRdiMode()) {
if (isSecureMode()) {
LOGI("RDI - secure CB");
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
secure_stream_cb_routine, this);
}
else {
LOGI("RDI - rdi CB");
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
rdi_mode_stream_cb_routine, this);
}
} else {
if (isSecureMode()) {
LOGI("PREVIEW - secure CB");
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
secure_stream_cb_routine, this);
}
else {
LOGI("PREVIEW - non-secure CB");
uint32_t cam_type = CAM_TYPE_MAIN;
if (isDualCamera()) {
cam_type = (CAM_TYPE_MAIN | CAM_TYPE_AUX);
}
if (isNoDisplayMode(cam_type)) {
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW, nodisplay_preview_stream_cb_routine, this);
} else {
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,preview_stream_cb_routine, this);
======>
// Also install the synchronous per-frame callback when required.
if (needSyncCB(CAM_STREAM_TYPE_PREVIEW) == TRUE) {
pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW, synchronous_stream_cb_routine);
}
}
}
}
// Analysis stream (face detection etc.) for non-secure, non-recording cases.
if (((mParameters.fdModeInVideo()) || (mParameters.getDcrf() == true)
|| (mParameters.getRecordingHintValue() != true))&& (!isSecureMode())) {
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_ANALYSIS, NULL, this);
}
// When persist.camera.raw_yuv is set, add a RAW stream whose callback
// dumps frames (see preview_raw_stream_cb_routine).
property_get("persist.camera.raw_yuv", value, "0");
raw_yuv = atoi(value) > 0 ? true : false;
if ( raw_yuv ) {
rc = addStreamToChannel(pChannel,CAM_STREAM_TYPE_RAW, preview_raw_stream_cb_routine,this);
}
m_channels[QCAMERA_CH_TYPE_PREVIEW] = pChannel;
return rc;
}
由于将状态机切换为 m_state = QCAMERA_SM_STATE_PREVIEW_READY;
,所以代码如下:
@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
int32_t QCameraStateMachine::procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt, void *payload)
{
LOGL("event (%d)", evt);
switch (evt) {
case QCAMERA_SM_EVT_START_PREVIEW:
{
if (m_parent->mPreviewWindow != NULL) {
rc = m_parent->startPreview();
if (rc != NO_ERROR) {
m_parent->unpreparePreview();
m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
} else {
m_state = QCAMERA_SM_STATE_PREVIEWING;
}
}
// no ops here
rc = NO_ERROR;
result.status = rc;
result.request_api = evt;
result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
m_parent->signalAPIResult(&result);
}
break;
开始调用 m_parent->startPreview()
,同时将状态切换为 m_state = QCAMERA_SM_STATE_PREVIEWING;
m_parent 定义为 QCamera2HardwareInterface *m_parent; // ptr to HWI
最终调用 startChannel(QCAMERA_CH_TYPE_PREVIEW);
@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
// Starts the appropriate channel for the current mode (ZSL / secure RAW /
// plain preview), plus the callback channel when preview frames are
// requested by the app.
// NOTE(review): the "=======>" line is the blog's annotation showing what
// startChannel() does internally; it is not compilable code.
int QCamera2HardwareInterface::startPreview()
{
KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_STARTPREVIEW);
int32_t rc = NO_ERROR;
LOGI("E ZSL = %d Recording Hint = %d", mParameters.isZSLMode(), mParameters.getRecordingHintValue());
updateThermalLevel((void *)&mThermalLevel);
setDisplayFrameSkip();
// start preview stream
if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
rc = startChannel(QCAMERA_CH_TYPE_ZSL);
} else if (isSecureMode()) {
if (mParameters.getSecureStreamType() == CAM_STREAM_TYPE_RAW && !isRdiMode()) {
rc = startChannel(QCAMERA_CH_TYPE_RAW);
}else {
rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
}
} else {
rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
=======>
rc = m_channels[ch_type]->start();
}
if (isDualCamera()) {
if (rc == NO_ERROR) {
mParameters.setDCDeferCamera(CAM_DEFER_PROCESS);
mParameters.setDCLowPowerMode(mActiveCameras);
} else {
mParameters.setDCDeferCamera(CAM_DEFER_FLUSH);
}
}
// Also start the callback channel when the app asked for preview frames.
if ((msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) && (m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL)) {
rc = startChannel(QCAMERA_CH_TYPE_CALLBACK);
}
updatePostPreviewParameters();
m_stateMachine.setPreviewCallbackNeeded(true);
// if job id is non-zero, that means the postproc init job is already
// pending or complete
if (mInitPProcJob == 0&& !(isSecureMode() && isRdiMode())) {
mInitPProcJob = deferPPInit();
}
m_bPreviewStarted = true;
//configure snapshot skip for dual camera usecases
configureSnapshotSkip(true);
LOGI("X rc = %d", rc);
return rc;
}
在 m_channels[QCAMERA_CH_TYPE_PREVIEW]->start()
中,
// Starts the channel: pushes bundle info to MCT for every stream, starts
// each stream, starts the channel at the mm-camera interface, and finally
// releases stream threads waiting on the start condition.
int32_t QCameraChannel::start()
{
    // fixed: 'rc' was used without a declaration in the excerpt.
    int32_t rc = NO_ERROR;
    // there is more than one stream in the channel
    // we need to notify mctl that all streams in this channel need to be bundled
    for (size_t i = 0; i < mStreams.size(); i++) {
        if ((mStreams[i] != NULL) &&(validate_handle(m_handle, mStreams[i]->getChannelHandle()))) {
            mStreams[i]->setBundleInfo();
        }
    }
    // Start each stream belonging to this channel.
    for (size_t i = 0; i < mStreams.size(); i++) {
        if ((mStreams[i] != NULL) && (validate_handle(m_handle, mStreams[i]->getChannelHandle()))) {
            mStreams[i]->start();
        }
    }
    // Kick the mm-camera interface to start the whole channel.
    rc = m_camOps->start_channel(m_camHandle, m_handle);
    m_bIsActive = true;
    // Wake any stream threads blocked waiting for the channel to start.
    for (size_t i = 0; i < mStreams.size(); i++) {
        if (mStreams[i] != NULL) {
            mStreams[i]->cond_signal();
        }
    }
    return rc;
}
可以看到 在代码中对每个stream 进行操作:
mStreams[i]->setBundleInfo();
mStreams[i]->start();
m_camOps->start_channel(m_camHandle, m_handle);
mStreams[i]->cond_signal();
我们先来看下有哪些 stream 。
可以看到 ,我们可能有这三个stream : CAM_STREAM_TYPE_METADATA
、CAM_STREAM_TYPE_PREVIEW
、CAM_STREAM_TYPE_RAW
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA, metadata_stream_cb_routine, this);
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,preview_stream_cb_routine, this);
pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW, synchronous_stream_cb_routine);
rc = addStreamToChannel(pChannel,CAM_STREAM_TYPE_RAW, preview_raw_stream_cb_routine,this);
@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
/* camera ops v-table */
// Binds every mm_camera_ops_t entry point to its mm_camera_intf_*
// implementation; HAL code reaches these through mCameraHandle->ops
// (e.g. start_channel in QCameraChannel::start()).
static mm_camera_ops_t mm_camera_ops = {
.query_capability = mm_camera_intf_query_capability,
.register_event_notify = mm_camera_intf_register_event_notify,
.close_camera = mm_camera_intf_close,
.set_parms = mm_camera_intf_set_parms,
.get_parms = mm_camera_intf_get_parms,
.do_auto_focus = mm_camera_intf_do_auto_focus,
.cancel_auto_focus = mm_camera_intf_cancel_auto_focus,
.prepare_snapshot = mm_camera_intf_prepare_snapshot,
.start_zsl_snapshot = mm_camera_intf_start_zsl_snapshot,
.stop_zsl_snapshot = mm_camera_intf_stop_zsl_snapshot,
.map_buf = mm_camera_intf_map_buf,
.map_bufs = mm_camera_intf_map_bufs,
.unmap_buf = mm_camera_intf_unmap_buf,
.add_channel = mm_camera_intf_add_channel,
.delete_channel = mm_camera_intf_del_channel,
.get_bundle_info = mm_camera_intf_get_bundle_info,
.add_stream = mm_camera_intf_add_stream,
.link_stream = mm_camera_intf_link_stream,
.delete_stream = mm_camera_intf_del_stream,
.config_stream = mm_camera_intf_config_stream,
.qbuf = mm_camera_intf_qbuf,
.cancel_buffer = mm_camera_intf_cancel_buf,
.get_queued_buf_count = mm_camera_intf_get_queued_buf_count,
.map_stream_buf = mm_camera_intf_map_stream_buf,
.map_stream_bufs = mm_camera_intf_map_stream_bufs,
.unmap_stream_buf = mm_camera_intf_unmap_stream_buf,
.set_stream_parms = mm_camera_intf_set_stream_parms,
.get_stream_parms = mm_camera_intf_get_stream_parms,
.start_channel = mm_camera_intf_start_channel,
.stop_channel = mm_camera_intf_stop_channel,
.request_super_buf = mm_camera_intf_request_super_buf,
.cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
.flush_super_buf_queue = mm_camera_intf_flush_super_buf_queue,
.configure_notify_mode = mm_camera_intf_configure_notify_mode,
.process_advanced_capture = mm_camera_intf_process_advanced_capture,
.get_session_id = mm_camera_intf_get_session_id,
.set_dual_cam_cmd = mm_camera_intf_set_dual_cam_cmd,
.flush = mm_camera_intf_flush,
.register_stream_buf_cb = mm_camera_intf_register_stream_buf_cb,
.register_frame_sync = mm_camera_intf_reg_frame_sync,
.handle_frame_sync_cb = mm_camera_intf_handle_frame_sync_cb
};
@ hardware/qcom/camera/QCamera2/HAL/QCameraStream.cpp
// set bundle for this stream to MCT
int32_t QCameraStream::setBundleInfo()
{
cam_stream_parm_buffer_t param, aux_param;
uint32_t active_handle = get_main_camera_handle(mChannelHandle);
=====>
@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
return mm_camera_util_get_handle_by_num(0, handle);
memset(&bundleInfo, 0, sizeof(bundleInfo));
if (active_handle) {
// 1. 检查当前 mCamHandle 是否有绑定 stream
ret = mCamOps->get_bundle_info(mCamHandle, active_handle,&bundleInfo);
=============================>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
+ rc = mm_camera_get_bundle_info(my_obj, m_chid, bundle_info);
+ ==============>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
+ rc = mm_channel_fsm_fn(ch_obj,MM_CHANNEL_EVT_GET_BUNDLE_INFO,(void *)bundle_info,NULL);
+ =================>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
+ rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
+ =================>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
+ case MM_CHANNEL_EVT_GET_BUNDLE_INFO:
+ cam_bundle_config_t *payload = (cam_bundle_config_t *)in_val;
+ rc = mm_channel_get_bundle_info(my_obj, payload);
+ ==================>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
+ s_obj = mm_channel_util_get_stream_by_handler(my_obj, my_obj->streams[i].my_hdl);
+ ================>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
+ for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if ((MM_STREAM_STATE_NOTUSED != ch_obj->streams[i].state) &&
+ (handler == ch_obj->streams[i].my_hdl)) {
+ s_obj = &ch_obj->streams[i];
+ break;
+ }
+ }
+ return s_obj;
<=============================
memset(¶m, 0, sizeof(cam_stream_parm_buffer_t));
param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
param.bundleInfo = bundleInfo;
}
mStreamInfo->parm_buf = param;
if ((aux_param.bundleInfo.num_of_streams > 1)&& (mStreamInfo->aux_str_info != NULL)) {
mStreamInfo->aux_str_info->parm_buf = aux_param;
}
if (mStreamInfo->parm_buf.bundleInfo.num_of_streams > 1) {
uint32_t channelHdl = get_main_camera_handle(mChannelHandle);
uint32_t streamHdl = get_main_camera_handle(mHandle);
if (mCamType == CAM_TYPE_AUX) {
channelHdl = mChannelHandle;
streamHdl = mHandle;
}
ret = mCamOps->set_stream_parms(mCamHandle, channelHdl, streamHdl, &mStreamInfo->parm_buf);
==================>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
+ rc = mm_camera_set_stream_parms(my_obj, chid, strid, parms);
+ ==============>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
+ rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_SET_STREAM_PARM, (void *)&payload, NULL);
+ ============>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
+ rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
+ =============>
+ case MM_CHANNEL_EVT_GET_STREAM_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_channel_get_stream_parm(my_obj, payload);
+ ===============>
+ rc = mm_stream_fsm_fn(s_obj, MM_STREAM_EVT_GET_PARM,(void *)payload, NULL);
+ ============>
+ rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
+ =============>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ ============>
+ rc = mm_camera_util_g_ctrl(cam_obj, stream_id, my_obj->fd,
+ CAM_PRIV_STREAM_PARM, &value);
+ ==============>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
+ rc = ioctl(fd, VIDIOC_G_CTRL, &control);
+ LOGD("fd=%d, G_CTRL, id=0x%x, rc = %d\n", fd, id, rc);
+ if (value != NULL) {
+ *value = control.value;
+ }
+ ==============>
+ @ kernel/msm-4.4/drivers/media/v4l2-core/v4l2-subdev.c
+ static long subdev_do_ioctl(struct file *file, unsigned int cmd, void *arg)
+ {
+ case VIDIOC_G_CTRL:
+ return v4l2_g_ctrl(vfh->ctrl_handler, arg);
<=============================
}
}
初始化 mStreamMetaMemory 。
@ hardware/qcom/camera/QCamera2/HAL/QCameraStream.cpp
// Starts a single stream: activates its data queue, launches the frame
// processing thread, and resets per-stream bookkeeping.
// NOTE(review): the "+"-prefixed blocks between arrows are the blog's
// call-chain annotations showing what init()/launch() do internally.
int32_t QCameraStream::start()
{
int32_t rc = 0;
mDataQ.init();
=============>
+ @ hardware/qcom/camera/QCamera2/util/QCameraQueue.cpp
+ m_active = true;
<=============
// Launch the frame thread; it runs dataProcRoutine.
rc = mProcTh.launch(dataProcRoutine, this);
=============>
+ pthread_create(&cmd_pid, NULL, dataProcRoutine, user_data);
<=============
mCurMetaMemory = NULL;
mCurBufIndex = -1;
mCurMetaIndex = -1;
mFirstTimeStamp = 0;
memset (&mStreamMetaMemory, 0, (sizeof(MetaMemory) * CAMERA_MIN_VIDEO_BATCH_BUFFERS));
return rc;
}
// Per-stream frame-processing thread: waits on the command semaphore and,
// for each DO_NEXT_JOB, dequeues a frame and delivers it upstream through
// the stream's data callback (or returns the buffer when no callback is
// registered). Exits on CAMERA_CMD_TYPE_EXIT.
void *QCameraStream::dataProcRoutine(void *data)
{
    int running = 1;
    int ret;  // fixed: 'ret' was used without a declaration in the excerpt
    QCameraStream *pme = (QCameraStream *)data;
    QCameraCmdThread *cmdThread = &pme->mProcTh;
    cmdThread->setName("CAM_strmDatProc");
    LOGD("E");
    do {
        do { // Wait for the next command to be posted.
            ret = cam_sem_wait(&cmdThread->cmd_sem);
        } while (ret != 0);
        // we got notified about new cmd avail in cmd queue
        camera_cmd_type_t cmd = cmdThread->getCmd();
        switch (cmd) {
        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
            {
                LOGD("Do next job");
                // When a frame arrives, invoke the stream's callback so the
                // frame is reported to the layer above.
                mm_camera_super_buf_t *frame = (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
                if (NULL != frame) {
                    if (pme->mDataCB != NULL) {
                        pme->mDataCB(frame, pme, pme->mUserData);
                    } else {
                        // no data cb routine, return buf here
                        pme->bufDone(frame);
                        free(frame);
                    }
                }
            }
            break;
        case CAMERA_CMD_TYPE_EXIT:
            LOGH("Exit");
            /* flush data buf queue */
            pme->mDataQ.flush();
            running = 0;
            break;
        default:
            break;
        }
    } while (running);
    LOGH("X");
    return NULL;
}
@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
/* camera ops v-table */
static mm_camera_ops_t mm_camera_ops = {
.start_channel = mm_camera_intf_start_channel,
}
@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
// Interface-layer entry: resolves the camera object from the handle and
// starts the requested channel (which starts all streams in it).
// NOTE(review): the "+"-prefixed block between arrows is the blog's
// call-chain annotation, not compilable code.
static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
uint32_t ch_id)
{
int32_t rc = -1;
mm_camera_obj_t * my_obj = NULL;
uint32_t chid = get_main_camera_handle(ch_id);
uint32_t aux_chid = get_aux_camera_handle(ch_id);
if (chid) {
uint32_t handle = get_main_camera_handle(camera_handle);
my_obj = mm_camera_util_get_camera_by_handler(handle);
if(my_obj) {
// Start all streams in this channel.
rc = mm_camera_start_channel(my_obj, chid);
===================>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
+ rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_START, NULL, NULL);
+ =============>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
+ case MM_CHANNEL_STATE_STOPPED:
+ rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
+ ==============>
+ @ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
+ case MM_CHANNEL_EVT_START:{
+ rc = mm_channel_start(my_obj);
+ if (0 == rc) {
+ my_obj->state = MM_CHANNEL_STATE_ACTIVE;
+ }
<===================
}
}
LOGH("X ch_id = %u rc = %d", ch_id, rc);
return rc;
}
在 mm_camera_channel.c
的 mm_channel_start()
中
@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
// Starts every stream in a channel: orders metadata first, initializes the
// super-buffer bundle queue, launches the dispatch/processing threads, then
// for each stream allocates buffers, registers them, and streams on.
// NOTE(review): the "+"-prefixed blocks between arrows are the blog's
// call-chain annotations, not compilable code; the excerpt also elides the
// loop bodies that fill s_objs/num_streams_to_start.
int32_t mm_channel_start(mm_channel_t *my_obj)
{
int32_t rc = 0;
int i = 0, j = 0;
mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
uint8_t num_streams_to_start = 0;
uint8_t num_streams_in_bundle_queue = 0;
mm_stream_t *s_obj = NULL;
int meta_stream_idx = 0;
cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
// Collect the stream objects attached to this channel.
for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
if (my_obj->streams[i].my_hdl > 0) {
s_obj = mm_channel_util_get_stream_by_handler(my_obj, my_obj->streams[i].my_hdl);
}
}
if (meta_stream_idx > 0 ) {
/* always start meta data stream first, so switch the stream object with the first one */
s_obj = s_objs[0];
s_objs[0] = s_objs[meta_stream_idx];
s_objs[meta_stream_idx] = s_obj;
}
if (NULL != my_obj->bundle.super_buf_notify_cb) {
/* need to send up cb, therefore launch thread */
/* init superbuf queue */
mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
my_obj->bundle.superbuf_queue.num_streams = num_streams_in_bundle_queue;
my_obj->bundle.superbuf_queue.expected_frame_id = my_obj->bundle.superbuf_queue.attr.user_expected_frame_id;
my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
my_obj->bundle.superbuf_queue.led_on_num_frames = 0;
my_obj->bundle.superbuf_queue.good_frame_id = 0;
for (i = 0; i < num_streams_to_start; i++) {
/* Only bundle streams that belong to the channel */
if(!(s_objs[i]->stream_info->noFrameExpected) ||(s_objs[i]->is_frame_shared && (s_objs[i]->ch_obj == my_obj))) {
if (s_objs[i]->ch_obj == my_obj) {
/* set bundled flag to streams */
s_objs[i]->is_bundled = 1;
}
my_obj->bundle.superbuf_queue.bundled_streams[j] = s_objs[i]->my_hdl;
if (s_objs[i]->is_frame_shared && (s_objs[i]->ch_obj == my_obj)) {
mm_stream_t *dst_obj = NULL;
if (s_objs[i]->master_str_obj != NULL) {
dst_obj = s_objs[i]->master_str_obj;
} else if (s_objs[i]->aux_str_obj[0] != NULL) {
dst_obj = s_objs[i]->aux_str_obj[0];
}
if (dst_obj) {
my_obj->bundle.superbuf_queue.bundled_streams[j]|= dst_obj->my_hdl;
}
}
j++;
}
}
// Create a pthread named "CAM_SuperBuf".
/* launch cb thread for dispatching super buf through cb */
snprintf(my_obj->cb_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBuf");
mm_camera_cmd_thread_launch(&my_obj->cb_thread, mm_channel_dispatch_super_buf,(void*)my_obj);
// Create a pthread named "CAM_SuperBufCB" to process stream buffers.
/* launch cmd thread for super buf dataCB */
snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBufCB");
mm_camera_cmd_thread_launch(&my_obj->cmd_thread, mm_channel_process_stream_buf, (void*)my_obj);
===============>
pthread_create(&cmd_thread->cmd_pid, NULL, mm_channel_process_stream_buf, (void *)cmd_thread);
/* set flag to TRUE */
my_obj->bundle.is_active = TRUE;
}
/* link any streams first before starting the rest of the streams */
for (i = 0; i < num_streams_to_start; i++) {
if ((s_objs[i]->ch_obj != my_obj) && my_obj->bundle.is_active) {
pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
s_objs[i]->linked_stream->linked_obj = my_obj;
s_objs[i]->linked_stream->is_linked = 1;
pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
continue;
}
}
for (i = 0; i < num_streams_to_start; i++) {
if (s_objs[i]->ch_obj != my_obj) {
continue;
}
/* all streams within a channel should be started at the same time */
if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
LOGE("stream already started idx(%d)", i);
rc = -1;
break;
}
// Allocate the buffers for this stream.
/* allocate buf */
rc = mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_GET_BUF, NULL, NULL);
// Register the stream's buffers.
/* reg buf */
rc = mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_REG_BUF, NULL, NULL);
// Start the stream.
/* start stream */
rc = mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_START, NULL, NULL);
=================>
+ snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_StrmAppData");
+ mm_camera_cmd_thread_launch(&my_obj->cmd_thread, mm_stream_dispatch_app_data, (void *)my_obj);
+ my_obj->state = MM_STREAM_STATE_ACTIVE;
+ rc = mm_stream_streamon(my_obj);
<=================
}
my_obj->bWaitForPrepSnapshotDone = 0;
if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
LOGH("registering Channel obj %p", my_obj);
mm_frame_sync_register_channel(my_obj);
}
return rc;
}
@ hardware/qcom/camera/QCamera2/HAL/QCameraStream.cpp
/*===========================================================================
 * FUNCTION   : cond_signal
 *
 * DESCRIPTION: signal m_cond to wake a thread gated on wait_for_cond;
 *              optionally zero the pending buffer-allocation count so the
 *              waiter treats the wakeup as an exit request.
 *
 * PARAMETERS :
 *   @forceExit : when true, clear mNumBufsNeedAlloc before signalling
 *
 * RETURN     : None
 *==========================================================================*/
void QCameraStream::cond_signal(bool forceExit)
{
    pthread_mutex_lock(&m_lock);
    const bool waiterPending = (TRUE == wait_for_cond);
    if (waiterPending) {
        if (forceExit) {
            mNumBufsNeedAlloc = 0;
        }
        // clear the flag while still holding m_lock so the waiter's
        // predicate re-check observes it atomically with the signal
        wait_for_cond = FALSE;
        pthread_cond_signal(&m_cond);
    }
    pthread_mutex_unlock(&m_lock);
}
在 QCameraStream 中
主要是创建这三个stream , CAM_STREAM_TYPE_METADATA
、CAM_STREAM_TYPE_PREVIEW
、CAM_STREAM_TYPE_RAW
接着在各自的stream 中,注册对应的 pthread 线程,处理数据,
数据处理好后,调用对应的 callback 函数,将 frame 数据上报,最终把数据上报到上层。
在CAM_STREAM_TYPE_RAW
中,有点不同,在该线程中,是不停的将数据以文件 .yuv 的形式保存下来。
@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp
/*===========================================================================
* FUNCTION : metadata_stream_cb_routine
*
* DESCRIPTION: helper function to handle metadata frame from metadata stream
*
* PARAMETERS :
* @super_frame : received super buffer
* @stream : stream object
* @userdata : user data ptr
*
* RETURN : None
*
* NOTE : caller passes the ownership of super_frame, it's our
* responsibility to free super_frame once it's done. Metadata
* could have valid entries for face detection result or
* histogram statistics information.
*==========================================================================*/
void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
QCameraStream * stream, void * userdata)
{
ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_METADATA_STRM_CB);
LOGD("[KPI Perf] : BEGIN");
QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
// NOTE(review): userdata/super_frame/bufs[0] are dereferenced without NULL
// checks — assumes mm-camera-interface always delivers a valid buffer; confirm.
mm_camera_buf_def_t *frame = super_frame->bufs[0];
metadata_buffer_t *pMetaData = (metadata_buffer_t *)frame->buffer;
// Dual-camera path: identify main/aux metadata buffers and merge them via
// FOV control into a single result metadata; pMetaData is repointed to it.
if (pme->isDualCamera()) {
mm_camera_buf_def_t *frameMain = NULL;
mm_camera_buf_def_t *frameAux = NULL;
metadata_buffer_t *pMetaDataMain = NULL;
metadata_buffer_t *pMetaDataAux = NULL;
metadata_buffer_t *resultMetadata = NULL;
if (super_frame->num_bufs == MM_CAMERA_MAX_CAM_CNT) {
// Both cameras contributed a buffer; decide which one is main by
// matching the stream handle against the main camera handle.
if (get_main_camera_handle(super_frame->bufs[0]->stream_id)) {
frameMain = super_frame->bufs[0];
frameAux = super_frame->bufs[1];
} else {
frameMain = super_frame->bufs[1];
frameAux = super_frame->bufs[0];
}
pMetaDataMain = (metadata_buffer_t *)frameMain->buffer;
pMetaDataAux = (metadata_buffer_t *)frameAux->buffer;
} else {
// Single buffer: attribute it to main or aux by camera handle.
if (super_frame->camera_handle ==
get_main_camera_handle(pme->mCameraHandle->camera_handle)) {
pMetaDataMain = pMetaData;
pMetaDataAux = NULL;
} else if (super_frame->camera_handle ==
get_aux_camera_handle(pme->mCameraHandle->camera_handle)) {
pMetaDataMain = NULL;
pMetaDataAux = pMetaData;
}
}
//Wait for first preview frame to process Dual fov control
LOGD("pme->m_bFirstPreviewFrameReceived: %d", pme->m_bFirstPreviewFrameReceived);
// skip fillDualCameraFOVControl till HAL receives first preview frame
if (pme->m_bFirstPreviewFrameReceived) {
pme->fillDualCameraFOVControl();
}
if (pMetaDataAux && (pme->mParameters.getHalPPType() == CAM_HAL_PP_TYPE_BOKEH)) {
//Handle Tele metadata for sending bokeh messages to app
IF_META_AVAILABLE(cam_rtb_msg_type_t, rtb_metadata,
CAM_INTF_META_RTB_DATA, pMetaDataAux) {
//Handle this meta data only if capture is not in process
if (!pme->m_stateMachine.isCaptureRunning()) {
qcamera_sm_internal_evt_payload_t *payload =
(qcamera_sm_internal_evt_payload_t *)
malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_RTB_METADATA;
payload->rtb_data = *rtb_metadata;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
if (rc != NO_ERROR) {
LOGW("processEvt rtb update failed");
free(payload);
payload = NULL;
}
} else {
LOGE("No memory for rtb update qcamera_sm_internal_evt_payload_t");
}
}
}
}
resultMetadata = pme->m_pFovControl->processResultMetadata(pMetaDataMain, pMetaDataAux);
if (resultMetadata != NULL) {
pMetaData = resultMetadata;
} else {
// FOV control produced nothing usable: return the buffer and bail out
// (super_frame ownership is ours, so free it here).
stream->bufDone(super_frame);
free(super_frame);
return;
}
}
if(pme->m_stateMachine.isNonZSLCaptureRunning()&&
!pme->mLongshotEnabled) {
//Make shutter call back in non ZSL mode once raw frame is received from VFE.
pme->playShutter();
}
if (pMetaData->is_tuning_params_valid && pme->mParameters.getRecordingHintValue() == true) {
//Dump Tuning data for video
pme->dumpMetadataToFile(stream,frame,(char *)"Video");
}
// From here on: walk individual metadata entries. Most entries are wrapped
// into a qcamera_sm_internal_evt_payload_t and forwarded to the state
// machine via processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, ...).
IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, pMetaData) {
// process histogram statistics info
qcamera_sm_internal_evt_payload_t *payload =
(qcamera_sm_internal_evt_payload_t *)
malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS;
payload->stats_data = *stats_data;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
if (rc != NO_ERROR) {
LOGW("processEvt histogram failed");
free(payload);
payload = NULL;
}
} else {
LOGE("No memory for histogram qcamera_sm_internal_evt_payload_t");
}
}
IF_META_AVAILABLE(cam_face_detection_data_t, detection_data,
CAM_INTF_META_FACE_DETECTION, pMetaData) {
cam_faces_data_t faces_data;
pme->fillFacesData(faces_data, pMetaData);
faces_data.detection_data.fd_type = QCAMERA_FD_PREVIEW; //HARD CODE here before MCT can support
qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
payload->faces_data = faces_data;
// NOTE(review): unlike the histogram path above, rc is ignored here —
// payload leaks if processEvt fails; confirm whether this is intended.
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetaData) {
uint8_t forceAFUpdate = FALSE;
//1. Earlier HAL used to rely on AF done flags set in metadata to generate callbacks to
//upper layers. But in scenarios where metadata drops especially which contain important
//AF information, APP will wait indefinitely for focus result resulting in capture hang.
//2. HAL can check for AF state transitions to generate AF state callbacks to upper layers.
//This will help overcome metadata drop issue with the earlier approach.
//3. But sometimes AF state transitions can happen so fast within same metadata due to
//which HAL will receive only the final AF state. HAL may perceive this as no change in AF
//state depending on the state transitions happened (for example state A -> B -> A).
//4. To overcome the drawbacks of both the approaches, we go for a hybrid model in which
//we check state transition at both HAL level and AF module level. We rely on
//'state transition' meta field set by AF module for the state transition detected by it.
IF_META_AVAILABLE(uint8_t, stateChange, CAM_INTF_AF_STATE_TRANSITION, pMetaData) {
forceAFUpdate = *stateChange;
}
//This is a special scenario in which when scene modes like landscape are selected, AF mode
//gets changed to INFINITY at backend, but HAL will not be aware of it. Also, AF state in
//such cases will be set to CAM_AF_STATE_INACTIVE by backend. So, detect the AF mode
//change here and trigger AF callback @ processAutoFocusEvent().
IF_META_AVAILABLE(uint32_t, afFocusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
if (((cam_focus_mode_type)(*afFocusMode) == CAM_FOCUS_MODE_INFINITY) &&
pme->mActiveAF){
forceAFUpdate = TRUE;
}
}
if ((pme->m_currentFocusState != (*afState)) || forceAFUpdate) {
cam_af_state_t prevFocusState = pme->m_currentFocusState;
pme->m_currentFocusState = (cam_af_state_t)(*afState);
qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
payload->focus_data.focus_state = (cam_af_state_t)(*afState);
//Need to flush ZSL Q only if we are transitioning from scanning state
//to focused/not focused state.
payload->focus_data.flush_info.needFlush =
((prevFocusState == CAM_AF_STATE_PASSIVE_SCAN) ||
(prevFocusState == CAM_AF_STATE_ACTIVE_SCAN)) &&
((pme->m_currentFocusState == CAM_AF_STATE_FOCUSED_LOCKED) ||
(pme->m_currentFocusState == CAM_AF_STATE_NOT_FOCUSED_LOCKED));
payload->focus_data.flush_info.focused_frame_idx = frame->frame_idx;
IF_META_AVAILABLE(float, focusDistance,
CAM_INTF_META_LENS_FOCUS_DISTANCE, pMetaData) {
payload->focus_data.focus_dist.
focus_distance[CAM_FOCUS_DISTANCE_OPTIMAL_INDEX] = *focusDistance;
}
IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, pMetaData) {
payload->focus_data.focus_dist.
focus_distance[CAM_FOCUS_DISTANCE_NEAR_INDEX] = focusRange[0];
payload->focus_data.focus_dist.
focus_distance[CAM_FOCUS_DISTANCE_FAR_INDEX] = focusRange[1];
}
IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
payload->focus_data.focus_mode = (cam_focus_mode_type)(*focusMode);
}
IF_META_AVAILABLE(uint8_t, isDepthFocus,
CAM_INTF_META_FOCUS_DEPTH_INFO, pMetaData) {
payload->focus_data.isDepth = *isDepthFocus;
}
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
}
IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, pMetaData) {
// reject malformed crop info before copying it into the payload
if (crop_data->num_of_streams > MAX_NUM_STREAMS) {
LOGE("Invalid num_of_streams %d in crop_data",
crop_data->num_of_streams);
} else {
qcamera_sm_internal_evt_payload_t *payload =
(qcamera_sm_internal_evt_payload_t *)
malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_CROP_INFO;
payload->crop_data = *crop_data;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
}
IF_META_AVAILABLE(int32_t, prep_snapshot_done_state,CAM_INTF_META_PREP_SNAPSHOT_DONE, pMetaData) {
qcamera_sm_internal_evt_payload_t *payload =
(qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
payload->prep_snapshot_state = (cam_prep_snapshot_state_t)*prep_snapshot_done_state;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data, CAM_INTF_META_ASD_HDR_SCENE_DATA, pMetaData) {
LOGH("hdr_scene_data: %d %f\n",hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
//Handle this HDR meta data only if capture is not in process
if (!pme->m_stateMachine.isCaptureRunning()) {
qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_HDR_UPDATE;
payload->hdr_data = *hdr_scene_data;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
}
IF_META_AVAILABLE(cam_asd_decision_t, cam_asd_info,CAM_INTF_META_ASD_SCENE_INFO, pMetaData) {
qcamera_sm_internal_evt_payload_t *payload =
(qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
payload->asd_data = (cam_asd_decision_t)*cam_asd_info;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
IF_META_AVAILABLE(cam_awb_params_t, awb_params, CAM_INTF_META_AWB_INFO, pMetaData) {
LOGH(", metadata for awb params.");
qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_AWB_UPDATE;
payload->awb_data = *awb_params;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
} else {
LOGE("No memory for awb_update qcamera_sm_internal_evt_payload_t");
}
}
// The following entries only update cached EXIF/3A state on the HAL object;
// no event is posted for them.
IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, pMetaData) {
pme->mExifParams.sensor_params.flash_mode = (cam_flash_mode_t)*flash_mode;
}
IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, pMetaData) {
pme->mExifParams.sensor_params.flash_state = (cam_flash_state_t) *flash_state;
}
IF_META_AVAILABLE(float, aperture_value, CAM_INTF_META_LENS_APERTURE, pMetaData) {
pme->mExifParams.sensor_params.aperture_value = *aperture_value;
}
IF_META_AVAILABLE(cam_3a_params_t, ae_params, CAM_INTF_META_AEC_INFO, pMetaData) {
pme->mExifParams.cam_3a_params = *ae_params;
pme->mExifParams.cam_3a_params_valid = TRUE;
pme->mFlashNeeded = ae_params->flash_needed;
pme->mExifParams.cam_3a_params.brightness = (float) pme->mParameters.getBrightness();
qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_AE_UPDATE;
payload->ae_data = *ae_params;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, pMetaData) {
pme->mExifParams.cam_3a_params.wb_mode = (cam_wb_mode_type) *wb_mode;
}
IF_META_AVAILABLE(cam_sensor_params_t, sensor_params, CAM_INTF_META_SENSOR_INFO, pMetaData) {
pme->mExifParams.sensor_params = *sensor_params;
}
// 3A/stats debug blobs are only copied when the debug_params buffer was
// allocated (debug EXIF enabled).
IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params, CAM_INTF_META_EXIF_DEBUG_AE, pMetaData) {
if (pme->mExifParams.debug_params) {
pme->mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
pme->mExifParams.debug_params->ae_debug_params_valid = TRUE;
}
}
IF_META_AVAILABLE(cam_awb_exif_debug_t, awb_exif_debug_params, CAM_INTF_META_EXIF_DEBUG_AWB, pMetaData) {
if (pme->mExifParams.debug_params) {
pme->mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
pme->mExifParams.debug_params->awb_debug_params_valid = TRUE;
}
}
IF_META_AVAILABLE(cam_af_exif_debug_t, af_exif_debug_params, CAM_INTF_META_EXIF_DEBUG_AF, pMetaData) {
if (pme->mExifParams.debug_params) {
pme->mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
pme->mExifParams.debug_params->af_debug_params_valid = TRUE;
}
}
IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params, CAM_INTF_META_EXIF_DEBUG_ASD, pMetaData) {
if (pme->mExifParams.debug_params) {
pme->mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
pme->mExifParams.debug_params->asd_debug_params_valid = TRUE;
}
}
IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t, stats_exif_debug_params,
CAM_INTF_META_EXIF_DEBUG_STATS, pMetaData) {
if (pme->mExifParams.debug_params) {
pme->mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
pme->mExifParams.debug_params->stats_debug_params_valid = TRUE;
}
}
IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t, bestats_exif_debug_params,
CAM_INTF_META_EXIF_DEBUG_BESTATS, pMetaData) {
if (pme->mExifParams.debug_params) {
pme->mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
pme->mExifParams.debug_params->bestats_debug_params_valid = TRUE;
}
}
IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
CAM_INTF_META_EXIF_DEBUG_BHIST, pMetaData) {
if (pme->mExifParams.debug_params) {
pme->mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
pme->mExifParams.debug_params->bhist_debug_params_valid = TRUE;
}
}
IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
CAM_INTF_META_EXIF_DEBUG_3A_TUNING, pMetaData) {
if (pme->mExifParams.debug_params) {
pme->mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
pme->mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
}
}
IF_META_AVAILABLE(uint32_t, led_mode, CAM_INTF_META_LED_MODE_OVERRIDE, pMetaData) {
qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE;
payload->led_data = (cam_flash_mode_t)*led_mode;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
// Derive edge (sharpening) mode from the current sharpness parameter and
// write it back into this metadata batch; presumably consumed downstream
// by the backend — TODO confirm the consumer.
cam_edge_application_t edge_application;
memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
edge_application.sharpness = pme->mParameters.getSharpness();
if (edge_application.sharpness != 0) {
edge_application.edge_mode = CAM_EDGE_MODE_FAST;
} else {
edge_application.edge_mode = CAM_EDGE_MODE_OFF;
}
ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_META_EDGE_MODE, edge_application);
IF_META_AVAILABLE(cam_focus_pos_info_t, cur_pos_info,
CAM_INTF_META_FOCUS_POSITION, pMetaData) {
qcamera_sm_internal_evt_payload_t *payload =
(qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE;
payload->focus_pos = *cur_pos_info;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
if (pme->mParameters.getLowLightCapture()) {
IF_META_AVAILABLE(cam_low_light_mode_t, low_light_level,CAM_INTF_META_LOW_LIGHT, pMetaData) {
pme->mParameters.setLowLightLevel(*low_light_level);
}
}
IF_META_AVAILABLE(cam_dyn_img_data_t, dyn_img_data,CAM_INTF_META_IMG_DYN_FEAT, pMetaData) {
pme->mParameters.setDynamicImgData(*dyn_img_data);
}
IF_META_AVAILABLE(int32_t, touch_ae_status, CAM_INTF_META_TOUCH_AE_RESULT, pMetaData) {
LOGD("touch_ae_status: %d", *touch_ae_status);
}
IF_META_AVAILABLE(int32_t, led_result, CAM_INTF_META_LED_CALIB_RESULT, pMetaData) {
qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)malloc(
sizeof(qcamera_sm_internal_evt_payload_t));
if (NULL != payload) {
memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
payload->evt_type = QCAMERA_INTERNAL_EVT_LED_CALIB_UPDATE;
payload->led_calib_result = (int32_t)*led_result;
int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
}
}
// Done with the metadata buffer: return it to the stream and release the
// super_frame wrapper (ownership was passed to us, see NOTE in the header).
stream->bufDone(super_frame);
free(super_frame);
LOGD("[KPI Perf] : END");
}
@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp
/*===========================================================================
* FUNCTION : preview_stream_cb_routine
* DESCRIPTION: helper function to handle preview frame from preview stream in
* normal case with display.
* PARAMETERS :
* @super_frame : received super buffer
* @stream : stream object
* @userdata : user data ptr
* RETURN : None
* NOTE : caller passes the ownership of super_frame, it's our
* responsibility to free super_frame once it's done. The new
* preview frame will be sent to display, and an older frame
* will be dequeued from display and needs to be returned back
* to kernel for future use.
*==========================================================================*/
void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame, QCameraStream * stream, void *userdata)
{
CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREVIEW_STRM_CB);
LOGH("[KPI Perf] : BEGIN");
int err = NO_ERROR;
QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
// NOTE(review): super_frame/bufs[0] are dereferenced without NULL checks —
// assumes the interface layer always delivers a valid buffer; confirm.
QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
uint8_t dequeueCnt = 0;
mm_camera_buf_def_t *dumpBuffer = NULL;
mm_camera_buf_def_t *frame = super_frame->bufs[0];
// For instant capture and for instant AEC, keep track of the frame counter.
// This count will be used to check against the corresponding bound values.
if (pme->mParameters.isInstantAECEnabled() || pme->mParameters.isInstantCaptureEnabled()) {
pme->mInstantAecFrameCount++;
}
// Under mGrallocLock: record latest frame id and decide whether this frame
// should be dropped instead of being sent to the display.
pthread_mutex_lock(&pme->mGrallocLock);
if (!stream->isSyncCBEnabled()) {
pme->mLastPreviewFrameID = frame->frame_idx;
}
bool discardFrame = false;
if (!stream->isSyncCBEnabled() && !pme->needProcessPreviewFrame(frame->frame_idx))
{
discardFrame = true;
} else if (stream->isSyncCBEnabled() && memory->isBufSkipped(frame->buf_idx)) {
discardFrame = true;
memory->setBufferStatus(frame->buf_idx, STATUS_IDLE);
}
pthread_mutex_unlock(&pme->mGrallocLock);
if (discardFrame) {
LOGH("preview is not running, no need to process");
// return the buffer to the stream immediately; note execution continues
// below (no early return) so the display dequeue loop still runs
stream->bufDone(frame->buf_idx);
}
uint32_t idx = frame->buf_idx;
// First preview frame after startPreview: release the start-up perf locks
// and switch to the steady-state preview power hint.
if(pme->m_bPreviewStarted) {
LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
pme->m_perfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
pme->m_perfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
pme->m_bPreviewStarted = false;
pme->m_bFirstPreviewFrameReceived = true;
// Set power Hint for preview
pme->m_perfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_PREVIEW, 0);
}
// Hand the new frame to the display (gralloc queue) unless it was discarded.
if (!stream->isSyncCBEnabled() && !discardFrame) {
if (pme->needDebugFps()) {
pme->debugShowPreviewFPS();
}
memory->setBufferStatus(frame->buf_idx, STATUS_ACTIVE);
LOGD("Enqueue Buffer to display %d", idx);
#ifdef TARGET_TS_MAKEUP
pme->TsMakeupProcess_Preview(frame,stream);
#endif
err = memory->enqueueBuffer(idx);
if (err == NO_ERROR) {
pthread_mutex_lock(&pme->mGrallocLock);
pme->mEnqueuedBuffers++;
dequeueCnt = pme->mEnqueuedBuffers;
pthread_mutex_unlock(&pme->mGrallocLock);
}
} else {
pthread_mutex_lock(&pme->mGrallocLock);
dequeueCnt = pme->mEnqueuedBuffers;
pthread_mutex_unlock(&pme->mGrallocLock);
}
// Drain buffers the display has finished with: map new ones to the backend
// if needed, run dump/preview callbacks, then return them to the driver.
uint8_t numMapped = memory->getMappable();
LOGD("EnqueuedCnt %d numMapped %d", dequeueCnt, numMapped);
for (uint8_t i = 0; i < dequeueCnt; i++) {
int dequeuedIdx = memory->dequeueBuffer();
LOGD("dequeuedIdx %d numMapped %d Loop running for %d", dequeuedIdx, numMapped, i);
if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
LOGE("Invalid dequeued buffer index %d from display",
dequeuedIdx);
break;
} else {
pthread_mutex_lock(&pme->mGrallocLock);
pme->mEnqueuedBuffers--;
pthread_mutex_unlock(&pme->mGrallocLock);
if (dequeuedIdx >= numMapped) {
// This buffer has not yet been mapped to the backend
err = stream->mapNewBuffer((uint32_t)dequeuedIdx);
if (memory->checkIfAllBuffersMapped()) {
// check if mapping is done for all the buffers
// Signal the condition for create jpeg session
Mutex::Autolock l(pme->mMapLock);
pme->mMapCond.signal();
LOGH("Mapping done for all bufs");
} else {
LOGH("All buffers are not yet mapped");
}
}
}
// Get the updated mappable buffer count since it's modified in dequeueBuffer()
numMapped = memory->getMappable();
if (err < 0) {
LOGE("buffer mapping failed %d", err);
} else {
// Dump preview frames
if (memory->isBufActive(dequeuedIdx)) {
dumpBuffer = stream->getBuffer(dequeuedIdx);
pme->dumpFrameToFile(stream, dumpBuffer, QCAMERA_DUMP_FRM_PREVIEW);
}
// Handle preview data callback
if (pme->m_channels[QCAMERA_CH_TYPE_CALLBACK] == NULL) {
if (pme->needSendPreviewCallback() &&
memory->isBufActive(dequeuedIdx) &&
(!pme->mParameters.isSceneSelectionEnabled())) {
// mark that the CPU has read this buffer so cache maintenance
// is handled correctly on the way back to the driver
frame->cache_flags |= CPU_HAS_READ;
int32_t rc = pme->sendPreviewCallback(stream, memory, dequeuedIdx);
if (NO_ERROR != rc) {
LOGW("Preview callback was not sent succesfully");
}
}
}
// Return dequeued buffer back to driver
err = stream->bufDone((uint32_t)dequeuedIdx);
if ( err < 0) {
LOGW("stream bufDone failed %d", err);
err = NO_ERROR;
}
memory->setBufferStatus(dequeuedIdx, STATUS_IDLE);
}
}
// super_frame wrapper ownership is ours (see header NOTE); release it.
free(super_frame);
LOGH("[KPI Perf] : END");
return;
}
@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp
/*===========================================================================
 * FUNCTION   : preview_raw_stream_cb_routine
 *
 * DESCRIPTION: helper function to handle raw frame during standard preview
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @stream      : stream object
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : caller passes the ownership of super_frame, it's our
 *              responsibility to free super_frame once it's done.
 *==========================================================================*/
void QCamera2HardwareInterface::preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
        QCameraStream * stream,
        void * userdata)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREVIEW_RAW_STRM_CB);
    LOGH("[KPI Perf] : BEGIN");
    char propValue[PROPERTY_VALUE_MAX];
    bool dump_preview_raw = false, dump_video_raw = false;
    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
    // Sanity: require a valid HWI object whose camera handle matches the
    // handle this frame was delivered on.
    bool objValid = (pme != NULL) && (pme->mCameraHandle != NULL);
    if (objValid) {
        objValid = validate_handle(pme->mCameraHandle->camera_handle,
                super_frame->camera_handle);
    }
    if (!objValid) {
        LOGE("camera obj not valid");
        // simply free super frame
        free(super_frame);
        return;
    }
    mm_camera_buf_def_t *rawBuf = super_frame->bufs[0];
    if (rawBuf != NULL) {
        // Raw dumping is driven by persist properties; video-raw dumping
        // additionally requires the recording hint to be set.
        property_get("persist.camera.preview_raw", propValue, "0");
        dump_preview_raw = (atoi(propValue) > 0);
        property_get("persist.camera.video_raw", propValue, "0");
        dump_video_raw = (atoi(propValue) > 0);
        bool wantDump = dump_preview_raw ||
                (pme->mParameters.getRecordingHintValue() && dump_video_raw);
        if (wantDump) {
            pme->dumpFrameToFile(stream, rawBuf, QCAMERA_DUMP_FRM_RAW);
        }
        // return the buffer to the stream once dumping is done
        stream->bufDone(rawBuf->buf_idx);
    }
    free(super_frame);
    LOGH("[KPI Perf] : END");
}
@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
/*===========================================================================
 * FUNCTION : mm_stream_dispatch_app_data
 *
 * DESCRIPTION: dispatch stream buffer to registered users
 *
 * PARAMETERS :
 * @cmd_cb : ptr storing stream buffer information
 * @userdata: user data ptr (stream object)
 *
 * RETURN : none
 *==========================================================================*/
static void mm_stream_dispatch_app_data(mm_camera_cmdcb_t *cmd_cb,
void* user_data)
{
int i;
mm_stream_t * my_obj = (mm_stream_t *)user_data;
mm_camera_buf_info_t* buf_info = NULL;
mm_camera_super_buf_t super_buf;
/* m_obj tracks the master stream object when frame-sync is bundled;
 * it defaults to this stream */
mm_stream_t *m_obj = my_obj;
if (NULL == my_obj) {
return;
}
LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
my_obj->my_hdl, my_obj->fd, my_obj->state);
if (MM_CAMERA_CMD_TYPE_DATA_CB != cmd_cb->cmd_type) {
LOGE("Wrong cmd_type (%d) for dataCB",
cmd_cb->cmd_type);
return;
}
/* wrap the single incoming buffer as a one-entry super buffer for the
 * registered callbacks */
buf_info = &cmd_cb->u.buf;
memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
super_buf.num_bufs = 1;
super_buf.bufs[0] = buf_info->buf;
super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
super_buf.ch_id = my_obj->ch_obj->my_hdl;
if (m_obj->master_str_obj != NULL) {
m_obj = m_obj->master_str_obj;
}
/* lock order: master's frame_sync.sync_lock, then (briefly) this stream's
 * buf_lock / cb_lock — keep this order to avoid deadlock */
pthread_mutex_lock(&m_obj->frame_sync.sync_lock);
LOGD("frame_sync.is_active = %d, is_cb_active = %d",
m_obj->frame_sync.is_active, my_obj->is_cb_active);
if (m_obj->frame_sync.is_active) {
/* frame sync active: take one ref for the muxer and hand the buffer
 * over for cross-camera frame matching */
pthread_mutex_lock(&my_obj->buf_lock);
my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
pthread_mutex_unlock(&my_obj->buf_lock);
mm_camera_muxer_stream_frame_sync(&super_buf, my_obj);
} else if (my_obj->is_cb_active) {
pthread_mutex_lock(&my_obj->cb_lock);
for(i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
/* SYNC-type callbacks are served directly in mm_stream_data_notify;
 * only async callbacks are dispatched here */
if(NULL != my_obj->buf_cb[i].cb
&& (my_obj->buf_cb[i].cb_type !=
MM_CAMERA_STREAM_CB_TYPE_SYNC)) {
if (my_obj->buf_cb[i].cb_count != 0) {
/* if <0, means infinite CB
* if >0, means CB for certain times
* both case we need to call CB */
/* increase buf ref cnt */
pthread_mutex_lock(&my_obj->buf_lock);
my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
pthread_mutex_unlock(&my_obj->buf_lock);
/* callback */
my_obj->buf_cb[i].cb(&super_buf,
my_obj->buf_cb[i].user_data);
}
/* if >0, reduce count by 1 every time we called CB until reaches 0
* when count reach 0, reset the buf_cb to have no CB */
if (my_obj->buf_cb[i].cb_count > 0) {
my_obj->buf_cb[i].cb_count--;
if (0 == my_obj->buf_cb[i].cb_count) {
my_obj->buf_cb[i].cb = NULL;
my_obj->buf_cb[i].user_data = NULL;
}
}
}
}
pthread_mutex_unlock(&my_obj->cb_lock);
}
pthread_mutex_unlock(&m_obj->frame_sync.sync_lock);
/* do buf_done since we increased refcnt by one when has_cb */
mm_stream_buf_done(my_obj, buf_info->buf);
}
当有数据到达时,会回调
# /hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
/*===========================================================================
* FUNCTION : mm_stream_data_notify
*
* DESCRIPTION: callback to handle data notify from kernel
*
* PARAMETERS :
* @user_data : user data ptr (stream object)
*
* RETURN : none
*==========================================================================*/
static void mm_stream_data_notify(void* user_data)
{
mm_stream_t *my_obj = (mm_stream_t*)user_data;
uint8_t has_cb = 0, length = 0;
mm_camera_buf_info_t buf_info;
LOGD("E, my_handle = 0x%x, fd = %d, state = %d", my_obj->my_hdl, my_obj->fd, my_obj->state);
if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
length = 1;
} else {
length = my_obj->frame_offset.num_planes;
}
memset(&buf_info, 0, sizeof(mm_camera_buf_info_t));
rc = mm_stream_read_msm_frame(my_obj, &buf_info,(uint8_t)length);
uint32_t idx = buf_info.buf->buf_idx;
pthread_mutex_lock(&my_obj->cb_lock);
for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
if(NULL != my_obj->buf_cb[i].cb) {
if (my_obj->buf_cb[i].cb_type == MM_CAMERA_STREAM_CB_TYPE_SYNC) {
/*For every SYNC callback, send data*/
mm_stream_dispatch_sync_data(my_obj, &my_obj->buf_cb[i], &buf_info);
} else {
/* for every ASYNC CB, need ref count */
has_cb = 1;
}
}
}
pthread_mutex_unlock(&my_obj->cb_lock);
pthread_mutex_lock(&my_obj->buf_lock);
/* update buffer location */
my_obj->buf_status[idx].in_kernel = 0;
/* update buf ref count */
if (my_obj->is_bundled) {
/* need to add into super buf since bundled, add ref count */
my_obj->buf_status[idx].buf_refcnt++;
}
my_obj->buf_status[idx].buf_refcnt =
(uint8_t)(my_obj->buf_status[idx].buf_refcnt + has_cb);
pthread_mutex_unlock(&my_obj->buf_lock);
mm_stream_handle_rcvd_buf(my_obj, &buf_info, has_cb);
}
在 mm_stream_read_msm_frame 中
/* Dequeue one frame from the kernel V4L2 queue (VIDIOC_DQBUF) and fill
 * buf_info with the buffer's index, frame sequence number and timestamp.
 * @my_obj     : stream object owning the V4L2 fd
 * @buf_info   : out param, populated on success
 * @num_planes : number of planes to pass in the v4l2_buffer
 * Returns the ioctl result (0 on success, <0 on DQBUF failure). */
int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
mm_camera_buf_info_t* buf_info,
uint8_t num_planes)
{
int32_t rc = 0;
struct v4l2_buffer vb;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
LOGD("E, my_handle = 0x%x, fd = %d, state = %d", my_obj->my_hdl, my_obj->fd, my_obj->state);
memset(&vb, 0, sizeof(vb));
vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
vb.memory = V4L2_MEMORY_USERPTR;
vb.m.planes = &planes[0];
vb.length = num_planes;
rc = ioctl(my_obj->fd, VIDIOC_DQBUF, &vb);
if (0 > rc) {
LOGE("VIDIOC_DQBUF ioctl call failed on stream type %d (rc=%d): %s",
my_obj->stream_info->stream_type, rc, strerror(errno));
} else {
/* When the kernel has no more queued buffers, stop polling this stream
 * until buffers are re-queued, to avoid busy-waking the poll thread. */
pthread_mutex_lock(&my_obj->buf_lock);
my_obj->queued_buffer_count--;
if (0 == my_obj->queued_buffer_count) {
LOGH("Stoping poll on stream %p type: %d",
my_obj, my_obj->stream_info->stream_type);
mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
my_obj->my_hdl, mm_camera_async_call);
LOGH("Stopped poll on stream %p type: %d",
my_obj, my_obj->stream_info->stream_type);
}
pthread_mutex_unlock(&my_obj->buf_lock);
/* Copy v4l2 buffer identity/timestamp into our buf bookkeeping.
 * Timestamp: kernel reports usec; stored as nsec. */
uint32_t idx = vb.index;
buf_info->buf = &my_obj->buf[idx];
buf_info->frame_idx = vb.sequence;
buf_info->stream_id = my_obj->my_hdl;
buf_info->buf->stream_id = my_obj->my_hdl;
buf_info->buf->buf_idx = idx;
buf_info->buf->frame_idx = vb.sequence;
buf_info->buf->ts.tv_sec = vb.timestamp.tv_sec;
buf_info->buf->ts.tv_nsec = vb.timestamp.tv_usec * 1000;
buf_info->buf->flags = vb.flags;
LOGH("VIDIOC_DQBUF buf_index %d, frame_idx %d, stream type %d, rc %d,"
"queued: %d, buf_type = %d flags = %d",
vb.index, buf_info->buf->frame_idx,
my_obj->stream_info->stream_type, rc,
my_obj->queued_buffer_count, buf_info->buf->buf_type,
buf_info->buf->flags);
/* vb.reserved carries the actual pixel format; NV14/NV41 indicate the
 * chroma planes were subsampled — noted for downstream consumers */
buf_info->buf->is_uv_subsampled =
(vb.reserved == V4L2_PIX_FMT_NV14 || vb.reserved == V4L2_PIX_FMT_NV41);
/* USERPTR container buffers hold a batch of frames; expand them */
if(buf_info->buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
mm_stream_read_user_buf(my_obj, buf_info);
}
/* invalidate CPU cache before the CPU reads this freshly-DMA'd buffer */
if ( NULL != my_obj->mem_vtbl.clean_invalidate_buf ) {
rc = my_obj->mem_vtbl.clean_invalidate_buf(idx, my_obj->mem_vtbl.user_data);
}
}
LOGD("X rc = %d",rc);
return rc;
}
在 mm_stream_read_user_buf 中:
/*
 * mm_stream_read_user_buf:
 *   Expand a dequeued "batch" (user/container) buffer into its constituent
 *   plane buffers.  The container (msm_camera_user_buf_cont_t) carries
 *   buf_cnt sub-frame indices; each sub-frame gets a derived frame id and
 *   an interpolated timestamp spread across the batch interval.
 *
 *   @my_obj   : stream object owning the buffers and the prev_timestamp /
 *               prev_frameID interpolation state
 *   @buf_info : dequeued batch buffer info (ts/frame_idx filled by caller)
 *
 *   Returns 0; a non-monotonic timestamp also returns 0 after re-queueing
 *   the batch buffer back to the driver.
 */
int32_t mm_stream_read_user_buf(mm_stream_t * my_obj, mm_camera_buf_info_t* buf_info)
{
int32_t rc = 0, i;
mm_camera_buf_def_t *stream_buf = NULL;
struct msm_camera_user_buf_cont_t *user_buf = NULL;
nsecs_t interval_nsec = 0, frame_ts = 0, timeStamp = 0;
int ts_delta = 0;
uint32_t frameID = 0;
/* The batch buffer's payload is a container listing the sub-frame indices. */
user_buf = (struct msm_camera_user_buf_cont_t *)buf_info->buf->buffer;
/* Map the batch's frame_idx to the first sub-frame's id: batch N > 1
 * starts at (N-1)*buf_cnt; the very first batch keeps id 1. */
if (buf_info->buf->frame_idx == 1) {
frameID = buf_info->buf->frame_idx;
}else {
frameID = (buf_info->buf->frame_idx - 1) * user_buf->buf_cnt;
}
/* Batch timestamp in nanoseconds. */
timeStamp = (nsecs_t)(buf_info->buf->ts.tv_sec) * 1000000000LL + buf_info->buf->ts.tv_nsec;
if (timeStamp <= my_obj->prev_timestamp) {
/* Timestamp went backwards (or repeated): drop this batch by handing the
 * buffer straight back to the driver; rc stays 0 (best-effort skip). */
LOGE("TimeStamp received less than expected");
mm_stream_qbuf(my_obj, buf_info->buf);
return rc;
} else if (my_obj->prev_timestamp == 0 || (my_obj->prev_frameID != buf_info->buf->frame_idx + 1)) {
/* First batch, or a batch was dropped: no usable previous timestamp, so
 * synthesize one from the nominal per-frame interval.
 * NOTE(review): frameInterval is scaled by 1e6, i.e. assumed to be in
 * milliseconds -- confirm against user_buf_info's definition.
 * NOTE(review): the drop check reads prev_frameID != frame_idx + 1;
 * since prev_frameID stores the PREVIOUS batch's frame_idx, consecutive
 * batches satisfy frame_idx == prev_frameID + 1, so one would expect
 * (buf_info->buf->frame_idx != my_obj->prev_frameID + 1) here -- as
 * written this branch is taken for consecutive batches too.  Verify
 * against the driver's sequence-number contract before changing. */
interval_nsec = ((my_obj->stream_info->user_buf_info.frameInterval) * 1000000);
my_obj->prev_timestamp = (timeStamp - (nsecs_t)(user_buf->buf_cnt * interval_nsec));
} else {
/* Normal case: spread the measured batch-to-batch delta evenly over the
 * buf_cnt sub-frames. */
ts_delta = timeStamp - my_obj->prev_timestamp;
interval_nsec = (nsecs_t)(ts_delta / user_buf->buf_cnt);
LOGD("Timestamp delta = %d timestamp = %lld", ts_delta, timeStamp);
}
/* Stamp each sub-frame buffer with its expanded frame id and an
 * interpolated timestamp offset from the previous batch's timestamp. */
for (i = 0; i < (int32_t)user_buf->buf_cnt; i++) {
buf_info->buf->user_buf.buf_idx[i] = user_buf->buf_idx[i];
stream_buf = &my_obj->plane_buf[user_buf->buf_idx[i]];
stream_buf->frame_idx = frameID + i;
frame_ts = (i * interval_nsec) + my_obj->prev_timestamp;
stream_buf->ts.tv_sec = (frame_ts / 1000000000LL);
stream_buf->ts.tv_nsec = (frame_ts - (stream_buf->ts.tv_sec * 1000000000LL));
stream_buf->is_uv_subsampled = buf_info->buf->is_uv_subsampled;
LOGD("buf_index %d, frame_idx %d, stream type %d, timestamp = %lld",
stream_buf->buf_idx, stream_buf->frame_idx, my_obj->stream_info->stream_type, frame_ts);
}
/* Rewrite the container buffer's own timestamp to the (possibly synthesized)
 * previous-batch timestamp and record how many sub-frames are in use. */
buf_info->buf->ts.tv_sec = (my_obj->prev_timestamp / 1000000000LL);
buf_info->buf->ts.tv_nsec = (my_obj->prev_timestamp - (buf_info->buf->ts.tv_sec * 1000000000LL));
buf_info->buf->user_buf.bufs_used = user_buf->buf_cnt;
buf_info->buf->user_buf.buf_in_use = 1;
/* Carry state forward for the next batch's interpolation / drop check. */
my_obj->prev_timestamp = timeStamp;
my_obj->prev_frameID = buf_info->buf->frame_idx;
LOGD("X rc = %d",rc);
return rc;
}
参考:
【Camera专题】HAL层-深入浅出startPreview
camera 的 startPreview 流程
Android: Camera1 open、preview、take picture流程分析
待看:
Android 相机
c枫_撸码的日子
【Camera专题】Qcom-Camera驱动框架浅析(Hal层->Driver层)
c枫_撸码的日子
QCamera学习笔记
qualcomm camera