
[Qualcomm SDM660 Platform] Camera onPreview Code Flow

  • I. Camera onPreview Flow
    • 1. [ Framework ] Camera.java
    • 2. [ JNI ] android_hardware_Camera.cpp
    • 3. [ Native ] Camera.cpp
    • 4. [ Native ] CameraClient.cpp
    • 5. [ Hardware ] CameraHardwareInterface.h
    • 6. [ Hardware ] camera_device_t
    • 7. [ Hardware ] QCamera2HWI.cpp
    • 8. [ HWI ] QCameraStateMachine.cpp
    • 9. [ HWI ] procEvtPreviewStoppedState
      • 9.1 addChannel(QCAMERA_CH_TYPE_PREVIEW)
    • 10. [ HWI ] procEvtPreviewReadyState
    • 10. [ HWI ] QCamera2HardwareInterface::startPreview()
    • 11. [ HWI ] QCameraChannel
    • 12. mm_camera_ops definition
    • 13. [ HWI ] QCameraStream
      • 13.1 mStreams[i]->setBundleInfo()
      • 13.2 mStreams[i]->start()
        • 13.2.1 Camera data processing thread and the QCameraStream callback
      • 13.3 m_camOps->start_channel(m_camHandle, m_handle)
      • 13.4 mStreams[i]->cond_signal()
      • 13.5 QCameraStream summary
    • 14. CAM_STREAM_TYPE_METADATA callback: metadata_stream_cb_routine()
    • 15. CAM_STREAM_TYPE_PREVIEW callback: preview_stream_cb_routine()
    • 16. CAM_STREAM_TYPE_RAW callback: preview_raw_stream_cb_routine()
    • 17. mm_channel_start => mm_stream_dispatch_app_data(): the "CAM_StrmAppData" thread routine
    • 18. Data stream callback


《[Qualcomm SDM660 Platform] (1) — Camera Driver Bringup Guide》
《[Qualcomm SDM660 Platform] (2) — Camera Kernel Driver Layer Code Analysis》
《[Qualcomm SDM660 Platform] (3) — Camera V4L2 Driver Layer Analysis》
《[Qualcomm SDM660 Platform] (4) — Camera Init Flow》
《[Qualcomm SDM660 Platform] (5) — Camera Open Flow》
《[Qualcomm SDM660 Platform] (6) — Camera getParameters and setParameters Flow》
《[Qualcomm SDM660 Platform] (7) — Camera onPreview Code Flow》
《[Qualcomm SDM660 Platform] (8) — Camera MetaData Introduction》
《[Qualcomm SDM660 Platform, Android 10.0] (9) — Qcom Camera Daemon Code Analysis》
《[Qualcomm SDM660 Platform, Android 10.0] (10) — Camera Sensor lib and Kernel Camera Probe Code Analysis》
《[Qualcomm SDM660 Platform] Camera Capture Flow》
《[Qualcomm SDM660 Platform] Camera mm-qcamera-app Code Analysis》


I. Camera onPreview Flow

1. [ Framework ] Camera.java

@ frameworks/base/core/java/android/hardware/Camera.java
@Deprecated
public class Camera {
    private static final String TAG = "Camera";
    public native final void startPreview();
}


2. [ JNI ] android_hardware_Camera.cpp

As you can see, the JNI code fetches the native-layer Camera object and then calls Camera::startPreview().

@ frameworks/base/core/jni/android_hardware_Camera.cpp
static void android_hardware_Camera_startPreview(JNIEnv *env, jobject thiz)
{
    ALOGV("startPreview");
    sp<Camera> camera = get_native_camera(env, thiz, NULL);
    if (camera->startPreview() != NO_ERROR) {
        jniThrowRuntimeException(env, "startPreview failed");
        return;
    }
}

//-------------------------------------------------
static const JNINativeMethod camMethods[] = {
    { "startPreview", "()V", (void *)android_hardware_Camera_startPreview },
};


3. [ Native ] Camera.cpp

@ frameworks/av/camera/Camera.cpp
// start preview mode
status_t Camera::startPreview()
{
    ALOGV("startPreview");
    sp <::android::hardware::ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->startPreview();
    	========>
    	@ frameworks/av/camera/ICamera.cpp
    		status_t startPreview()
		    {
		        ALOGV("startPreview");
		        Parcel data, reply;
		        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
		        remote()->transact(START_PREVIEW, data, &reply);
		        return reply.readInt32();
		    }
}


The ICamera.cpp proxy object then makes a remote Binder call that lands in BnCamera::onTransact():

@ frameworks/av/camera/ICamera.cpp
status_t BnCamera::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
    	case START_PREVIEW: {
            ALOGV("START_PREVIEW");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(startPreview());
            return NO_ERROR;
        } break;


As you can see, this dispatches to BnCamera's startPreview(). BnCamera itself does not implement startPreview(); it is a pure virtual method inherited from ICamera.
So startPreview() has to be implemented by a subclass of BnCamera.

@ frameworks/av/services/camera/libcameraservice/CameraService.h
class CameraService :
    public BinderService<CameraService>,
    public ::android::hardware::BnCameraService,
    public IBinder::DeathRecipient,
    public camera_module_callbacks_t
{
    class Client : public hardware::BnCamera, public BasicClient
    {
    public:
        virtual status_t      startPreview() = 0;
        virtual void          stopPreview() = 0;
    };
};

@ frameworks/av/services/camera/libcameraservice/api1/CameraClient.h
class CameraClient : public CameraService::Client
{
public:
    virtual status_t startPreview();
    virtual void stopPreview();
};


As you can see, CameraService's inner class Client carries the pure virtual startPreview() inherited from BnCamera,
and CameraClient derives from CameraService::Client, so startPreview() is ultimately implemented by CameraClient.

4. [ Native ] CameraClient.cpp

@ frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp
// start preview mode
status_t CameraClient::startPreview() {
    LOG1("startPreview (pid %d)", getCallingPid());
    // start preview
    return startCameraMode(CAMERA_PREVIEW_MODE);
}

// start preview or recording
status_t CameraClient::startCameraMode(camera_mode mode) {
    LOG1("startCameraMode(%d)", mode);
    switch(mode) {
    case CAMERA_PREVIEW_MODE:
        return startPreviewMode();
    case CAMERA_RECORDING_MODE:
        return startRecordingMode();
    }
}

status_t CameraClient::startPreviewMode() {
    LOG1("startPreviewMode");
    if (mPreviewWindow != 0) {
        // re-apply scaling mode and orientation to all preview buffers
        mHardware->setPreviewScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
        mHardware->setPreviewTransform(mOrientation);
    }
    // set the preview window
    mHardware->setPreviewWindow(mPreviewWindow);
    result = mHardware->startPreview();
    if (result == NO_ERROR) {
        mCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_ACTIVE,
                String8::format("%d", mCameraId));
    }
    return result;
}


5. [ Hardware ] CameraHardwareInterface.h

The mHardware member is declared as sp<CameraHardwareInterface> mHardware;
Both mHardware->setPreviewWindow(mPreviewWindow) and mHardware->startPreview() therefore go through CameraHardwareInterface; startPreview() is implemented as follows:

@ frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h

class CameraHardwareInterface : public virtual RefBase {
public:
    /** Start preview mode. */
    status_t startPreview()
    {
        ALOGV("%s(%s)", __FUNCTION__, mName.string());
        if (mDevice->ops->start_preview)
            return mDevice->ops->start_preview(mDevice);
        return INVALID_OPERATION;
    }
};


6. [ Hardware ] camera_device_t

mDevice is declared as camera_device_t *mDevice;

camera_device_t is defined in:

@ hardware/libhardware/include/hardware/camera.h

struct camera_device;
typedef struct camera_device_ops {
    /**
     * Start preview mode.
     */
    int (*start_preview)(struct camera_device *);
} camera_device_ops_t;

typedef struct camera_device {
    /**
     * camera_device.common.version must be in the range
     * HARDWARE_DEVICE_API_VERSION(0,0)-(1,FF). CAMERA_DEVICE_API_VERSION_1_0 is
     * recommended.
     */
    hw_device_t common;
    camera_device_ops_t *ops;
    void *priv;
} camera_device_t;

@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp

camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {
    .start_preview = QCamera2HardwareInterface::start_preview,
    .stop_preview = QCamera2HardwareInterface::stop_preview,
};

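The pattern above, a C function-pointer table (camera_device_ops_t) whose entries forward into a C++ object stored in device->priv, is how the C HAL1 ABI bridges to the C++ implementation. Below is a tiny, self-contained C++ sketch of that pattern; the names are toy stand-ins, not the real QCamera2 types.

#include <cstdio>

// Toy stand-ins for camera_device / camera_device_ops_t.
struct toy_device;
struct toy_device_ops { int (*start_preview)(struct toy_device *dev); };
struct toy_device { toy_device_ops *ops; void *priv; };

class ToyHWI {
public:
    int startPreview() { printf("C++ startPreview()\n"); return 0; }

    // Static trampoline installed in the ops table; it recovers `this` from priv,
    // the same shape as QCamera2HardwareInterface::start_preview() using device->priv.
    static int start_preview(toy_device *dev) {
        return static_cast<ToyHWI *>(dev->priv)->startPreview();
    }
};

static toy_device_ops gOps = { &ToyHWI::start_preview };

int main() {
    ToyHWI hwi;
    toy_device dev{ &gOps, &hwi };
    // Framework-side call, same shape as CameraHardwareInterface::startPreview():
    if (dev.ops->start_preview)
        dev.ops->start_preview(&dev);
    return 0;
}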

7. [ Hardware ] QCamera2HWI.cpp

start_preview() pushes the QCAMERA_SM_EVT_START_PREVIEW event down to the lower layers through processAPI():

@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp

int QCamera2HardwareInterface::start_preview(struct camera_device *device)
{
KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_START_PREVIEW);

QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(device->priv);

LOGI("[KPI Perf]: E PROFILE_START_PREVIEW camera id %d", hw->getCameraId());

qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
uint32_t cam_type = CAM_TYPE_MAIN;
if (hw->isDualCamera()) {
    cam_type = MM_CAMERA_DUAL_CAM;
}
if (hw->isNoDisplayMode(cam_type)) {
    evt = QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW;
}
ret = hw->processAPI(evt, NULL);
	=========>
		@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
		QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
		int QCamera2HardwareInterface::processAPI(qcamera_sm_evt_enum_t api, void *api_payload)
		{
			ret = m_stateMachine.procAPI(api, api_payload);		
		}
if (ret == NO_ERROR) {
    hw->waitAPIResult(evt, &apiResult);
    ret = apiResult.status;
}
hw->unlockAPI();
LOGI("[KPI Perf]: X ret = %d", ret);
return ret;

}


After several layers of calls we land in QCameraStateMachine::procAPI() with evt = QCAMERA_SM_EVT_START_PREVIEW.
The event is wrapped in a node and stored in a QCameraQueue.
The node carries two pieces of information: the event QCAMERA_SM_EVT_START_PREVIEW and the command type QCAMERA_SM_CMD_TYPE_API.

@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
QCameraStateMachine m_stateMachine;

int32_t QCameraStateMachine::procAPI(qcamera_sm_evt_enum_t evt, void *api_payload)
{
    qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));

    memset(node, 0, sizeof(qcamera_sm_cmd_t));
    node->cmd = QCAMERA_SM_CMD_TYPE_API;
    node->evt = evt;
    node->evt_payload = api_payload;
    if (api_queue.enqueue((void *)node)) {
        cam_sem_post(&cmd_sem);
        return NO_ERROR;
    }
}


QCameraQueue::enqueue() appends the node to the queue:

@ hardware/qcom/camera/QCamera2/util/QCameraQueue.cpp
bool QCameraQueue::enqueue(void *data)
{
    bool rc;
    camera_q_node *node = (camera_q_node *)malloc(sizeof(camera_q_node));
    memset(node, 0, sizeof(camera_q_node));
    node->data = data;

    if (m_active) {
        cam_list_add_tail_node(&node->list, &m_head.list);
        m_size++;
        rc = true;
    }
    return rc;
}


8. [ HWI ] QCameraStateMachine.cpp

QCameraStateMachine creates a dedicated thread to process these evt commands:

@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp

QCameraStateMachine::QCameraStateMachine(QCamera2HardwareInterface *ctrl) :
    api_queue(),
    evt_queue()
{
    pthread_create(&cmd_pid, NULL, smEvtProcRoutine, this);
    pthread_setname_np(cmd_pid, "CAM_stMachine");
}


In the thread routine QCameraStateMachine::smEvtProcRoutine(), since the node we queued carries QCAMERA_SM_EVT_START_PREVIEW with command type QCAMERA_SM_CMD_TYPE_API,
execution takes the case QCAMERA_SM_CMD_TYPE_API branch.

@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
void *QCameraStateMachine::smEvtProcRoutine(void *data)
{
    int running = 1, ret;
    QCameraStateMachine *pme = (QCameraStateMachine *)data;
LOGH("E");
do {
    do {
        ret = cam_sem_wait(&pme->cmd_sem);
    } while (ret != 0);

    // we got notified about new cmd avail in cmd queue
    // first check API cmd queue
    qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)pme->api_queue.dequeue();
    if (node == NULL) {
        // no API cmd, then check evt cmd queue
        node = (qcamera_sm_cmd_t *)pme->evt_queue.dequeue();
    }
    if (node != NULL) {
        switch (node->cmd) {
        case QCAMERA_SM_CMD_TYPE_API:
            pme->stateMachine(node->evt, node->evt_payload);
            // API is in a way sync call, so evt_payload is managed by HWI
            // no need to free payload for API
            break;
        case QCAMERA_SM_CMD_TYPE_EVT:
            pme->stateMachine(node->evt, node->evt_payload);

            // EVT is async call, so payload need to be free after use
            free(node->evt_payload);
            node->evt_payload = NULL;
            break;
        case QCAMERA_SM_CMD_TYPE_EXIT:
            running = 0;
            break;
        }
    }
} while (running);
LOGH("X");
return NULL;
}

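The interplay between procAPI() and smEvtProcRoutine() above is a classic command-queue pattern: the API thread enqueues a node and signals, and the CAM_stMachine thread wakes up, dequeues the node and feeds it to the state machine. Below is a minimal, self-contained C++ sketch of that pattern; the names and the std::condition_variable are simplified stand-ins for QCameraQueue and cam_sem, not the vendor implementation.

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <queue>
#include <thread>

// Simplified stand-in for the HAL's api_queue + cmd_sem pair.
enum Cmd { CMD_API_START_PREVIEW, CMD_EXIT };

class CmdQueue {
public:
    void enqueue(Cmd c) {                        // QCameraQueue::enqueue + cam_sem_post
        std::lock_guard<std::mutex> lk(m_);
        q_.push(c);
        cv_.notify_one();
    }
    Cmd dequeue() {                              // cam_sem_wait + QCameraQueue::dequeue
        std::unique_lock<std::mutex> lk(m_);
        cv_.wait(lk, [&] { return !q_.empty(); });
        Cmd c = q_.front();
        q_.pop();
        return c;
    }
private:
    std::mutex m_;
    std::condition_variable cv_;
    std::queue<Cmd> q_;
};

// Plays the role of the CAM_stMachine thread (smEvtProcRoutine).
static void evtProcRoutine(CmdQueue *q) {
    for (;;) {
        Cmd c = q->dequeue();
        if (c == CMD_EXIT) break;
        printf("state machine handling cmd %d\n", c);   // pme->stateMachine(evt, payload)
    }
}

int main() {
    CmdQueue q;
    std::thread worker(evtProcRoutine, &q);      // pthread_create(..., smEvtProcRoutine, ...)
    q.enqueue(CMD_API_START_PREVIEW);            // procAPI(QCAMERA_SM_EVT_START_PREVIEW, ...)
    q.enqueue(CMD_EXIT);
    worker.join();
    return 0;
}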

QCameraStateMachine::stateMachine() dispatches on the previously recorded m_state.
At initialization m_state is set to QCAMERA_SM_STATE_PREVIEW_STOPPED,
so the first preview request goes through procEvtPreviewStoppedState():

@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
int32_t QCameraStateMachine::stateMachine(qcamera_sm_evt_enum_t evt, void *payload)
{
    int32_t rc = NO_ERROR;
    LOGL("m_state %d, event (%d)", m_state, evt);
    switch (m_state) {
    case QCAMERA_SM_STATE_PREVIEW_STOPPED:
        rc = procEvtPreviewStoppedState(evt, payload);
        break;
    case QCAMERA_SM_STATE_PREVIEW_READY:
        rc = procEvtPreviewReadyState(evt, payload);
        break;
    case QCAMERA_SM_STATE_PREVIEWING:
        rc = procEvtPreviewingState(evt, payload);
        break;
    case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
        rc = procEvtPrepareSnapshotState(evt, payload);
        break;
    case QCAMERA_SM_STATE_PIC_TAKING:
        rc = procEvtPicTakingState(evt, payload);
        break;
    case QCAMERA_SM_STATE_RECORDING:
        rc = procEvtRecordingState(evt, payload);
        break;
    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
        rc = procEvtVideoPicTakingState(evt, payload);
        break;
    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
        rc = procEvtPreviewPicTakingState(evt, payload);
        break;
    default:
        break;
    }
if (m_parent->isDualCamera()) {
    /* Update the FOVControl dual camera result state based on the current state.
     Update the result only in previewing and recording states */
    bool updateResultState = false;
    if (((m_state == QCAMERA_SM_STATE_PREVIEWING) ||
            (m_state == QCAMERA_SM_STATE_RECORDING)) && !m_parent->mActiveAF
            && !m_parent->m_bPreparingHardware) {
        updateResultState = true;
    }
    m_parent->m_pFovControl->UpdateFlag(FOVCONTROL_FLAG_UPDATE_RESULT_STATE,
            &updateResultState);
}

return rc;

}


9. [ HWI ] procEvtPreviewStoppedState

Since the event we sent down is QCAMERA_SM_EVT_START_PREVIEW,
this handler first calls m_parent->preparePreview() to prepare the preview, which adds the QCAMERA_CH_TYPE_PREVIEW channel.
Once that succeeds, the state machine switches to QCAMERA_SM_STATE_PREVIEW_READY.

@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp

int32_t QCameraStateMachine::procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt,
void *payload)
{
int32_t rc = NO_ERROR;
qcamera_api_result_t result;
memset(&result, 0, sizeof(qcamera_api_result_t));

LOGL("event (%d)", evt);
switch (evt) {
case QCAMERA_SM_EVT_START_PREVIEW:
    {
        rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
       if (m_parent->mPreviewWindow == NULL) {
            rc = m_parent->preparePreview();
            	=======>
            		rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
            		========>
            			addPreviewChannel();
     
            if(rc == NO_ERROR) {
                // preview window is not set yet, move to previewReady state
                m_state = QCAMERA_SM_STATE_PREVIEW_READY;
            }
        } else {
            rc = m_parent->preparePreview();
            if (rc == NO_ERROR) {
                applyDelayedMsgs();
                rc = m_parent->startPreview();
                if (rc != NO_ERROR) {
                    m_parent->unpreparePreview();
                } else {
                    // start preview success, move to previewing state
                    m_state = QCAMERA_SM_STATE_PREVIEWING;
                }
            }
        }
        result.status = rc;
        result.request_api = evt;
        result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
        m_parent->signalAPIResult(&result);
    }
    break;

9.1 addChannel(QCAMERA_CH_TYPE_PREVIEW)

@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp

int32_t QCamera2HardwareInterface::addChannel(qcamera_ch_type_enum_t ch_type)
{
    switch (ch_type) {
    case QCAMERA_CH_TYPE_PREVIEW:
        rc = addPreviewChannel();
        break;


Inside addPreviewChannel(), addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW, preview_stream_cb_routine, this) registers the preview stream callback, preview_stream_cb_routine().

It also installs a synchronous callback with pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW, synchronous_stream_cb_routine), i.e. synchronous_stream_cb_routine().

If a raw YUV dump is needed, addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW, preview_raw_stream_cb_routine, this) is called as well;
its callback, preview_raw_stream_cb_routine(), dumps the frames to a YUV file.
A simplified sketch of the general callback contract is shown right after the listing below.

@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
int32_t QCamera2HardwareInterface::addPreviewChannel()
{
    QCameraChannel *pChannel = NULL;
    char value[PROPERTY_VALUE_MAX];
    bool raw_yuv = false;
    uint32_t handle = getCamHandleForChannel(QCAMERA_CH_TYPE_PREVIEW);
        =====>
            handle = mCameraHandle->camera_handle;
    // create the QCameraChannel object, passing in mCameraHandle->ops
    // (the ops table that was initialized in the mm-camera-interface constructor)
    pChannel = new QCameraChannel(handle, mCameraHandle->ops);

    // init the channel; preview only channel, don't need bundle attr and cb
    rc = pChannel->init(NULL, NULL, NULL);

    // meta data stream always coexists with preview if applicable
    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA, metadata_stream_cb_routine, this);

if (isRdiMode()) {
    if (isSecureMode()) {
        LOGI("RDI - secure CB");
        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
                secure_stream_cb_routine, this);
    }
    else {
        LOGI("RDI - rdi CB");
        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
                rdi_mode_stream_cb_routine, this);
    }
} else {
    if (isSecureMode()) {
        LOGI("PREVIEW - secure CB");
        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
                secure_stream_cb_routine, this);
    }
    else {
        LOGI("PREVIEW - non-secure CB");
        uint32_t cam_type = CAM_TYPE_MAIN;
        if (isDualCamera()) {
            cam_type = (CAM_TYPE_MAIN | CAM_TYPE_AUX);
        }
        if (isNoDisplayMode(cam_type)) {
            rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW, nodisplay_preview_stream_cb_routine, this);
        } else {
                rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW, preview_stream_cb_routine, this);

                if (needSyncCB(CAM_STREAM_TYPE_PREVIEW) == TRUE) {
                    pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW, synchronous_stream_cb_routine);
                }
        }
    }
}

if (((mParameters.fdModeInVideo()) || (mParameters.getDcrf() == true)
        || (mParameters.getRecordingHintValue() != true))&& (!isSecureMode())) {
    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_ANALYSIS, NULL, this);
}

property_get("persist.camera.raw_yuv", value, "0");
raw_yuv = atoi(value) > 0 ? true : false;
if ( raw_yuv ) {
    rc = addStreamToChannel(pChannel,CAM_STREAM_TYPE_RAW, preview_raw_stream_cb_routine,this);
}

m_channels[QCAMERA_CH_TYPE_PREVIEW] = pChannel;
return rc;

}
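
Before looking at the individual callbacks (sections 14 to 16), it helps to see the general contract these per-stream callbacks follow: receive a super-buffer from the stream's data-processing thread, consume the frame, then return the buffer to the stream. The sketch below is a self-contained C++ toy; FakeSuperBuf and FakeStream are simplified stand-ins for mm_camera_super_buf_t and QCameraStream, not the real definitions.

#include <cstdint>
#include <cstdio>
#include <cstdlib>

// Simplified stand-ins for the real mm-camera-interface types.
struct FakeBufDef   { uint32_t buf_idx; void *buffer; uint32_t len; };
struct FakeSuperBuf { uint32_t num_bufs; FakeBufDef *bufs[4]; };

class FakeStream {
public:
    // In the real HAL, QCameraStream::bufDone() returns the buffer to the driver queue.
    void bufDone(uint32_t idx) { printf("buffer %u returned to stream\n", idx); }
};

// Shape of a per-stream data callback: registered once per stream type
// (metadata / preview / raw) and invoked from the stream's data-processing thread.
static void example_preview_cb(FakeSuperBuf *super, FakeStream *stream, void *userdata)
{
    (void)userdata;                        // the real code casts this back to the HWI object
    if (super == nullptr || super->num_bufs == 0)
        return;

    FakeBufDef *frame = super->bufs[0];    // the preview frame
    // ... consume frame->buffer: enqueue to the preview window, dump to file, etc. ...

    stream->bufDone(frame->buf_idx);       // hand the buffer back so the driver can refill it
    free(super);                           // the callback owns the super-buffer container
}

int main()
{
    FakeStream stream;
    FakeBufDef buf = { 0, nullptr, 0 };
    FakeSuperBuf *super = static_cast<FakeSuperBuf *>(calloc(1, sizeof(FakeSuperBuf)));
    super->num_bufs = 1;
    super->bufs[0] = &buf;
    example_preview_cb(super, &stream, nullptr);
    return 0;
}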


10. [ HWI ] procEvtPreviewReadyState

Because the state machine has switched to m_state = QCAMERA_SM_STATE_PREVIEW_READY, the next QCAMERA_SM_EVT_START_PREVIEW event is handled as follows:

@ hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp
int32_t QCameraStateMachine::procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt, void *payload)
{
    LOGL("event (%d)", evt);
    switch (evt) {
    	case QCAMERA_SM_EVT_START_PREVIEW:
        {
            if (m_parent->mPreviewWindow != NULL) {
                rc = m_parent->startPreview();
                if (rc != NO_ERROR) {
                    m_parent->unpreparePreview();
                    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
                } else {
                    m_state = QCAMERA_SM_STATE_PREVIEWING;
                }
            }
            // no ops here
            rc = NO_ERROR;
            result.status = rc;
            result.request_api = evt;
            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
            m_parent->signalAPIResult(&result);
        }
        break;


Here m_parent->startPreview() is invoked and the state switches to m_state = QCAMERA_SM_STATE_PREVIEWING.
m_parent is declared as QCamera2HardwareInterface *m_parent; // ptr to HWI

10. [ HWI ] QCamera2HardwareInterface::startPreview()

It ultimately calls startChannel(QCAMERA_CH_TYPE_PREVIEW):

@ hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
int QCamera2HardwareInterface::startPreview()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_STARTPREVIEW);
    int32_t rc = NO_ERROR;
LOGI("E ZSL = %d Recording Hint = %d", mParameters.isZSLMode(), mParameters.getRecordingHintValue());
updateThermalLevel((void *)&mThermalLevel);
setDisplayFrameSkip();

// start preview stream
if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
    rc = startChannel(QCAMERA_CH_TYPE_ZSL);
} else if (isSecureMode()) {
    if (mParameters.getSecureStreamType() == CAM_STREAM_TYPE_RAW && !isRdiMode()) {
        rc = startChannel(QCAMERA_CH_TYPE_RAW);
    }else {
        rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
    }
} else {
    rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
    =======>
    	rc = m_channels[ch_type]->start();
}

if (isDualCamera()) {
    if (rc == NO_ERROR) {
        mParameters.setDCDeferCamera(CAM_DEFER_PROCESS);
        mParameters.setDCLowPowerMode(mActiveCameras);
    } else {
        mParameters.setDCDeferCamera(CAM_DEFER_FLUSH);
    }
}

if ((msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) && (m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL)) {
    rc = startChannel(QCAMERA_CH_TYPE_CALLBACK);
}

updatePostPreviewParameters();
m_stateMachine.setPreviewCallbackNeeded(true);

// if job id is non-zero, that means the postproc init job is already
// pending or complete
if (mInitPProcJob == 0 && !(isSecureMode() && isRdiMode())) {
    mInitPProcJob = deferPPInit();
}
m_bPreviewStarted = true;

//configure snapshot skip for dual camera usecases
configureSnapshotSkip(true);

LOGI("X rc = %d", rc);
return rc;

}


11. [ HWI ] QCameraChannel

In m_channels[QCAMERA_CH_TYPE_PREVIEW]->start():

int32_t QCameraChannel::start()
{
    // there is more than one stream in the channel
    // we need to notify mctl that all streams in this channel need to be bundled
    for (size_t i = 0; i < mStreams.size(); i++) {
        if ((mStreams[i] != NULL) &&(validate_handle(m_handle, mStreams[i]->getChannelHandle()))) {
            mStreams[i]->setBundleInfo();
        }
    }
    for (size_t i = 0; i < mStreams.size(); i++) {
        if ((mStreams[i] != NULL) && (validate_handle(m_handle, mStreams[i]->getChannelHandle()))) {
            mStreams[i]->start();
        }
    }
    rc = m_camOps->start_channel(m_camHandle, m_handle);
	m_bIsActive = true;
    for (size_t i = 0; i < mStreams.size(); i++) {
        if (mStreams[i] != NULL) {
            mStreams[i]->cond_signal();
        }
    }
return rc;

}


As the code shows, starting the channel performs these steps:
mStreams[i]->setBundleInfo();
mStreams[i]->start();
m_camOps->start_channel(m_camHandle, m_handle);
mStreams[i]->cond_signal();

Let's first look at which streams are in the channel.
From addPreviewChannel() there may be three: CAM_STREAM_TYPE_METADATA, CAM_STREAM_TYPE_PREVIEW and CAM_STREAM_TYPE_RAW:

rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA, metadata_stream_cb_routine, this);

rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,preview_stream_cb_routine, this);
pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW, synchronous_stream_cb_routine);

rc = addStreamToChannel(pChannel,CAM_STREAM_TYPE_RAW, preview_raw_stream_cb_routine,this);

12. mm_camera_ops definition

@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c

/* camera ops v-table */
static mm_camera_ops_t mm_camera_ops = {
.query_capability = mm_camera_intf_query_capability,
.register_event_notify = mm_camera_intf_register_event_notify,
.close_camera = mm_camera_intf_close,
.set_parms = mm_camera_intf_set_parms,
.get_parms = mm_camera_intf_get_parms,
.do_auto_focus = mm_camera_intf_do_auto_focus,
.cancel_auto_focus = mm_camera_intf_cancel_auto_focus,
.prepare_snapshot = mm_camera_intf_prepare_snapshot,
.start_zsl_snapshot = mm_camera_intf_start_zsl_snapshot,
.stop_zsl_snapshot = mm_camera_intf_stop_zsl_snapshot,
.map_buf = mm_camera_intf_map_buf,
.map_bufs = mm_camera_intf_map_bufs,
.unmap_buf = mm_camera_intf_unmap_buf,
.add_channel = mm_camera_intf_add_channel,
.delete_channel = mm_camera_intf_del_channel,
.get_bundle_info = mm_camera_intf_get_bundle_info,
.add_stream = mm_camera_intf_add_stream,
.link_stream = mm_camera_intf_link_stream,
.delete_stream = mm_camera_intf_del_stream,
.config_stream = mm_camera_intf_config_stream,
.qbuf = mm_camera_intf_qbuf,
.cancel_buffer = mm_camera_intf_cancel_buf,
.get_queued_buf_count = mm_camera_intf_get_queued_buf_count,
.map_stream_buf = mm_camera_intf_map_stream_buf,
.map_stream_bufs = mm_camera_intf_map_stream_bufs,
.unmap_stream_buf = mm_camera_intf_unmap_stream_buf,
.set_stream_parms = mm_camera_intf_set_stream_parms,
.get_stream_parms = mm_camera_intf_get_stream_parms,
.start_channel = mm_camera_intf_start_channel,
.stop_channel = mm_camera_intf_stop_channel,
.request_super_buf = mm_camera_intf_request_super_buf,
.cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
.flush_super_buf_queue = mm_camera_intf_flush_super_buf_queue,
.configure_notify_mode = mm_camera_intf_configure_notify_mode,
.process_advanced_capture = mm_camera_intf_process_advanced_capture,
.get_session_id = mm_camera_intf_get_session_id,
.set_dual_cam_cmd = mm_camera_intf_set_dual_cam_cmd,
.flush = mm_camera_intf_flush,
.register_stream_buf_cb = mm_camera_intf_register_stream_buf_cb,
.register_frame_sync = mm_camera_intf_reg_frame_sync,
.handle_frame_sync_cb = mm_camera_intf_handle_frame_sync_cb
};


13. [ HWI ] QCameraStream

13.1 mStreams[i]->setBundleInfo()

  1. Check whether the current mCamHandle has streams bundled to it
@ hardware/qcom/camera/QCamera2/HAL/QCameraStream.cpp
// set bundle for this stream to MCT
int32_t QCameraStream::setBundleInfo()
{
	cam_stream_parm_buffer_t param, aux_param;
    uint32_t active_handle = get_main_camera_handle(mChannelHandle);
    	=====>
    		@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
    		return mm_camera_util_get_handle_by_num(0, handle);
memset(&bundleInfo, 0, sizeof(bundleInfo));
if (active_handle) {
    // 1. check whether the current mCamHandle has streams bundled to it
    ret = mCamOps->get_bundle_info(mCamHandle, active_handle,&bundleInfo);
    =============================>
    + 	@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
	+	rc = mm_camera_get_bundle_info(my_obj, m_chid, bundle_info);
	+	==============>
	+		@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
	+		rc = mm_channel_fsm_fn(ch_obj,MM_CHANNEL_EVT_GET_BUNDLE_INFO,(void *)bundle_info,NULL);
	+		=================>
	+			@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
	+			rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
	+			=================>
	+				@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
	+				case MM_CHANNEL_EVT_GET_BUNDLE_INFO:
	+					cam_bundle_config_t *payload = (cam_bundle_config_t *)in_val;
    +   				rc = mm_channel_get_bundle_info(my_obj, payload);
    +    				==================>
    +    					@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
    +    					s_obj = mm_channel_util_get_stream_by_handler(my_obj, my_obj->streams[i].my_hdl);
    +    					================>
    +    						@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
	+						    for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
	+						        if ((MM_STREAM_STATE_NOTUSED != ch_obj->streams[i].state) &&
	+						            (handler == ch_obj->streams[i].my_hdl)) {
	+						            s_obj = &ch_obj->streams[i];
	+						            break;
	+						        }
	+						    }
	+						    return s_obj;
    <=============================                  
    memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
    param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
    param.bundleInfo = bundleInfo;
}
mStreamInfo->parm_buf = param;
if ((aux_param.bundleInfo.num_of_streams > 1)&& (mStreamInfo->aux_str_info != NULL)) {
    mStreamInfo->aux_str_info->parm_buf = aux_param;
}

if (mStreamInfo->parm_buf.bundleInfo.num_of_streams > 1) {
    uint32_t channelHdl = get_main_camera_handle(mChannelHandle);
    uint32_t streamHdl = get_main_camera_handle(mHandle);
    if (mCamType == CAM_TYPE_AUX) {
        channelHdl = mChannelHandle;
        streamHdl = mHandle;
    }
    ret = mCamOps->set_stream_parms(mCamHandle, channelHdl, streamHdl, &mStreamInfo->parm_buf);
    ==================>
   	+ 	@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
    +	rc = mm_camera_set_stream_parms(my_obj, chid, strid, parms);
    +	==============>
    +		@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
    +		rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_SET_STREAM_PARM, (void *)&payload, NULL);
    +       ============>   
    +         	@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
    +         	rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);      
    + 			=============>
    + 				    case MM_CHANNEL_EVT_GET_STREAM_PARM:
	+			        {
	+			            mm_evt_paylod_set_get_stream_parms_t *payload =
	+			                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
	+			            rc = mm_channel_get_stream_parm(my_obj, payload);
	+			            ===============>
	+			            	rc = mm_stream_fsm_fn(s_obj, MM_STREAM_EVT_GET_PARM,(void *)payload, NULL);
	+			            	============>
	+			            		rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
	+			            		=============>
	+									@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
	+			            			rc = mm_stream_get_parm(my_obj, payload->parms);
	+									============>
	+										rc = mm_camera_util_g_ctrl(cam_obj, stream_id, my_obj->fd,
	+																			CAM_PRIV_STREAM_PARM, &value);
    +      									==============>
    + 											@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
    + 											rc = ioctl(fd, VIDIOC_G_CTRL, &control);
	+										    LOGD("fd=%d, G_CTRL, id=0x%x, rc = %d\n", fd, id, rc);
	+										    if (value != NULL) {
	+										        *value = control.value;
	+										    }			
	+											==============>
	+												@ kernel/msm-4.4/drivers/media/v4l2-core/v4l2-subdev.c
	+												static long subdev_do_ioctl(struct file *file, unsigned int cmd, void *arg)
	+												{
	+												case VIDIOC_G_CTRL:
	+													return v4l2_g_ctrl(vfh->ctrl_handler, arg);
	<=============================  
}

}
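
The bottom of the chain above is a plain v4l2 control read: mm_camera_util_g_ctrl() ends up issuing ioctl(fd, VIDIOC_G_CTRL, &control) against the camera (sub)device node, and the kernel routes it to v4l2_g_ctrl(). A minimal standalone C++ illustration of that last hop follows; the device path and control id are placeholders (the HAL uses its private CAM_PRIV_STREAM_PARM control), not the actual values.

#include <cstdio>
#include <cstring>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <unistd.h>

int main()
{
    const char *dev = "/dev/v4l-subdev0";        // placeholder node
    int fd = open(dev, O_RDWR);
    if (fd < 0) { perror("open"); return 1; }

    struct v4l2_control control;
    memset(&control, 0, sizeof(control));
    control.id = V4L2_CID_BRIGHTNESS;            // placeholder id; the HAL passes CAM_PRIV_STREAM_PARM

    // Same call shape the HAL makes in mm_camera_util_g_ctrl():
    int rc = ioctl(fd, VIDIOC_G_CTRL, &control);
    if (rc == 0)
        printf("id=0x%x value=%d\n", control.id, control.value);
    else
        perror("VIDIOC_G_CTRL");

    close(fd);
    return 0;
}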


13.2 mStreams[i]->start()

QCameraStream::start() activates the data queue, launches the dataProcRoutine thread, and resets mStreamMetaMemory.

@ hardware/qcom/camera/QCamera2/HAL/QCameraStream.cpp
int32_t QCameraStream::start()
{
    int32_t rc = 0;
    mDataQ.init();
    =============>
    +	@ hardware/qcom/camera/QCamera2/util/QCameraQueue.cpp
    +	m_active = true;
    <=============	
    // launch the frame-processing thread; it runs dataProcRoutine
    rc = mProcTh.launch(dataProcRoutine, this);
    =============>
    +	pthread_create(&cmd_pid, NULL, dataProcRoutine, user_data);
    <=============
    mCurMetaMemory = NULL;
    mCurBufIndex = -1;
    mCurMetaIndex = -1;
    mFirstTimeStamp = 0;
    memset(&mStreamMetaMemory, 0, (sizeof(MetaMemory) * CAMERA_MIN_VIDEO_BATCH_BUFFERS));
    return rc;
}


13.2.1 Camera data processing thread and the QCameraStream callback

void *QCameraStream::dataProcRoutine(void *data)
{
    int running = 1;
    QCameraStream *pme = (QCameraStream *)data;
    QCameraCmdThread *cmdThread = &pme->mProcTh;
    cmdThread->setName("CAM_strmDatProc");
    LOGD("E");
    do {
        do {    // wait for a new command to arrive
            ret = cam_sem_wait(&cmdThread->cmd_sem);
        } while (ret != 0);
    // we got notified about new cmd avail in cmd queue
    camera_cmd_type_t cmd = cmdThread->getCmd();
    switch (cmd) {
    case CAMERA_CMD_TYPE_DO_NEXT_JOB:
        {
            LOGD("Do next job");
            // when a frame arrives, invoke the QCameraStream data callback to deliver it upward
            mm_camera_super_buf_t *frame = (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
            if (NULL != frame) {
                if (pme->mDataCB != NULL) {
                    pme->mDataCB(frame, pme, pme->mUserData);
                } else {
                    // no data cb routine, return buf here
                    pme->bufDone(frame);
                    free(frame);
                }
            }
        }
        break;
    case CAMERA_CMD_TYPE_EXIT:
        LOGH("Exit");
        /* flush data buf queue */
        pme->mDataQ.flush();
        running = 0;
        break;
    default:
        break;
    }
} while (running);
LOGH("X");
return NULL;

}


13.3 m_camOps->start_channel(m_camHandle, m_handle)

@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
/* camera ops v-table */
static mm_camera_ops_t mm_camera_ops = {
    .start_channel = mm_camera_intf_start_channel,
};

@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
                                            uint32_t ch_id)
{
    int32_t rc = -1;
    mm_camera_obj_t * my_obj = NULL;
    uint32_t chid = get_main_camera_handle(ch_id);
    uint32_t aux_chid = get_aux_camera_handle(ch_id);
if (chid) {
    uint32_t handle = get_main_camera_handle(camera_handle);
    
    my_obj = mm_camera_util_get_camera_by_handler(handle);
    if(my_obj) {
        // start all streams in this channel
        rc = mm_camera_start_channel(my_obj, chid);
        ===================>
        +	@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
        +	rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_START,   NULL, NULL);
        +		=============>
        +		@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
        +		case MM_CHANNEL_STATE_STOPPED:
    	+			rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
    	+			==============>
    	+				@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
    	+				case MM_CHANNEL_EVT_START:{
        +					rc = mm_channel_start(my_obj);
        +					if (0 == rc) {
        +    					my_obj->state = MM_CHANNEL_STATE_ACTIVE;
        +					}
        <===================
    }
}
LOGH("X ch_id = %u rc = %d", ch_id, rc);
return rc;

}


mm_channel_start() in mm_camera_channel.c does the following:

  1. Create a pthread named "CAM_SuperBuf"
  2. Create a pthread named "CAM_SuperBufCB", used to process stream bufs
  3. Allocate the buffers for each stream
  4. Register the stream buffers
  5. Start each stream
@ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
int32_t mm_channel_start(mm_channel_t *my_obj)
{
    int32_t rc = 0;
    int i = 0, j = 0;
    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
    uint8_t num_streams_to_start = 0;
    uint8_t num_streams_in_bundle_queue = 0;
    mm_stream_t *s_obj = NULL;
    int meta_stream_idx = 0;
    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
    if (my_obj->streams[i].my_hdl > 0) {
        s_obj = mm_channel_util_get_stream_by_handler(my_obj, my_obj->streams[i].my_hdl);
      	
    }
}
if (meta_stream_idx > 0 ) {
    /* always start meta data stream first, so switch the stream object with the first one */
    s_obj = s_objs[0];
    s_objs[0] = s_objs[meta_stream_idx];
    s_objs[meta_stream_idx] = s_obj;
}
if (NULL != my_obj->bundle.super_buf_notify_cb) {
    /* need to send up cb, therefore launch thread */
    /* init superbuf queue */
    mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
    my_obj->bundle.superbuf_queue.num_streams = num_streams_in_bundle_queue;
    my_obj->bundle.superbuf_queue.expected_frame_id = my_obj->bundle.superbuf_queue.attr.user_expected_frame_id;
    my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
    my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
    my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
    my_obj->bundle.superbuf_queue.led_on_num_frames = 0;
    my_obj->bundle.superbuf_queue.good_frame_id = 0;

    for (i = 0; i < num_streams_to_start; i++) {
        /* Only bundle streams that belong to the channel */
        if(!(s_objs[i]->stream_info->noFrameExpected) ||(s_objs[i]->is_frame_shared && (s_objs[i]->ch_obj == my_obj))) {
            if (s_objs[i]->ch_obj == my_obj) {
                /* set bundled flag to streams */
                s_objs[i]->is_bundled = 1;
            }
            my_obj->bundle.superbuf_queue.bundled_streams[j] = s_objs[i]->my_hdl;

            if (s_objs[i]->is_frame_shared && (s_objs[i]->ch_obj == my_obj)) {
                mm_stream_t *dst_obj = NULL;
                if (s_objs[i]->master_str_obj != NULL) {
                    dst_obj = s_objs[i]->master_str_obj;
                } else if (s_objs[i]->aux_str_obj[0] != NULL) {
                    dst_obj = s_objs[i]->aux_str_obj[0];
                }
                if (dst_obj) {
                    my_obj->bundle.superbuf_queue.bundled_streams[j]|= dst_obj->my_hdl;
                }
            }
            j++;
        }
    }
    // create a pthread named "CAM_SuperBuf"
    /* launch cb thread for dispatching super buf through cb */
    snprintf(my_obj->cb_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBuf");
    mm_camera_cmd_thread_launch(&my_obj->cb_thread, mm_channel_dispatch_super_buf,(void*)my_obj);
    // create a pthread named "CAM_SuperBufCB", used to process stream bufs
    /* launch cmd thread for super buf dataCB */
    snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBufCB");
    mm_camera_cmd_thread_launch(&my_obj->cmd_thread, mm_channel_process_stream_buf, (void*)my_obj);
    	===============>
    		pthread_create(&cmd_thread->cmd_pid, NULL, mm_channel_process_stream_buf, (void *)cmd_thread);

    /* set flag to TRUE */
    my_obj->bundle.is_active = TRUE;
}

/* link any streams first before starting the rest of the streams */
for (i = 0; i < num_streams_to_start; i++) {
    if ((s_objs[i]->ch_obj != my_obj) && my_obj->bundle.is_active) {
        pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
        s_objs[i]->linked_stream->linked_obj = my_obj;
        s_objs[i]->linked_stream->is_linked = 1;
        pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
        continue;
    }
}

for (i = 0; i < num_streams_to_start; i++) {
    if (s_objs[i]->ch_obj != my_obj) {
        continue;
    }
    /* all streams within a channel should be started at the same time */
    if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
        LOGE("stream already started idx(%d)", i);
        rc = -1;
        break;
    }
    // allocate the buffers for this stream
    /* allocate buf */
    rc = mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_GET_BUF, NULL, NULL);
    // register the stream buffers
    /* reg buf */
    rc = mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_REG_BUF, NULL, NULL);
    // start the stream
    /* start stream */
    rc = mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_START, NULL, NULL);
    	=================>
    	+	snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_StrmAppData");
        +    mm_camera_cmd_thread_launch(&my_obj->cmd_thread, mm_stream_dispatch_app_data, (void *)my_obj);
        +    my_obj->state = MM_STREAM_STATE_ACTIVE;
        +	rc = mm_stream_streamon(my_obj);
        <=================
}


my_obj->bWaitForPrepSnapshotDone = 0;
if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
    LOGH("registering Channel obj %p", my_obj);
    mm_frame_sync_register_channel(my_obj);
}
return rc;

}

