Virtual Camera Part 9: A Deep Dive into the camera.imx8.hal Framework on IMX8Q

Preface

Android camera HAL frameworks differ from vendor to vendor in their design.
This article works through the design logic of NXP's Android 8 camera HAL, with a guided read of the code.

A further note from the author:
In "Virtual Camera Part 3: Reworking the Android 8.1 v4l2_camera_HAL to support a virtual camera," the plan was to build the virtual-camera HAL by reworking the v4l2_camera_HAL from Android 8. That plan has since changed to building on NXP's CameraHAL framework, which is why these study notes cover that framework instead.

1>. Design of the Camera class

The basic design wraps everything in a Camera object. First, the class definition:
@vendor/nxp-opensource/imx/libcamera3/Camera.h

class Camera : public camera_info, public SensorData
{
public:
    // id is used to distinguish cameras. 0 <= id < NUM_CAMERAS.
    // module is a handle to the HAL module, used when the device is opened.
    Camera(int32_t id, int32_t facing, int32_t orientation, char* path);
    virtual ~Camera();

    static Camera* createCamera(int32_t id, char* name, int32_t facing,
                                int32_t orientation, char* path);
    // process the advanced capture settings.
    int32_t processSettings(sp<Metadata> settings, uint32_t frame);
    // Common Camera Device Operations (see <hardware/camera3.h>)
    int32_t openDev(const hw_module_t *module, hw_device_t **device);
    int32_t getInfo(struct camera_info *info);
    int32_t closeDev();
    int32_t flushDev();
    virtual bool isHotplug() {return false;}

protected:
    // Initialize static camera characteristics for individual device
    virtual status_t initSensorStaticData() = 0;

    virtual void setPreviewPixelFormat();
    virtual void setPicturePixelFormat();

    // Verify settings are valid for a capture
    virtual bool isValidCaptureSettings(const camera_metadata_t *);
    // Separate initialization method for individual devices when opened
    virtual int32_t initDevice();
private:
    // Camera device handle returned to framework for use
    camera3_device_t mDevice;
    // Reuse a stream already created by this device
    sp<Stream> reuseStream(camera3_stream_t *astream);
    // Destroy all streams in a stream array, and the array itself
    void destroyStreams(sp<Stream> *array, int32_t count);
    // Verify a set of streams is valid in aggregate
    bool isValidStreamSet(sp<Stream> *array, int32_t count);
    // Verify settings are valid for reprocessing an input buffer
    bool isValidReprocessSettings(const camera_metadata_t *settings);
    // Send a shutter notify message with start of exposure time
    void notifyShutter(uint32_t frame_number, uint64_t timestamp);
    // Is type a valid template type (and valid index into mTemplates)
    bool isValidTemplateType(int32_t type);

    // Identifier used by framework to distinguish cameras
    const int32_t mId;
    // camera_metadata structure containing static characteristics
    camera_metadata_t *mStaticInfo;
    // Busy flag indicates camera is in use
    bool mBusy;
    // Camera device operations handle shared by all devices
    const static camera3_device_ops_t sOps;
    // Methods used to call back into the framework
    const camera3_callback_ops_t *mCallbackOps;
    // Lock protecting the Camera object for modifications
    android::Mutex mDeviceLock;
    // Lock protecting only static camera characteristics, which may
    // be accessed without the camera device open
    static android::Mutex sStaticInfoLock;
    // Array of handles to streams currently in use by the device
    sp<Stream> *mStreams;                //> holds Stream smart pointers
    // Number of streams in mStreams
    int32_t mNumStreams;
    // Static array of standard camera settings templates
    sp<Metadata> mTemplates[CAMERA3_TEMPLATE_COUNT];
    // Most recent request settings seen, memoized to be reused
    sp<Metadata> mSettings;

protected:
    sp<VideoStream> mVideoStream;      //> holds the VideoStream smart pointer
    autoState m3aState;
    uint8_t *mTmpBuf;  // used for soft csc temp buffer
};

The class holds sp<VideoStream> mVideoStream, a strong reference to a VideoStream object.
Each concrete camera type (UVC, CSI, MIPI, or a virtual camera device) must subclass Camera; the virtual functions declared in the class are the hooks those subclasses implement.
Camera also inherits from camera_info and SensorData, which are defined as follows:
@vendor/nxp-opensource/imx/libcamera3/CameraUtils.h

class SensorData
{
public:
    SensorData();
    virtual ~SensorData();

    virtual int getCaptureMode(int width, int height);
    virtual int getFps(int width, int height, int defValue);

    virtual PixelFormat getPreviewPixelFormat()
    {
        return mPreviewPixelFormat;
    }

    PixelFormat getPicturePixelFormat() {
        return mPicturePixelFormat;
    }

    PixelFormat getMatchFormat(int *sfmt, int  slen,
                               int *dfmt, int  dlen);

    int32_t getSensorFormat(int32_t availFormat);

protected:
    int32_t changeSensorFormats(int *src, int *dst, int len);
    status_t adjustPreviewResolutions();
    status_t setMaxPictureResolutions();

public:
    int mPreviewResolutions[MAX_RESOLUTION_SIZE];
    int mPreviewResolutionCount;
    int mPictureResolutions[MAX_RESOLUTION_SIZE];
    int mPictureResolutionCount;
    int mAvailableFormats[MAX_SENSOR_FORMAT];
    int mAvailableFormatCount;
    nsecs_t mMinFrameDuration;
    nsecs_t mMaxFrameDuration;
    int mTargetFpsRange[MAX_FPS_RANGE];
    int mMaxWidth;
    int mMaxHeight;
    float mPhysicalWidth;
    float mPhysicalHeight;
    float mFocalLength;
    int mMaxJpegSize;

    // these values are the max size the sensor can support.
    int32_t mActiveArrayWidth;
    int32_t mActiveArrayHeight;
    int32_t mPixelArrayWidth;
    int32_t mPixelArrayHeight;

    // preview and picture format.
    PixelFormat mPicturePixelFormat;
    PixelFormat mPreviewPixelFormat;

    // vpu and capture limitation.
    int mVpuSupportFmt[MAX_VPU_SUPPORT_FORMAT];
    int mPictureSupportFmt[MAX_PICTURE_SUPPORT_FORMAT];

    int mSensorFormats[MAX_SENSOR_FORMAT];
    int mSensorFormatCount;
    char mDevPath[CAMAERA_FILENAME_LENGTH];
};

@hardware/libhardware/include/hardware/camera_common.h

typedef struct camera_metadata camera_metadata_t;

typedef struct camera_info {
    /**
     * The direction that the camera faces to. See system/core/include/system/camera.h
     * for camera facing definitions.
     *
     * Version information (based on camera_module_t.common.module_api_version):
     *
     * CAMERA_MODULE_API_VERSION_2_3 or lower:
     *
     *   It should be CAMERA_FACING_BACK or CAMERA_FACING_FRONT.
     *
     * CAMERA_MODULE_API_VERSION_2_4 or higher:
     *
     *   It should be CAMERA_FACING_BACK, CAMERA_FACING_FRONT or
     *   CAMERA_FACING_EXTERNAL.
     */
    int facing;

    int orientation;

    uint32_t device_version;

    const camera_metadata_t *static_camera_characteristics;  //> the static camera characteristics metadata

    int resource_cost;

    char** conflicting_devices;

    size_t conflicting_devices_length;

} camera_info_t;

With the definitions around the Camera class covered, we can firm up its design logic by looking at the constructor and the factory method that creates cameras. The constructor:
@vendor/nxp-opensource/imx/libcamera3/Camera.cpp

Camera::Camera(int32_t id, int32_t facing, int32_t orientation, char *path)
    : usemx6s(0), mId(id), mStaticInfo(NULL), mBusy(false), mCallbackOps(NULL), mStreams(NULL), mNumStreams(0), mTmpBuf(NULL)
{
    ALOGI("%s:%d: new camera device", __func__, mId);
    android::Mutex::Autolock al(mDeviceLock);

    camera_info::facing = facing;
    camera_info::orientation = orientation;
    strncpy(SensorData::mDevPath, path, CAMAERA_FILENAME_LENGTH);
    SensorData::mDevPath[CAMAERA_FILENAME_LENGTH-1] = 0;

    memset(&mDevice, 0, sizeof(mDevice));
    mDevice.common.tag = HARDWARE_DEVICE_TAG;
#if ANDROID_SDK_VERSION >= 28
    mDevice.common.version = CAMERA_DEVICE_API_VERSION_3_5;
#else
    mDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
#endif
    mDevice.common.close = close_device;
    mDevice.ops = const_cast<camera3_device_ops_t *>(&sOps);
    mDevice.priv = this;
    memset(&m3aState, 0, sizeof(m3aState));
}

The constructor zeroes mDevice, stamps the device API version, installs the device operations table with mDevice.ops = const_cast<camera3_device_ops_t *>(&sOps), and stores this in mDevice.priv.
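
How do the framework's later calls through sOps get back to this particular object? The usual pattern, shown in the sketch below, is a set of static trampoline functions that recover the Camera* from mDevice.priv. This follows the AOSP default camera HAL; the member names called here (initializeDev, configureStreams) are illustrative, not necessarily those in the NXP tree.

// Sketch: static trampolines recover the Camera* stashed in mDevice.priv.
static Camera *camdev_to_camera(const camera3_device_t *dev)
{
    return reinterpret_cast<Camera *>(dev->priv);
}

static int initialize(const camera3_device_t *dev,
                      const camera3_callback_ops_t *callback_ops)
{
    return camdev_to_camera(dev)->initializeDev(callback_ops);  // illustrative member name
}

static int configure_streams(const camera3_device_t *dev,
                             camera3_stream_configuration_t *stream_list)
{
    return camdev_to_camera(dev)->configureStreams(stream_list);  // illustrative member name
}

const camera3_device_ops_t Camera::sOps = {
    .initialize        = initialize,
    .configure_streams = configure_streams,
    // ... the remaining camera3 ops are wired the same way
};

With the ops table wired back to the object, let's look at the createCamera factory: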

Camera* Camera::createCamera(int32_t id, char* name, int32_t facing,
                             int32_t orientation, char* path)
{
    Camera* device = NULL;

    android::Mutex::Autolock al(sStaticInfoLock);

    if (strstr(name, IMX8_OV5640_SENSOR_NAME)) {
        ALOGI("create id:%d imx8 ov5640 camera device", id);
        device = new Ov5640Imx8Q(id, facing, orientation, path);
    }
    else if (strstr(name, OV5640MIPI_SENSOR_NAME)) {
        ALOGI("create id:%d ov5640 mipi device", id);
        device = new Ov5640Mipi(id, facing, orientation, path);
    }
    else if (strstr(name, OV5642CSI_SENSOR_NAME)) {
        ALOGI("create id:%d ov5642 csi device", id);
        device = new Ov5642Csi(id, facing, orientation, path);
    }
    else if (strstr(name, OV5640CSI_SENSOR_NAME)) {
        ALOGI("create id:%d ov5640 csi device", id);
        device = new Ov5640Csi(id, facing, orientation, path);
    }
    else if (strstr(name, UVC_SENSOR_NAME)) {
#ifdef BOARD_HAVE_VPU
        char uvcMJPGStr[92];
        int configUseMJPG = 0;

        property_get(UVC_USE_MJPG, uvcMJPGStr, DEFAULT_ERROR_NAME_str);

        if (uvcMJPGStr[0] == DEFAULT_ERROR_NAME)
            configUseMJPG = 0;
        else
            configUseMJPG = atoi(uvcMJPGStr);

        if(configUseMJPG == 0) {
            ALOGI("create id:%d usb camera device", id);
            device = UvcDevice::newInstance(id, name, facing, orientation, path);
        } else {
            ALOGI("DeviceAdapter: Create uvc device, config to use MJPG");
            device = new UvcMJPGDevice(id, facing, orientation, path);
        }
#else
#ifdef IMX7ULP_UVC
        ALOGI("create id:%d imx7ulp usb camera device", id);
        device = Uvc7ulpDevice::newInstance(id, name, facing, orientation, path);
#else
        ALOGI("create id:%d usb camera device", id);
        device = UvcDevice::newInstance(id, name, facing, orientation, path);
#endif
#endif
    }
    else if (strstr(name, OV5640_SENSOR_NAME)) {
#ifdef VADC_TVIN
        ALOGI("create id:%d TVin device for auto_sx", id);
        device = new VADCTVINDevice(id, facing, orientation, path);
#else
        char boardName[CAMERA_SENSOR_LENGTH];
        memset(boardName, 0, sizeof(boardName));
        property_get("ro.board.platform", boardName, DEFAULT_ERROR_NAME_str);

        if (strstr(boardName, IMX8_BOARD_NAME)) {
            ALOGI("create id:%d 5640-csi-8mq device", id);
            device = new Ov5640Csi8MQ(id, facing, orientation, path);
        } else if (strstr(boardName, IMX7_BOARD_NAME)) {
            ALOGI("create id:%d 5640-csi-7d device", id);
            device = new Ov5640Csi7D(id, facing, orientation, path);
            device->usemx6s = 1;
        } else {
            ALOGI("create id:%d 5640-csi device", id);
            device = new Ov5640Csi(id, facing, orientation, path);
            device->usemx6s = 1;
        }
#endif
    }
    else if (strstr(name, ADV7180_TVIN_NAME)) {
        ALOGI("create id:%d adv7180 tvin device", id);
        device = new TVINDevice(id, facing, orientation, path);
    } else if (strstr(name, MAX9286MIPI_SENSOR_NAME)) {
        ALOGI("create id:%d Max9286Mipi device", id);
        device = new Max9286Mipi(id, facing, orientation, path);
    } else if (strstr(name, LOOPBACK_SENSOR_NAME)) {               //> the virtual camera object is created here
        ALOGI("create id:%d VirtCamera device", id);
        device = new VirtCamera(id, facing, orientation, path);
    }
    else {
        ALOGE("doesn't support camera id:%d %s", id, name);
    }

    return device;
}

createCamera makes the design plain: each concrete camera type and model subclasses Camera and overrides its virtual methods to supply that sensor's specific behavior.
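
To make that contract concrete, here is a minimal hypothetical subclass; it is not from the NXP tree, and the class name and values are purely illustrative. Only the pure virtual initSensorStaticData must be implemented; the stream type is chosen in the constructor.

// Hypothetical minimal subclass -- illustrative only, not NXP code.
class MyVirtualSensor : public Camera
{
public:
    MyVirtualSensor(int32_t id, int32_t facing, int32_t orientation, char *path)
        : Camera(id, facing, orientation, path)
    {
        // Choose a concrete stream type; the virtual camera in this
        // series uses an MMAPStream (see section 2).
        mVideoStream = new MMAPStream(this);
    }

protected:
    status_t initSensorStaticData() override
    {
        // Normally probed from the driver via VIDIOC_ENUM_FMT /
        // VIDIOC_ENUM_FRAMESIZES; hard-coded here for illustration.
        mMaxWidth  = 1280;
        mMaxHeight = 720;
        mPreviewPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
        mPicturePixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
        return 0;
    }
};

A matching strstr branch in createCamera is then all it takes to make the new type discoverable.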

Summary:
(1). When are the concrete camera objects actually created?
In CameraHAL.cpp:

static hw_module_methods_t gCameraModuleMethods = {
    .open = open_dev
};

camera_module_t HAL_MODULE_INFO_SYM __attribute__ ((visibility("default"))) = {
    .common = {
       .tag                = HARDWARE_MODULE_TAG,
       .module_api_version = CAMERA_MODULE_API_VERSION_2_2,
       .hal_api_version    = HARDWARE_HAL_API_VERSION,
       .id                 = CAMERA_HARDWARE_MODULE_ID,
       .name               = "Default Camera HAL",
       .author             = "The Android Open Source Project",
       .methods            = &gCameraModuleMethods,
       .dso                = NULL,
       .reserved           = {0},
    },
   .get_number_of_cameras = get_number_of_cameras,
   .get_camera_info       = get_camera_info,
   .set_callbacks         = set_callbacks,
   .get_vendor_tag_ops    = get_vendor_tag_ops,
   .open_legacy           = NULL,
   .set_torch_mode        = NULL,
   .init                  = NULL,
   .reserved              = {0},
};

CameraHAL.cpp is the entry point of the camera.imx8.so library. When the system loads the library, a CameraHAL instance is constructed, and its constructor does the following:

CameraHAL::CameraHAL()
  : mCameraCount(0),
    mCallbacks(NULL)
{
    // Allocate camera array and instantiate camera devices
    mCameras = new Camera*[MAX_CAMERAS];
    memset(mSets, 0, sizeof(mSets));
    memset(mCameras, 0, MAX_CAMERAS * sizeof(Camera*));

    // enumerate all camera sensors.
    //> enumerate every camera sensor registered in the system and keep the qualifying ones;
    //> the front- and back-facing cameras are picked out here
    enumSensorSet();

    ALOGI("Begin Create camera \n");
    // check if camera exists.
    for (int32_t index=0; index<MAX_CAMERAS; index++) {
        if (!mSets[index].mExisting) {
            continue;
        }
        //> Camera::createCamera is invoked here
        mCameras[index] = Camera::createCamera(index, mSets[index].mSensorName,
                                mSets[index].mFacing, mSets[index].mOrientation,
                                mSets[index].mDevPath);

        if (mCameras[index] == NULL) {
            // camera sensor is not supported now.
            // So, camera count should not change.
            ALOGW("Error: camera:%d, %s create failed", index,
                                 mSets[index].mSensorName);
        }
        else {
            mCameraCount++;
        }
    }
    ALOGI("camera number is %d", mCameraCount);

    mHotplugThread = new HotplugThread(this);
}
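
What triggers this constructor is a file-scope instance defined in the same translation unit; the C entry points registered in gCameraModuleMethods and HAL_MODULE_INFO_SYM then forward into it. A sketch of that glue, following the AOSP default camera HAL pattern; the member-function names are illustrative:

// File-scope instance: constructed when camera.imx8.so is loaded,
// which is what runs the CameraHAL constructor above.
static CameraHAL gCameraHAL;

extern "C" {

static int get_number_of_cameras()
{
    return gCameraHAL.getNumberOfCameras();     // illustrative member name
}

static int get_camera_info(int id, struct camera_info *info)
{
    return gCameraHAL.getCameraInfo(id, info);  // illustrative member name
}

static int open_dev(const hw_module_t *mod, const char *name, hw_device_t **dev)
{
    return gCameraHAL.open(mod, name, dev);     // illustrative member name
}

} // extern "C"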

(2). Who manages the camera objects in the system?
CameraProvider does. CameraProvider is a system service that runs from boot; it loads the camera.imx8.so library:

CameraProvider::CameraProvider() :
        camera_module_callbacks_t({sCameraDeviceStatusChange,
                                   sTorchModeStatusChange}) {
    mInitFailed = initialize();
}

bool CameraProvider::initialize() {
    camera_module_t *rawModule;
    ALOGV(" HW_get_module module..."); 
    int err = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
            (const hw_module_t **)&rawModule);
    if (err < 0) {
        ALOGE("Could not load camera HAL module: %d (%s)", err, strerror(-err));
        return true;
    }
    ALOGV("Begin create camera module..."); 
    mModule = new CameraModule(rawModule);
    err = mModule->init();
    if (err != OK) {
        ALOGE("Could not initialize camera HAL module: %d (%s)", err, strerror(-err));
        mModule.clear();
        return true;
    }
    ALOGV("Loaded \"%s\" camera module", mModule->getModuleName()); 
    ...... //> portions of the code omitted
    mNumberOfLegacyCameras = mModule->getNumberOfCameras();
    ALOGV("Initialized '%d' camera module ", mNumberOfLegacyCameras); 
    ...... //> portions of the code omitted
    ALOGV("End Initialized camera module..."); 
    return false; // mInitFailed
}

The detailed call flow for this part was traced in the author's earlier posts, so this is only a brief recap.

2>. Design of the Stream class

The code abstracts streams into a base Stream class:

class Stream : public LightRefBase<Stream>
{
public:
    Stream(Camera* camera);                              //> the constructor takes the owning Camera object
    Stream(int id, camera3_stream_t *s, Camera* camera);
    virtual ~Stream();

    // validate that astream's parameters match this stream's parameters
    bool isValidReuseStream(int id, camera3_stream_t *s);

    int32_t processCaptureBuffer(StreamBuffer& buf,
                                 sp<Metadata> meta);

    void setCurrentBuffer(StreamBuffer* out) {mCurrent = out;}
    virtual void* getG2dHandle() {return NULL;}
    bool isPreview() {return mPreview;}
    bool isJpeg() {return mJpeg;}
    bool isCallback() {return mCallback;}
    bool isRecord() {return mRecord;}
    uint32_t width() {return mWidth;}
    uint32_t height() {return mHeight;}
    int32_t format() {return mFormat;}
    uint32_t bufferNum() {return mNumBuffers;}
    camera3_stream_t* stream() {return mStream;}
    void setReuse(bool reuse) {mReuse = reuse;}
    void setFps(uint32_t fps) {mFps = fps;}
    uint32_t fps() {return mFps;};

    int getType();
    bool isInputType();
    bool isOutputType();
    bool isRegistered();
    void dump(int fd);

protected:
    int32_t processJpegBuffer(StreamBuffer& src,
                              sp<Metadata> meta);
    int32_t processFrameBuffer(StreamBuffer& src,
                               sp<Metadata> meta);

protected:
    // This stream is being reused. Used in stream configuration passes
    bool mReuse;
    bool mPreview;
    bool mJpeg;
    bool mCallback;
    bool mRecord;
    // The camera device id this stream belongs to
    const int mId;
    // Handle to framework's stream, used as a cookie for buffers
    camera3_stream_t *mStream;
    // Stream type: CAMERA3_STREAM_* (see <hardware/camera3.h>)
    const int mType;
    // Width in pixels of the buffers in this stream
    uint32_t mWidth;
    // Height in pixels of the buffers in this stream
    uint32_t mHeight;
    // Gralloc format: HAL_PIXEL_FORMAT_* (see <system/graphics.h>)
    int32_t mFormat;
    // Gralloc usage mask : GRALLOC_USAGE_* (see <hardware/gralloc.h>)
    uint32_t mUsage;
    // frame rate.
    uint32_t mFps;
    // Max simultaneous in-flight buffers for this stream
    uint32_t mNumBuffers;
    // Buffers have been registered for this stream and are ready
    bool mRegistered;
    // Array of handles to buffers currently in use by the stream
    StreamBuffer* mBuffers[MAX_STREAM_BUFFERS];
    // Lock protecting the Stream object for modifications
    android::Mutex mLock;

    bool mCustomDriver;
    StreamBuffer* mCurrent;             //> StreamBuffer currently being filled
    Camera* mCamera;                    //> owning Camera object
    sp<JpegBuilder> mJpegBuilder;
};

The VideoStream subclass is defined as follows:

class VideoStream : public Stream
{
public:
    VideoStream(Camera* device);
    virtual ~VideoStream();
    void destroyStream();

    // configure device stream.
    int32_t configure(sp<Stream> stream);
    //> send a capture request for the stream.
    int32_t requestCapture(sp<CaptureRequest> req);

    // open/close device stream.
    int32_t openDev(const char* name);
    int32_t closeDev();
    int32_t flushDev();

    void setOmitFrameCount(uint32_t omitCount) { mOmitFrmCount = omitCount; }
private:
    // message type.
    static const int32_t MSG_CONFIG = 0x100;
    static const int32_t MSG_FRAME = 0x103;
    static const int32_t MSG_CLOSE = 0x104;
    static const int32_t MSG_EXIT  = 0x105;
    static const int32_t MSG_FLUSH = 0x106;

    // device stream state.
    static const int32_t STATE_INVALID = 0x201;
    static const int32_t STATE_CONFIG = 0x202;
    static const int32_t STATE_START = 0x203;
    static const int32_t STATE_STOP  = 0x204;
    static const int32_t STATE_ERROR  = 0x205;

protected:
    // handle configure message internally.
    int32_t handleConfigureLocked(ConfigureParam* params);
    virtual int32_t onDeviceConfigureLocked() = 0;
    // handle start message internally.
    int32_t handleStartLocked(bool force);
    virtual int32_t onDeviceStartLocked() = 0;
    // handle stop message internally.
    int32_t handleStopLocked(bool force);
    virtual int32_t onDeviceStopLocked() = 0;
    // handle flush.
    int32_t handleFlushLocked();
    virtual int32_t onFlushLocked();
    // handle frame message internally.
    int32_t handleCaptureFrame();

    //> process a capture request with the lock held.
    int32_t processCaptureRequest(StreamBuffer& src, sp<CaptureRequest> req);
    //> apply the advanced capture settings with the lock held.
    int32_t processCaptureSettings(sp<CaptureRequest> req);

    // get buffer from V4L2.
    StreamBuffer* acquireFrameLocked();
    virtual int32_t onFrameAcquireLocked() = 0;
    // put buffer back to V4L2.
    int32_t returnFrameLocked(StreamBuffer& buf);
    virtual int32_t onFrameReturnLocked(int32_t index, StreamBuffer& buf) = 0;
    // get buffer index.
    int32_t getBufferIndexLocked(StreamBuffer& buf);

    // allocate buffers.
    virtual int32_t allocateBuffersLocked() = 0;
    // free buffers.
    virtual int32_t freeBuffersLocked() = 0;

    int32_t handleMessage();
    int32_t flushDevLocked();

private:
    //> message-queue handling thread for this stream
    class MessageThread : public Thread
    {
    public:
        MessageThread(VideoStream *device)
            : Thread(false), mStream(device)
            {}

        virtual void onFirstRef() {
            run("MessageThread", PRIORITY_URGENT_DISPLAY);
        }

        virtual status_t readyToRun() {
            return 0;
        }

        virtual bool threadLoop() {
            int ret = mStream->handleMessage();
            if (ret != 0) {
                ALOGI("%s exit...", __func__);
                mStream.clear();
                mStream = NULL;
                return false;
            }

            // loop until we need to quit
            return true;
        }

    private:
        sp<VideoStream> mStream;   //> strong reference back to the owning VideoStream
    };

protected:
    CMessageQueue mMessageQueue;            //> message queue
    sp<MessageThread> mMessageThread;      //> message-handling thread
    int32_t mState;

    List< sp<CaptureRequest> > mRequests;  //> pending capture-request queue
    int32_t mChanged;

    // camera dev node.
    int32_t mDev;
    uint32_t mAllocatedBuffers;
    enum v4l2_memory mV4l2MemType;
    enum v4l2_buf_type mV4l2BufType;
    uint32_t mOmitFrames;
    uint32_t mOmitFrmCount;
};

VideoStream abstracts the streaming behavior of a camera; the concrete stream types subclass it and implement its pure virtual methods. Those concrete types are MMAPStream and DMAStream: DMAStream mainly serves hardware cameras, while the author's virtual camera uses MMAPStream. Its definition:
@vendor/nxp-opensource/imx/libcamera3/MMAPStream.h

// stream uses memory-mapped buffers allocated in kernel space.
class MMAPStream : public VideoStream
{
public:
    MMAPStream(Camera* device);
    MMAPStream(Camera *device, bool mplane);
    virtual ~MMAPStream();

    // configure device.
    virtual int32_t onDeviceConfigureLocked();
    // start device.
    virtual int32_t onDeviceStartLocked();
    // stop device.
    virtual int32_t onDeviceStopLocked();

    // get buffer from V4L2.
    virtual int32_t onFrameAcquireLocked();
    // put buffer back to V4L2.
    virtual int32_t onFrameReturnLocked(int32_t index, StreamBuffer& buf);

    // allocate buffers.
    virtual int32_t allocateBuffersLocked() {return 0;}
    // free buffers.
    virtual int32_t freeBuffersLocked() {return 0;}

private:
    bool mPlane;
};

Selected source excerpts follow.

(1). Requesting capture of a video frame

@vendor/nxp-opensource/imx/libcamera3/VideoStream.cpp
We start where the user requests a capture; this code lives in VideoStream.cpp:

int32_t VideoStream::requestCapture(sp<CaptureRequest> req)
{
    ALOGV("%s : %s() %d \n", __FILE__, __func__, __LINE__);
    Mutex::Autolock lock(mLock);

    mRequests.push_back(req);

    mMessageQueue.postMessage(new CMessage(MSG_FRAME, 0));

    return 0;
}
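
CMessageQueue and CMessage are NXP helper classes whose source is not reproduced here, but the contract used above (postMessage from any thread, waitMessage blocking the message thread until work arrives) is that of an ordinary condition-variable queue. A minimal sketch of that contract, making no claim about NXP's actual implementation:

#include <condition_variable>
#include <list>
#include <memory>
#include <mutex>

// Minimal sketch of the queue contract -- NOT NXP's CMessageQueue.
template <typename Msg>
class SimpleMessageQueue {
public:
    // Called from any thread, e.g. requestCapture() above.
    void postMessage(std::shared_ptr<Msg> msg) {
        std::lock_guard<std::mutex> lock(mLock);
        mQueue.push_back(std::move(msg));
        mCond.notify_one();
    }

    // Called from the message thread; blocks until a message arrives.
    std::shared_ptr<Msg> waitMessage() {
        std::unique_lock<std::mutex> lock(mLock);
        mCond.wait(lock, [this] { return !mQueue.empty(); });
        std::shared_ptr<Msg> msg = mQueue.front();
        mQueue.pop_front();
        return msg;
    }

private:
    std::mutex mLock;
    std::condition_variable mCond;
    std::list<std::shared_ptr<Msg>> mQueue;
};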

The message lands in the VideoStream object's internal mMessageThread. As VideoStream.h already showed, the thread loop handles it by calling VideoStream::handleMessage:

int32_t VideoStream::handleMessage()
{
    int32_t ret = 0;
    ALOGV("%s : %s() %d \n", __FILE__, __func__, __LINE__);
    sp<CMessage> msg = mMessageQueue.waitMessage();
    if (msg == 0) {
        ALOGE("get invalid message");
        return -1;
    }

    switch (msg->what) {
        case MSG_CONFIG: {
            Mutex::Autolock lock(mLock);
            ConfigureParam* params = (ConfigureParam*)(uintptr_t)msg->arg0;
            ret = handleConfigureLocked(params);
            if (params != NULL) {
                delete params;
            }
        }
        break;

        case MSG_CLOSE: {
            Mutex::Autolock lock(mLock);
            ret = handleStopLocked(true);
            if (mDev > 0) {
                close(mDev);
                mDev = -1;
            }
        }
        break;

        case MSG_FRAME: {
            Mutex::Autolock lock(mLock);
            // start the device automatically if needed.
            if (mState != STATE_START) {
                ALOGV("state:0x%x when handle frame message", mState);
                ret = handleStartLocked(false);
                if (ret != 0) {
                    ALOGE("%s handleStartLocked failed", __func__);
                    return ret;
                }
            }

        }
        //> invoke the capture path; note it runs outside the scoped lock
        //> above, because handleCaptureFrame() takes mLock itself
        ret = handleCaptureFrame();
        break;

        case MSG_EXIT: {
            Mutex::Autolock lock(mLock);
            ALOGI("capture thread exit...");
            if (mState == STATE_START) {
                handleStopLocked(true);
            }

            ret = -1;
        }
        break;

        case MSG_FLUSH: {
            Mutex::Autolock lock(mLock);
            if (mState == STATE_START) {
                handleFlushLocked();
            }
        }
        break;

        default: {
            ALOGE("%s invalid message what:%d", __func__, msg->what);
        }
        break;
    }

    return ret;
}

The frame-capture method:

int32_t VideoStream::handleCaptureFrame()
{
    int32_t ret = 0;
    ALOGV("%s : %s() %d \n", __FILE__, __func__, __LINE__);

    List< sp<CaptureRequest> >::iterator cur;
    sp<CaptureRequest> req = NULL;
    StreamBuffer *buf = NULL;
    {
        Mutex::Autolock lock(mLock);
        if (mRequests.empty()) {
            return 0;
        }

        cur = mRequests.begin();
        req = *cur;
    }
    // apply the advanced capture settings first.
    ret = processCaptureSettings(req);
    if (ret != 0) {
        Mutex::Autolock lock(mLock);
        mRequests.erase(cur);
        ALOGE("processSettings failed");
        return 0;
    }

    { //> 1. call into the subclass implementation
        Mutex::Autolock lock(mLock);
        buf = acquireFrameLocked();
    }

    if (buf == NULL) {
        ALOGE("acquireFrameLocked failed");
        req->onCaptureError();
        Mutex::Autolock lock(mLock);
        mRequests.erase(cur);
        return 0;
    }

    ret = processCaptureRequest(*buf, req);
    if (ret != 0) {
        Mutex::Autolock lock(mLock);
        returnFrameLocked(*buf);
        ALOGE("processRequest failed");
        return 0;
    }

    Mutex::Autolock lock(mLock);
    mRequests.erase(cur);
    returnFrameLocked(*buf);

    return 0;
}

int32_t VideoStream::processCaptureRequest(StreamBuffer& src,
                         sp<CaptureRequest> req)
{
    int32_t ret = 0;
    ALOGV("%s : %s() %d \n", __FILE__, __func__, __LINE__);
    for (uint32_t i=0; i<req->mOutBuffersNumber; i++) {
        StreamBuffer* out = req->mOutBuffers[i];
        sp<Stream>& stream = out->mStream;
        // stream to process buffer.
        stream->setCurrentBuffer(out);
        stream->processCaptureBuffer(src, req->mSettings);
        stream->setCurrentBuffer(NULL);
        //> 2. notify the CaptureRequest of completion
        ret = req->onCaptureDone(out);
        if (ret != 0) {
            return ret;
        }
    }

    return ret;
}

(1.1). Calling into the subclass implementation

StreamBuffer* VideoStream::acquireFrameLocked()
{
    int32_t index = onFrameAcquireLocked();
    if (index >= MAX_STREAM_BUFFERS || index < 0) {
        ALOGE("%s: invalid index %d", __func__, index);
        return NULL;
    }

    return mBuffers[index];
}

(1.2). The subclass hook onFrameAcquireLocked()

acquireFrameLocked, reached from VideoStream::handleCaptureFrame above, delegates to the subclass's onFrameAcquireLocked. The MMAPStream implementation:
@vendor/nxp-opensource/imx/libcamera3/MMAPStream.cpp

int32_t MMAPStream::onFrameAcquireLocked()
{
    ALOGV("%s", __func__);
    int32_t ret = 0;
    struct v4l2_buffer cfilledbuffer;
    struct v4l2_plane planes;
    memset(&planes, 0, sizeof(struct v4l2_plane));

capture_data:
    memset(&cfilledbuffer, 0, sizeof (cfilledbuffer));

    if (mPlane) {
        cfilledbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        cfilledbuffer.m.planes = &planes;
        cfilledbuffer.length = 1;
    } else {
        cfilledbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    }

    cfilledbuffer.memory = V4L2_MEMORY_MMAP;
    //> dequeue a filled buffer from the driver via the V4L2 VIDIOC_DQBUF ioctl.
    ret = ioctl(mDev, VIDIOC_DQBUF, &cfilledbuffer);
    if (ret < 0) {
        ALOGE("%s: VIDIOC_DQBUF Failed", __func__);
        return -1;
    }

    if (mOmitFrames > 0) {
        ALOGI("%s omit frame", __func__);
        ret = ioctl(mDev, VIDIOC_QBUF, &cfilledbuffer);
        if (ret < 0) {
          ALOGE("%s VIDIOC_QBUF Failed", __func__);
          return BAD_VALUE;
        }
        mOmitFrames--;
        goto capture_data;
    }
    return cfilledbuffer.index;
}
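
onFrameAcquireLocked only dequeues; the mmap'ed buffers it indexes were prepared earlier by the configure/start hooks. For orientation, here is the standard single-planar V4L2 MMAP sequence that MMAPStream's onDeviceConfigureLocked and onDeviceStartLocked implement, condensed into one sketch with error handling trimmed and an illustrative pixel format:

#include <linux/videodev2.h>
#include <stdint.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

// Condensed V4L2 MMAP setup: set format, request kernel buffers,
// map and queue each one, then start streaming.
static int setupMmapCapture(int fd, uint32_t width, uint32_t height)
{
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;  // illustrative format
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
        return -1;

    struct v4l2_requestbuffers req;
    memset(&req, 0, sizeof(req));
    req.count  = 4;                               // illustrative buffer count
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0)
        return -1;

    for (uint32_t i = 0; i < req.count; i++) {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = i;
        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
            return -1;

        // Map the kernel buffer into user space; the HAL records this
        // pointer (cf. StreamBuffer::mVirtAddr in section 3).
        void *vaddr = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
                           MAP_SHARED, fd, buf.m.offset);
        if (vaddr == MAP_FAILED)
            return -1;

        // Queue the buffer so the driver can start filling it.
        if (ioctl(fd, VIDIOC_QBUF, &buf) < 0)
            return -1;
    }

    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    return ioctl(fd, VIDIOC_STREAMON, &type);     // start the capture stream
}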

3>. The CaptureRequest class

Video capture in Android's camera API2 is driven by CaptureRequest objects; here is a brief pass over the definition and how it is used.
@vendor/nxp-opensource/imx/libcamera3/CameraUtils.h

class CaptureRequest : public LightRefBase<CaptureRequest>
{
public:
    CaptureRequest();
    ~CaptureRequest();

    void init(camera3_capture_request* request, camera3_callback_ops* callback,
              sp<Metadata> settings);
    int32_t onCaptureDone(StreamBuffer* buffer);
    int32_t onSettingsDone(sp<Metadata> meta);
    int32_t onCaptureError();

public:
    uint32_t mFrameNumber;
    sp<Metadata> mSettings;
    uint32_t mOutBuffersNumber;
    StreamBuffer* mOutBuffers[MAX_STREAM_BUFFERS];

    camera3_capture_request* mRequest;
    camera3_callback_ops *mCallbackOps;
};

The related source:
@vendor/nxp-opensource/imx/libcamera3/CameraUtils.cpp

void StreamBuffer::initialize(buffer_handle_t* buf_h)
{
    if (buf_h == NULL) {
        return;
    }

    fsl::Memory *handle = (fsl::Memory *)(*buf_h);
    mBufHandle = buf_h;
    mVirtAddr  = (void *)handle->base;
    mPhyAddr   = handle->phys;
    mSize      = handle->size;

    //for uvc jpeg stream
    mpFrameBuf  = NULL;
}

void CaptureRequest::init(camera3_capture_request* request,
              camera3_callback_ops* callback,
              sp<Metadata> settings)
{
    mFrameNumber = request->frame_number;
    mSettings = settings;
    mOutBuffersNumber = request->num_output_buffers;
    mRequest = request;
    mCallbackOps = callback;

    ALOGV("CaptureRequest fm:%d, bn:%d", mFrameNumber, mOutBuffersNumber);
    for (uint32_t i = 0; i < request->num_output_buffers; i++) {
        mOutBuffers[i] = new StreamBuffer();
        mOutBuffers[i]->mStream = reinterpret_cast<Stream*>(
                            request->output_buffers[i].stream->priv);
        mOutBuffers[i]->mAcquireFence = request->output_buffers[i].acquire_fence;
        mOutBuffers[i]->initialize(request->output_buffers[i].buffer);
    }
}

During init, the physical and virtual addresses behind each output buffer handle are recorded and paired with a StreamBuffer. Later, when a frame is captured and req->onCaptureDone runs, that content is handed back to the caller's listener through the callback ops.
onCaptureDone:

int32_t CaptureRequest::onCaptureDone(StreamBuffer* buffer)
{
    if (buffer == NULL || buffer->mBufHandle == NULL || mCallbackOps == NULL) {
        return 0;
    }

    camera3_stream_buffer_t cameraBuffer;
    cameraBuffer.stream = buffer->mStream->stream();
    cameraBuffer.buffer = buffer->mBufHandle;
    cameraBuffer.status = CAMERA3_BUFFER_STATUS_OK;
    cameraBuffer.acquire_fence = -1;
    cameraBuffer.release_fence = -1;

    camera3_capture_result_t result;
    memset(&result, 0, sizeof(result));
    result.frame_number = mFrameNumber;
    result.result = NULL;
    result.num_output_buffers = 1;
    result.output_buffers = &cameraBuffer;

    // For HAL3.2 or above, If HAL doesn't support partial, it must always set
    // partial_result to 1 when metadata is included in this result.
    result.partial_result = 1;

    ALOGV("onCaptureDone fm:%d", mFrameNumber);
    //> the capture result is delivered back to the caller via the callback.
    mCallbackOps->process_capture_result(mCallbackOps, &result);
    return 0;
}

This article has walked through NXP's CameraHAL design. In summary:
(1). CameraHAL manages every type of physical camera through the Camera abstraction; each physical camera is a Camera subclass implementing that sensor's specific behavior.

(2). Camera streams are abstracted as Stream ==> VideoStream; concrete streams such as DMAStream and MMAPStream implement it, divided by how frame data is moved: DMA versus MMAP memory mapping.

(3). CaptureRequest wraps a camera3 capture: every capture request is queued on the List< sp<CaptureRequest> > mRequests list, and results travel straight back to the caller through the callback with no intermediate data copy; the caller ultimately reads the buffer's virtual address out of the CaptureRequest.

(4). Each Camera object holds a stream of its particular type, the stream holds the CaptureRequest objects, and the stream's internal MessageThread services the user's requests against that camera.

Postscript

Because every vendor implements this camera framework differently, the quoted source is left largely untrimmed so readers can take in as much context as possible. If this article helped you, please leave the author a like as encouragement. Thank you.
