Android P Camera HAL3 Flow Analysis (5)

How do we get the image data after a capture completes in Camera2? The answer is to use an ImageReader: create one, then listen for its image-available events.

    private void setupImageReader() {
        // The "2" means the ImageReader can buffer at most two images at a time
        mImageReader = ImageReader.newInstance(mCaptureSize.getWidth(), mCaptureSize.getHeight(),
                ImageFormat.JPEG, 2);
        // Called back once the capture data is ready, so the picture can be saved
        mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                // Hand the image off to the background saver task
                mCameraHandler.post(new imageSaver(reader.acquireNextImage()));
            }
        }, mCameraHandler);
    }
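
The imageSaver task posted above is not shown in this article. Below is a minimal sketch of what it might look like, assuming it simply writes the JPEG bytes to external storage; the class body and output path are assumptions for illustration, not the original implementation.

    // Hypothetical implementation of the imageSaver task used above.
    // Requires android.media.Image, android.os.Environment, java.nio.ByteBuffer, java.io.*.
    private static class imageSaver implements Runnable {
        private final Image mImage;

        imageSaver(Image image) {
            mImage = image;
        }

        @Override
        public void run() {
            // For ImageFormat.JPEG, plane 0 holds the complete compressed data
            ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
            byte[] bytes = new byte[buffer.remaining()];
            buffer.get(bytes);
            File file = new File(Environment.getExternalStorageDirectory(), "capture.jpg");
            try (FileOutputStream output = new FileOutputStream(file)) {
                output.write(bytes);
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                // Always close the Image: the reader above only holds two at a time,
                // and onImageAvailable stops firing once its queue is full
                mImage.close();
            }
        }
    }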

The result return stage

Capture, preview, and autofocus all use this same callback. When capture() is issued its RequestTag is CAPTURE, for tap-to-focus it is TAP_TO_FOCUS, while a preview request sets no RequestTag at all. So when onCaptureStarted is reached for a preview request nothing extra needs to be done; by that point the preview has already started successfully and can be displayed, with its data sitting in the output buffers.
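
The RequestTag values above are an application-side convention (the names come from the camera app, not from the framework). A hedged sketch of how such a tag, set with CaptureRequest.Builder#setTag, could be checked back in onCaptureStarted:

    // Illustrative only: RequestTag is an app-defined enum, not a framework type.
    enum RequestTag { CAPTURE, TAP_TO_FOCUS }

    // When building the still-capture request: captureBuilder.setTag(RequestTag.CAPTURE);
    private final CameraCaptureSession.CaptureCallback mCaptureCallback =
            new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
            Object tag = request.getTag();
            if (RequestTag.CAPTURE.equals(tag)) {
                // Still capture has started: play the shutter sound/animation, etc.
            } else if (RequestTag.TAP_TO_FOCUS.equals(tag)) {
                // Tap-to-focus request has started
            }
            // Preview requests carry no tag, so nothing needs to be done for them
        }
    };

On the HAL side (MTK implementation here), results travel back through AppStreamMgr's CallbackHandler thread, which drains the queued callback parcels and forwards them to cameraserver over the HIDL callback interface: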

alps/vendor/mediatek/proprietary/hardware/mtkcam3/main/hal/device/3.x/app/AppStreamMgr.CallbackHandler.cpp
threadLoop(){
    // Block until the result path has queued new callback parcels
    if ( ! waitUntilQueue1NotEmpty() ) {
        return true;
    }
    // Move the pending parcels to the working queue in one shot
    mQueue2.splice(mQueue2.end(), mQueue1);
    mQueue1Cond.broadcast();

    performCallback();
    return true;
}
performCallback(){
    {
        // Convert each queued parcel into HIDL shutter/error/metadata/buffer messages
        for (auto const& cbParcel : mQueue2) {
            convertShutterToHidl(cbParcel, vNotifyMsg);
            convertErrorToHidl(cbParcel, vErrorMsg);
            convertMetaToHidl(cbParcel, vCaptureResult, vTempMetadataResult);
            convertImageToHidl(cbParcel, vBufferResult);
        }
        // Prefer sending result metadata through the result FMQ: on success
        // fmqResultSize is set and the inline payload is cleared
        if ( auto pResultMetadataQueue = mResultMetadataQueue ) {
            for (auto& item : vCaptureResult) {
                if ( item.v3_2.result.size() == 0 ) {
                    continue;
                }
                if ( pResultMetadataQueue->availableToWrite() > 0 ) {
                    if ( CC_LIKELY(pResultMetadataQueue->write(item.v3_2.result.data(), item.v3_2.result.size())) ) {
                        item.v3_2.fmqResultSize = item.v3_2.result.size();
                        item.v3_2.result = hidl_vec<uint8_t>(); //resize(0)
                    }
                    else {
                        item.v3_2.fmqResultSize = 0;
                    }
                }
            }
        }
        //  send callbacks
        {
            if ( ! vNotifyMsg.empty() ) {
                hidl_vec<NotifyMsg> vecNotifyMsg;
                vecNotifyMsg.setToExternal(vNotifyMsg.data(), vNotifyMsg.size());
                auto ret1 = mCommonInfo->mDeviceCallback->notify(vecNotifyMsg);
            }
            if ( ! vCaptureResult.empty() ) {
                hidl_vec<V3_4::CaptureResult> vecCaptureResult;
                vecCaptureResult.setToExternal(vCaptureResult.data(), vCaptureResult.size());
                auto ret2 = mCommonInfo->mDeviceCallback->processCaptureResult_3_4(vecCaptureResult);
            }
            if ( ! vErrorMsg.empty() ) {
                hidl_vec<NotifyMsg> vecErrorMsg;
                vecErrorMsg.setToExternal(vErrorMsg.data(), vErrorMsg.size());
                auto ret1 = mCommonInfo->mDeviceCallback->notify(vecErrorMsg);
            }
            if ( ! vBufferResult.empty() ) {
                hidl_vec<V3_4::CaptureResult> vecBufferResult;
                vecBufferResult.setToExternal(vBufferResult.data(), vBufferResult.size());
                auto ret2 = mCommonInfo->mDeviceCallback->processCaptureResult_3_4(vecBufferResult);
            }
        }
        //  free the memory of camera_metadata.
        for (auto& v : vTempMetadataResult) {
            mCommonInfo->mMetadataConverter->freeCameraMetadata(v);
            v = nullptr;
        }
    }
}
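
Note the two delivery paths for metadata here: when the result FMQ has room, the serialized metadata is written into the queue and only fmqResultSize is carried in the HIDL result; otherwise the metadata is sent inline. Either way, the callbacks land in cameraserver's Camera3Device: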
alps/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
hardware::Return<void> Camera3Device::processCaptureResult_3_4(
        const hardware::hidl_vec<
                hardware::camera::device::V3_4::CaptureResult>& results)

void Camera3Device::processOneCaptureResultLocked(
        const hardware::camera::device::V3_2::CaptureResult& result,
        const hardware::hidl_vec<
                hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadatas)

void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
    uint32_t frameNumber = result->frame_number;
    CameraMetadata collectedPartialResult;
    {
        ssize_t idx = mInFlightMap.indexOfKey(frameNumber);
        InFlightRequest &request = mInFlightMap.editValueAt(idx);
        // Handle partial results
        if (result->partial_result != 0)
            request.resultExtras.partialResultCount = result->partial_result;

        if (mUsePartialResult && result->result != NULL) {
            isPartialResult = (result->partial_result < mNumPartialResults);
            if (isPartialResult) {
                request.collectedPartialResult.append(result->result);
            }
            if (isPartialResult && request.hasCallback) {
                sendPartialCaptureResult(result->result, request.resultExtras,
                        frameNumber);
            }
        }
        shutterTimestamp = request.shutterTimestamp;
        hasInputBufferInRequest = request.hasInputBuffer;

        // Save the complete result
        if (result->result != NULL && !isPartialResult) {
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                String8 physicalId(result->physcam_ids[i]);
                std::set<String8>::iterator cameraIdIter =
                        request.physicalCameraIds.find(physicalId);
                if (cameraIdIter != request.physicalCameraIds.end()) {
                    request.physicalCameraIds.erase(cameraIdIter);
                }
            }
            if (mUsePartialResult && !request.collectedPartialResult.isEmpty()) {
                collectedPartialResult.acquire(request.collectedPartialResult);
            }
            request.haveResultMetadata = true;
        }

        uint32_t numBuffersReturned = result->num_output_buffers;
        if (result->input_buffer != NULL) {
            if (hasInputBufferInRequest) {
                numBuffersReturned += 1;
            }
        }
        request.numBuffersLeft -= numBuffersReturned;

        // Wait for the shutter notification before handing back the output buffers
        if (shutterTimestamp == 0) {
            request.pendingOutputBuffers.appendArray(result->output_buffers,
                result->num_output_buffers);
        } else {
            returnOutputBuffers(result->output_buffers, result->num_output_buffers, shutterTimestamp);
        }
        // Send the complete result
        if (result->result != NULL && !isPartialResult) {
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                CameraMetadata physicalMetadata;
                physicalMetadata.append(result->physcam_metadata[i]);
                request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
                        physicalMetadata});
            }
            if (shutterTimestamp == 0) {
                request.pendingMetadata = result->result;
                request.collectedPartialResult = collectedPartialResult;
           } else if (request.hasCallback) {
                CameraMetadata metadata;
                metadata = result->result;
                sendCaptureResult(metadata, request.resultExtras,
                    collectedPartialResult, frameNumber,
                    hasInputBufferInRequest, request.physicalMetadatas);
            }
        }
        removeInFlightRequestIfReadyLocked(idx);
    }

    if (result->input_buffer != NULL) {
        if (hasInputBufferInRequest) {
            Camera3Stream *stream =
                Camera3Stream::cast(result->input_buffer->stream);
            res = stream->returnInputBuffer(*(result->input_buffer));
        }
    }
}

 

=======================================================================================================

Returning notifications
alps/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
void Camera3Device::notify(const camera3_notify_msg *msg) {
    listener = mListener.promote();
    switch (msg->type) {
        case CAMERA3_MSG_ERROR: {
            notifyError(msg->message.error, listener);
            break;
        }
        case CAMERA3_MSG_SHUTTER: {
            notifyShutter(msg->message.shutter, listener);
            break;
        }
    }
}
void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
        sp<NotificationListener> listener) {
    {
        idx = mInFlightMap.indexOfKey(msg.frame_number);
        if (idx >= 0) {
            InFlightRequest &r = mInFlightMap.editValueAt(idx);
            r.shutterTimestamp = msg.timestamp;
            if (r.hasCallback) {
                if (listener != NULL) {
                    // Invoke the registered listener's notifyShutter
                    listener->notifyShutter(r.resultExtras, msg.timestamp);
                }
                // Send the pending result (queued until the shutter arrived)
                sendCaptureResult(r.pendingMetadata, r.resultExtras,
                    r.collectedPartialResult, msg.frame_number,
                    r.hasInputBuffer, r.physicalMetadatas);
            }
            returnOutputBuffers(r.pendingOutputBuffers.array(),
                r.pendingOutputBuffers.size(), r.shutterTimestamp);

            removeInFlightRequestIfReadyLocked(idx);
        }
    }
}
Invoking the client listener's notifyShutter
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
        nsecs_t timestamp) {
    // Thread safe. Don't bother locking.
    sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
    if (remoteCb != 0) {
        remoteCb->onCaptureStarted(resultExtras, timestamp);
    }
    Camera2ClientBase::notifyShutter(resultExtras, timestamp);
}
public void onCaptureStarted(final CaptureResultExtras resultExtras, final long timestamp) {
    int requestId = resultExtras.getRequestId();
    final long frameNumber = resultExtras.getFrameNumber();
    final CaptureCallbackHolder holder;
    synchronized(mInterfaceLock) {
        if (mRemoteDevice == null) return; // Camera already closed
        // Get the callback for this frame ID, if there is one
        holder = CameraDeviceImpl.this.mCaptureCallbackMap.get(requestId);
        ......
        // Dispatch capture start notice
        holder.getHandler().post(new Runnable() {
            @Override
            public void run() {
                if (!CameraDeviceImpl.this.isClosed()) {
                    holder.getCallback().onCaptureStarted(CameraDeviceImpl.this,
                            holder.getRequest(resultExtras.getSubsequenceId()),
                            timestamp, frameNumber);
                }
            }
        });
    }
}

Inserting the result into the queue
void Camera3Device::sendCaptureResult(CameraMetadata &pendingMetadata,
        CaptureResultExtras &resultExtras,
        CameraMetadata &collectedPartialResult,
        uint32_t frameNumber,
        bool reprocess,
        const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
    CaptureResult captureResult;
    captureResult.mResultExtras = resultExtras;
    captureResult.mMetadata = pendingMetadata;
    captureResult.mPhysicalMetadatas = physicalMetadatas;

    insertResultLocked(&captureResult, frameNumber);
}
void Camera3Device::insertResultLocked(CaptureResult *result,
        uint32_t frameNumber) {
    camera_metadata_t *meta = const_cast<camera_metadata_t *>(
            result->mMetadata.getAndLock());
    set_camera_metadata_vendor_id(meta, mVendorTagId);

    // Valid result, insert into queue
    List<CaptureResult>::iterator queuedResult =
            mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
}
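
The results queued here are later pulled off mResultQueue and delivered back to the application's registered CaptureCallback. On the app side the complete metadata finally surfaces in onCaptureCompleted; a minimal sketch of consuming it (the keys read below are arbitrary examples, and TAG is assumed to be defined by the enclosing class):

    // App-side counterpart (sketch): the metadata assembled by sendCaptureResult()
    // eventually arrives here as a TotalCaptureResult
    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
            TotalCaptureResult result) {
        Long sensorTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
        Log.d(TAG, "frame " + result.getFrameNumber()
                + " timestamp=" + sensorTimestamp + " afState=" + afState);
    }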
Returning the buffers
void Camera3Device::returnOutputBuffers(
        const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
        nsecs_t timestamp) {
    for (size_t i = 0; i < numBuffers; i++)
    {
        Camera3Stream *stream = Camera3Stream::cast(outputBuffers[i].stream);
        status_t res = stream->returnBuffer(outputBuffers[i], timestamp);
    }
}
Removing completed in-flight requests
void Camera3Device::removeInFlightRequestIfReadyLocked(int idx) {
    const InFlightRequest &request = mInFlightMap.valueAt(idx);
    const uint32_t frameNumber = mInFlightMap.keyAt(idx);

    nsecs_t sensorTimestamp = request.sensorTimestamp;
    nsecs_t shutterTimestamp = request.shutterTimestamp;

    if (request.numBuffersLeft == 0 &&
            (request.skipResultMetadata ||
            (request.haveResultMetadata && shutterTimestamp != 0))) {
        returnOutputBuffers(request.pendingOutputBuffers.array(),
            request.pendingOutputBuffers.size(), 0);
     }

    if (!mIsConstrainedHighSpeedConfiguration && mInFlightMap.size() > kInFlightWarnLimit) {
        CLOGE("In-flight list too large: %zu", mInFlightMap.size());
    } else if (mIsConstrainedHighSpeedConfiguration && mInFlightMap.size() >
            kInFlightWarnLimitHighSpeed) {
        CLOGE("In-flight list too large for high speed configuration: %zu",
                mInFlightMap.size());
    }
}
