准备工作做好后，还需要响应点击拍照事件：我们为拍照按钮设置点击监听，点击时调用capture()方法，capture()方法即实现拍照。
private void capture() {
try {
//创建拍照请求
final CaptureRequest.Builder mCaptureBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
//获取屏幕方向
int rotation = getWindowManager().getDefaultDisplay().getRotation();
//绑定请求和mImageReader,不加的话就会导致mImageReader的onImageAvailable()方法不会回调
mCaptureBuilder.addTarget(mImageReader.getSurface());
//设置拍照方向
mCaptureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATION.get(rotation));
CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {
//回调函数,通知拍照数据处理完成
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
TotalCaptureResult result) {
//显示拍照完成的提示
Toast.makeText(getApplicationContext(), "Image Saved!", Toast.LENGTH_SHORT).show();
//重新预览
unLockFocus();
}
};
//停止预览
mCameraCaptureSession.stopRepeating();
//开始拍照
mCameraCaptureSession.capture(mCaptureBuilder.build(), CaptureCallback, null);
}
}
构造 CaptureRequest->UserRequestParams->ParsedAppRequest->IPipelineFrame,最终交给PipelineContextImpl处理
alps/frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
// CameraDeviceImpl.capture(): wraps the single request in a list and submits it
// as a one-shot (non-repeating) capture, returning the request id.
// NOTE(review): the generic type on the List declaration was lost in extraction —
// presumably "List<CaptureRequest> requestList = new ArrayList<>();" — confirm against AOSP.
public int capture(CaptureRequest request, CaptureCallback callback, Executor executor){
List
requestList.add(request);
return submitCaptureRequest(requestList, callback, executor, /*streaming*/false);
}
// Common submission path for both one-shot (capture) and repeating (preview)
// requests: converts Surfaces to stream/surface ids, hands the request array to
// the remote camera service, and registers the callback holder keyed by the
// returned request id.
// NOTE(review): the first parameter line was truncated by extraction —
// presumably "List<CaptureRequest> requestList, CaptureCallback callback," — confirm.
private int submitCaptureRequest(List
Executor executor, boolean repeating) throws CameraAccessException {
executor = checkExecutor(executor, callback);
synchronized(mInterfaceLock) {
// A new repeating request replaces the current one.
if (repeating) {
stopRepeating();
}
SubmitInfo requestInfo;
CaptureRequest[] requestArray = requestList.toArray(new CaptureRequest[requestList.size()]);
// Convert Surface to streamIdx and surfaceIdx
for (CaptureRequest request : requestArray) {
request.convertSurfaceToStreamId(mConfiguredOutputs);
}
// Binder call into CameraDeviceClient::submitRequestList on the service side.
requestInfo = mRemoteDevice.submitRequestList(requestArray, repeating);
for (CaptureRequest request : requestArray) {
request.recoverStreamIdToSurface();
}
if (callback != null) {
mCaptureCallbackMap.put(requestInfo.getRequestId(),
new CaptureCallbackHolder(
callback, requestList, executor, repeating, mNextSessionId - 1));
}
if (repeating) {
// Finish bookkeeping for the repeating request being replaced.
if (mRepeatingRequestId != REQUEST_ID_NONE) {
checkEarlyTriggerSequenceComplete(mRepeatingRequestId,
requestInfo.getLastFrameNumber());
}
mRepeatingRequestId = requestInfo.getRequestId();
} else {
// One-shot: remember the last frame number for sequence-complete callbacks.
mRequestLastFrameNumbersList.add(
new RequestLastFrameNumbersHolder(requestList, requestInfo));
}
return requestInfo.getRequestId();
}
}
alps/frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
转换请求对象CaptureRequest->PhysicalCameraSettingsList
// Binder entry from CameraDeviceImpl: converts each app-side CaptureRequest into
// a PhysicalCameraSettingsList + SurfaceMap and hands the batch to the device
// (preview -> setStreamingRequestList, capture -> captureList).
// NOTE(review): many template arguments (List<...>, Vector<...>, sp<...>) were
// stripped by extraction, and one prose line sits inside the body — confirm the
// exact declarations against AOSP CameraDeviceClient.cpp.
binder::Status CameraDeviceClient::submitRequestList(
const std::vector
bool streaming,
hardware::camera2::utils::SubmitInfo *submitInfo) {
List
std::list
submitInfo->mRequestId = mRequestIdCounter;
for (auto&& request: requests) {
SurfaceMap surfaceMap;
Vector
std::vector
if (request.mSurfaceList.size() > 0) {// request carries explicit Surfaces: map each one
for (sp
int32_t streamId;
sp
res = insertGbpLocked(gbp, &surfaceMap, &outputStreamIds, &streamId);
ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
if (index >= 0) {
String8 requestedPhysicalId(
mConfiguredOutputs.valueAt(index).getPhysicalCameraId());
requestedPhysicalIds.push_back(requestedPhysicalId.string());
}
}
} else {// capture & preview path: surfaces referenced by stream/surface index pairs
for (size_t i = 0; i < request.mStreamIdxList.size(); i++) {
int streamId = request.mStreamIdxList.itemAt(i);
int surfaceIdx = request.mSurfaceIdxList.itemAt(i);
ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
const auto& gbps = mConfiguredOutputs.valueAt(index).getGraphicBufferProducers();
res = insertGbpLocked(gbps[surfaceIdx], &surfaceMap, &outputStreamIds, nullptr);
String8 requestedPhysicalId(mConfiguredOutputs.valueAt(index).getPhysicalCameraId());
requestedPhysicalIds.push_back(requestedPhysicalId.string());
}
}
构造PhysicalCameraSettingsList对象
// Build the PhysicalCameraSettingsList: per physical camera, keep only the
// supported request keys; the logical camera's settings pass through unfiltered.
CameraDeviceBase::PhysicalCameraSettingsList physicalSettingsList;
for (const auto& it : request.mPhysicalCameraSettings) {
String8 physicalId(it.id.c_str());
if (physicalId != mDevice->getId()) {
auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(), it.id);
if (!mSupportedPhysicalRequestKeys.empty()) {
CameraMetadata filteredParams(mSupportedPhysicalRequestKeys.size());
camera_metadata_t *meta = const_cast
filteredParams.getAndLock());
set_camera_metadata_vendor_id(meta, mDevice->getVendorTagId());
filteredParams.unlock(meta);
for (const auto& keyIt : mSupportedPhysicalRequestKeys) {
camera_metadata_ro_entry entry = it.settings.find(keyIt);
if (entry.count > 0) {
filteredParams.update(entry);
}
}
physicalSettingsList.push_back({it.id, filteredParams});
}
} else {
physicalSettingsList.push_back({it.id, it.settings});
}
}
// Stamp output streams, (optional) input stream and request id into the metadata.
physicalSettingsList.begin()->metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS,
&outputStreamIds[0], outputStreamIds.size());
if (request.mIsReprocess) {
physicalSettingsList.begin()->metadata.update(ANDROID_REQUEST_INPUT_STREAMS,
&mInputStream.id, 1);
}
physicalSettingsList.begin()->metadata.update(ANDROID_REQUEST_ID,
&(submitInfo->mRequestId), /*size*/1);
loopCounter++; // loopCounter starts from 1
// Collect the per-request settings and surface map for the batch submit.
metadataRequestList.push_back(physicalSettingsList);
surfaceMapList.push_back(surfaceMap);
}
mRequestIdCounter++;
if (streaming) {// preview takes this path
err = mDevice->setStreamingRequestList(metadataRequestList, surfaceMapList,
&(submitInfo->mLastFrameNumber));
mStreamingRequestId = submitInfo->mRequestId;
} else {// still capture (and similar one-shots) take this path
err = mDevice->captureList(metadataRequestList, surfaceMapList,
&(submitInfo->mLastFrameNumber));
}
}
预览
// Preview path: submit the request list as a repeating request (repeating = true).
// NOTE(review): parameter types were truncated by extraction — presumably
// List<const PhysicalCameraSettingsList>& and std::list<const SurfaceMap>& — confirm.
status_t Camera3Device::setStreamingRequestList(
const List
const std::list
return submitRequestsHelper(requestsList, surfaceMaps, /*repeating*/true, lastFrameNumber);
}
拍照
alps/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
// Still-capture path: submit the request list as a one-shot (repeating = false).
// NOTE(review): parameter types were truncated by extraction — confirm against AOSP.
status_t Camera3Device::captureList(const List
const std::list
int64_t *lastFrameNumber) {
return submitRequestsHelper(requestsList, surfaceMaps, /*repeating*/false, lastFrameNumber);
}
setStreamingRequestList和captureList方法都调用了submitRequestsHelper方法，只是它们的repeating参数一个为true，一个为false。
// Shared helper for preview (repeating) and capture (one-shot) submission:
// converts the metadata lists into a RequestList and enqueues it on the
// request thread.
// NOTE(review): parameter types were truncated by extraction — confirm against AOSP.
status_t Camera3Device::submitRequestsHelper(
const List
const std::list
bool repeating,
int64_t *lastFrameNumber){
RequestList requestList;
转换请求对象PhysicalCameraSettingsList->CaptureRequest
res = convertMetadataListToRequestListLocked(requests, surfaceMaps,
repeating, /*out*/&requestList);
if (repeating) {
// Preview: becomes the new repeating request set.
res = mRequestThread->setRepeatingRequests(requestList, lastFrameNumber);
} else {
// Capture: appended to the one-shot request queue.
res = mRequestThread->queueRequestList(requestList, lastFrameNumber);
}
}
入队预览请求
// Install a new set of repeating (preview) requests, replacing any previous set,
// and wake the request thread.
status_t Camera3Device::RequestThread::setRepeatingRequests(
const RequestList &requests,
/*out*/
int64_t *lastFrameNumber) {
if (lastFrameNumber != NULL) {
*lastFrameNumber = mRepeatingLastFrameNumber;
}
mRepeatingRequests.clear();
mRepeatingRequests.insert(mRepeatingRequests.begin(),
requests.begin(), requests.end());// move the preview requests into the mRepeatingRequests queue
unpauseForNewRequests();
mRepeatingLastFrameNumber = hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
return OK;
}
入队拍照请求
// Enqueue one-shot (capture) requests onto mRequestQueue and wake the thread.
// NOTE(review): the List element type and the for-loop iterator declaration were
// truncated by extraction — presumably List<sp<CaptureRequest>> — confirm.
status_t Camera3Device::RequestThread::queueRequestList(
List
/*out*/
int64_t *lastFrameNumber) {
for (List
mRequestQueue.push_back(*it); // finally, each CaptureRequest lands in the mRequestQueue queue
}
if (lastFrameNumber != NULL) {
*lastFrameNumber = mFrameNumber + mRequestQueue.size() - 1;
}
unpauseForNewRequests();
}
处理队列中的预览和拍照请求
// Request thread main loop: wait for the next batch (capture has priority over
// preview), build the HAL-level requests/buffers, then submit to the HAL.
bool Camera3Device::RequestThread::threadLoop() {
waitForNextRequestBatch();// block until a capture or preview request is available
prepareHalRequests();// build the HAL captureRequest and its outputBuffers
// Inform waitUntilRequestProcessed thread of a new request ID
mLatestRequestId = latestRequestId;
mLatestRequestSignal.signal();
if (mInterface->supportBatchRequest()) { // does the HAL accept batched submission?
submitRequestSuccess = sendRequestsBatch();
} else {
submitRequestSuccess = sendRequestsOneByOne();
}
return submitRequestSuccess;// returning false stops the thread loop
}
优先处理拍照请求,预览优先级较低
// Collect the next batch of requests: one main frame, plus (mBatchSize - 1)
// additional frames for batched sessions.
void Camera3Device::RequestThread::waitForNextRequestBatch() {
// Main frame: fetch the next request and append it to mNextRequests.
NextRequest nextRequest;
nextRequest.captureRequest = waitForNextRequestLocked();
nextRequest.halRequest = camera3_capture_request_t();
nextRequest.submitted = false;
mNextRequests.add(nextRequest);
// Sub-frames of a batched request; normally batchSize == 1 so this is skipped.
const size_t batchSize = nextRequest.captureRequest->mBatchSize;
for (size_t i = 1; i < batchSize; i++) {
NextRequest additionalRequest;
additionalRequest.captureRequest = waitForNextRequestLocked();
additionalRequest.halRequest = camera3_capture_request_t();
additionalRequest.submitted = false;
mNextRequests.add(additionalRequest);
}
}
返回拍照或预览需要处理的请求
// Return the next request to process. One-shot capture requests in mRequestQueue
// take priority; when that queue is empty the repeating (preview) set is re-fed
// into it. Returns NULL when there is nothing to do and the thread pauses.
// NOTE(review): several sp<...> declarations lost their template arguments in
// extraction (return type, nextRequest, statusTracker, listener) — confirm
// against AOSP Camera3Device.cpp.
sp
Camera3Device::RequestThread::waitForNextRequestLocked() {
status_t res;
sp
while (mRequestQueue.empty()) {// no pending capture: serve preview; a queued capture skips this loop
if (!mRepeatingRequests.empty()) {
const RequestList &requests = mRepeatingRequests;
RequestList::const_iterator firstRequest = requests.begin();
nextRequest = *firstRequest;
// Re-queue the remaining repeating requests so preview keeps flowing.
mRequestQueue.insert(mRequestQueue.end(),++firstRequest,requests.end());
mRepeatingLastFrameNumber = mFrameNumber + requests.size() - 1;
break;
}
// Nothing to do yet: wait (with timeout) for a new request to be signalled.
res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout);
if ((mRequestQueue.empty() && mRepeatingRequests.empty()) || exitPending()) {
// Still idle (or exiting): mark the component idle and pause.
if (mPaused == false) {
mPaused = true;
sp
if (statusTracker != 0) {
statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
}
}
return NULL;
}
}
if (nextRequest == NULL) {// a one-shot (capture) request is pending: take it
RequestList::iterator firstRequest = mRequestQueue.begin();
nextRequest = *firstRequest;
mRequestQueue.erase(firstRequest);// consume the request from the queue
if (mRequestQueue.empty() && !nextRequest->mRepeating) {
sp
if (listener != NULL) {
listener->notifyRequestQueueEmpty();// capture-queue-drained callback
}
}
}
return nextRequest;
}
构造HAL层captureRequest和输出的outputBuffers
// Builds the HAL-level captureRequest and output buffers for each entry in
// mNextRequests. Body elided in this excerpt.
status_t Camera3Device::RequestThread::prepareHalRequests() {
}
处理数组中拍照或者预览的请求
// Submit all requests in mNextRequests to the HAL in a single batch.
// NOTE(review): the std::vector element type was lost in extraction — presumably
// std::vector<camera3_capture_request_t*> requests(batchSize); — confirm.
bool Camera3Device::RequestThread::sendRequestsBatch() {
status_t res;
size_t batchSize = mNextRequests.size();
std::vector
uint32_t numRequestProcessed = 0;
// Collect raw pointers to the prepared HAL requests.
for (size_t i = 0; i < batchSize; i++) {
requests[i] = &mNextRequests.editItemAt(i).halRequest;
}
//mInterface = new HalInterface(session, queue);
res = mInterface->processBatchCaptureRequests(requests, &numRequestProcessed);
}
转换请求对象camera3_capture_request_t->CaptureRequest
// Wraps each camera3_capture_request_t as a HIDL CaptureRequest, moves the
// settings metadata through the FMQ when possible (falling back to inline
// hidl_vec settings), fills per-physical-camera settings for @3.4 sessions, and
// finally calls processCaptureRequest_3_4 on the HAL session.
// NOTE(review): the reinterpret_cast argument lines and several template
// parameters were truncated by extraction — confirm against AOSP.
status_t Camera3Device::HalInterface::processBatchCaptureRequests(
std::vector
for (size_t i = 0; i < batchSize; i++) {
if (hidlSession_3_4 != nullptr) {
wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_4[i].v3_2,
/*out*/&handlesCreated);
} else {
wrapAsHidlRequest(requests[i], /*out*/&captureRequests[i], /*out*/&handlesCreated);
}
}
// Write the settings metadata into the fast message queue (FMQ).
for (size_t i = 0; i < batchSize; i++) {
camera3_capture_request_t* request = requests[i];
device::V3_2::CaptureRequest* captureRequest;
if (hidlSession_3_4 != nullptr) {
captureRequest = &captureRequests_3_4[i].v3_2;
} else {
captureRequest = &captureRequests[i];
}
if (request->settings != nullptr) {
size_t settingsSize = get_camera_metadata_size(request->settings);
// Prefer the FMQ; fall back to inline settings if the write fails.
if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
reinterpret_cast
captureRequest->settings.resize(0);
captureRequest->fmqSettingsSize = settingsSize;
} else {
captureRequest->settings.setToExternal(
reinterpret_cast
get_camera_metadata_size(request->settings));
captureRequest->fmqSettingsSize = 0u;
}
} else {
// A null request settings maps to a size-0 CameraMetadata
captureRequest->settings.resize(0);
captureRequest->fmqSettingsSize = 0u;
}
if (hidlSession_3_4 != nullptr) {
// Same FMQ-or-inline treatment for each physical camera's settings.
captureRequests_3_4[i].physicalCameraSettings.resize(request->num_physcam_settings);
for (size_t j = 0; j < request->num_physcam_settings; j++) {
if (request->physcam_settings != nullptr) {
size_t settingsSize = get_camera_metadata_size(request->physcam_settings[j]);
if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
reinterpret_cast
settingsSize)) {
captureRequests_3_4[i].physicalCameraSettings[j].settings.resize(0);
captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize =
settingsSize;
} else {
captureRequests_3_4[i].physicalCameraSettings[j].settings.setToExternal(
reinterpret_cast
request->physcam_settings[j])),
get_camera_metadata_size(request->physcam_settings[j]));
captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize = 0u;
}
} else {
captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize = 0u;
captureRequests_3_4[i].physicalCameraSettings[j].settings.resize(0);
}
captureRequests_3_4[i].physicalCameraSettings[j].physicalCameraId =
request->physcam_id[j];
}
}
}
// Hand the batch to the HAL session (CameraDevice3SessionImpl on MTK).
auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
hidlSession_3_4->processCaptureRequest_3_4(captureRequests_3_4, cachesToRemove,
[&status, &numRequestProcessed] (auto s, uint32_t n) {
status = s;
*numRequestProcessed = n;
});
return CameraProviderManager::mapToStatusT(status);
}
alps/vendor/mediatek/proprietary/hardware/mtkcam3/main/hal/device/3.x/device/CameraDevice3SessionImpl.cpp
// HIDL entry point (ICameraDeviceSession @3.4) on the vendor session: delegates
// to onProcessCaptureRequest and reports status + processed count via _hidl_cb.
// NOTE(review): the hidl_vec parameter types were truncated by extraction.
processCaptureRequest_3_4(const hidl_vec
{
uint32_t numRequestProcessed = 0;
auto status = onProcessCaptureRequest(requests, cachesToRemove, numRequestProcessed);
_hidl_cb(mapToHidlCameraStatus(status), numRequestProcessed);
return Void();
}
alps/vendor/mediatek/proprietary/hardware/mtkcam3/main/hal/device/3.x/device/CameraDevice3SessionImpl.cpp
// Converts incoming HIDL CaptureRequests into app-level requests via the
// AppStreamManager, clones their image/meta buffers into pipeline request items,
// and submits the batch to the pipeline model.
// NOTE(review): several template arguments and the for-loop condition were
// truncated by extraction — confirm against the MTK source tree.
ThisNamespace::
onProcessCaptureRequest(
const hidl_vec
const hidl_vec
uint32_t& numRequestProcessed
){
::android::Vector
auto pAppStreamManager = getSafeAppStreamManager();
pAppStreamManager->submitRequest(requests, appRequests);
std::vector
for ( size_t i=0; i
pItem = std::make_shared
pItem->requestNo = appRequests[i].frameNo;
_CLONE_(pItem->vIImageBuffers, appRequests[i].vInputImageBuffers);
_CLONE_(pItem->vOImageBuffers, appRequests[i].vOutputImageBuffers);
_CLONE_(pItem->vIMetaBuffers, appRequests[i].vInputMetaBuffers);
}
auto pPipelineModel = getSafePipelineModel();
pPipelineModel->submitRequest(vPipelineRequests, numRequestProcessed);
}
将请求对象CaptureRequest->UserRequestParams
alps/vendor/mediatek/proprietary/hardware/mtkcam3/main/hal/device/3.x/app/AppStreamMgr.cpp
// Delegates the CaptureRequest -> UserRequestParams conversion to the
// request handler.
AppStreamMgr::
submitRequest(
const hidl_vec
android::Vector
){
mRequestHandler->submitRequest(captureRequests, rRequests);
}
调用PipelineModelSessionDefault对UserRequestParams进行处理
alps/vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/model/PipelineModelImpl.cpp
// Forwards the request batch to the currently configured pipeline session
// (e.g. PipelineModelSessionDefault).
PipelineModelImpl::
submitRequest(
std::vector
uint32_t& numRequestProcessed
){
session = mSession;
session->submitRequest(requests, numRequestProcessed);
}
alps/vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/model/session/PipelineModelSessionBase.cpp
// Parses each UserRequestParams into a ParsedAppRequest, then submits them one
// by one, incrementing numRequestProcessed per request.
PipelineModelSessionBase::
submitRequest(
std::vector
uint32_t& numRequestProcessed
){
//Convert: UserRequestParams -> ParsedAppRequest
for (size_t i = 0; i < requests.size(); i++) {
auto r = std::make_shared
parseAppRequest(r.get(), requests[i].get() );// build the parsed app-request object
reqs.emplace_back(r);
}
//Submit ParsedAppRequest one by one
for (size_t i = 0; i < reqs.size(); i++, numRequestProcessed++) {
submitOneRequest(reqs[i]);
}
}
// Drives one parsed request through evaluate -> (optional) reconfiguration ->
// frame processing -> ending.
// NOTE(review): the calls below lack trailing semicolons in this excerpt —
// error-handling code was likely elided by the author.
submitOneRequest(
std::shared_ptr
){
auto const requestNo = request->requestNo;
auto pConfigInfo2 = getCurrentConfigInfo2();
auto pReqOutputParams = std::make_shared
onRequest_EvaluateRequest(*pReqOutputParams, request, pConfigInfo2)
onRequest_Reconfiguration(pConfigInfo2, *pReqOutputParams, request)
onRequest_ProcessEvaluatedFrame(*pReqOutputParams, request, pConfigInfo2)
onRequest_Ending(*pReqOutputParams)
}
// Copies the app metadata control, grabs the current pipeline context, and
// processes the evaluated frame set for this request.
onRequest_ProcessEvaluatedFrame(
policy::pipelinesetting::RequestOutputParams const& reqOutput __unused,
std::shared_ptr
std::shared_ptr
){
auto pAppMetaControl = std::make_shared
*pAppMetaControl = *autoAppMetaControl.get();
auto pPipelineContext = getCurrentPipelineContext(); // fetches mCurrentPipelineContext
int res = processEvaluatedFrame(reqOutput,
pAppMetaControl,
pRequest,
pConfigInfo2,
pPipelineContext);
}
// Processes every frame produced by request evaluation, in order: pre-dummy
// frames, the main frame, sub frames, post-dummy frames. Afterwards registers a
// JPEG-bound request as in-flight and applies any scenario boost controls.
processEvaluatedFrame(
policy::pipelinesetting::RequestOutputParams const& reqOutput,
std::shared_ptr
std::shared_ptr
std::shared_ptr
android::sp
){
// process each frame
uint32_t lastFrameNo = 0;
// Shared lambda: forwards one evaluated frame (with its type) downstream.
auto processFrame = [&] (pipelinesetting::RequestResultParams const& result, int frameType) -> int
{
return processOneEvaluatedFrame(
lastFrameNo,
frameType,
result,
reqOutput,
pAppMetaControl,
request,
pConfigInfo2,
pPipelineContext
);
};
// pre-dummy frames
for (auto const& frame : reqOutput.preDummyFrames) {
processFrame(*frame, eFRAMETYPE_PREDUMMY);
}
// main frame
processFrame(*(reqOutput.mainFrame), eFRAMETYPE_MAIN);
// sub frames
for (auto const& frame : reqOutput.subFrames) {
processFrame(*frame, eFRAMETYPE_SUB);
}
// post-dummy frames
for (auto const& frame : reqOutput.postDummyFrames) {
processFrame(*frame, eFRAMETYPE_POSTDUMMY);
}
// A capture destined for the JPEG node is tracked as an in-flight request.
if (reqOutput.mainFrame->nodesNeed.needJpegNode)
{
auto pCaptureInFlightRequest = getCaptureInFlightRequest();
if ( pCaptureInFlightRequest != nullptr ) {
pCaptureInFlightRequest->insertRequest(request->requestNo, eMSG_INFLIGHT_NORMAL);
}
}
// Apply any requested performance/scenario boosts.
for (auto& control : reqOutput.vboostControl)
{
if (control.boostScenario != -1 && control.boostScenario != (int32_t)IScenarioControlV3::Scenario_None)
{
mpScenarioCtrl->boostScenario(control.boostScenario, control.featureFlag, lastFrameNo);
}
}
}
alps/vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/model/session/PipelineModelSessionDefault.cpp
// Builds one IPipelineFrame from the evaluated frame parameters and queues it on
// the pipeline context (asynchronously handled by PipelineContextImpl).
// NOTE(review): several shared_ptr/sp template arguments were truncated by
// extraction — confirm against PipelineModelSessionDefault.cpp.
processOneEvaluatedFrame(
uint32_t& lastFrameNo,
uint32_t frameType,
policy::pipelinesetting::RequestResultParams const& reqResult,
policy::pipelinesetting::RequestOutputParams const& reqOutput,
std::shared_ptr
std::shared_ptr
std::shared_ptr
android::sp
){
// Only the main frame carries app buffers and the frame callback.
BuildPipelineFrameInputParams const params = {
.requestNo = request->requestNo,
.pAppImageStreamBuffers = (frameType == eFRAMETYPE_MAIN ? request->pParsedAppImageStreamBuffers.get() : nullptr),
.pAppMetaStreamBuffers = (vAppMeta.empty() ? nullptr : &vAppMeta),
.pHalImageStreamBuffers = nullptr,
.pHalMetaStreamBuffers = (vHalMeta.empty() ? nullptr : &vHalMeta),
.pvUpdatedImageStreamInfo = &(reqResult.vUpdatedImageStreamInfo),
.pnodeSet = &reqResult.nodeSet,
.pnodeIOMapImage = &(reqResult.nodeIOMapImage),
.pnodeIOMapMeta = &(reqResult.nodeIOMapMeta),
.pRootNodes = &(reqResult.roots),
.pEdges = &(reqResult.edges),
.pCallback = (frameType == eFRAMETYPE_MAIN ? this : nullptr),
.pPipelineContext = pPipelineContext
};
// check pending request
if ((frameType == eFRAMETYPE_MAIN)&&(pZslProcessor->hasPendingZslRequest()))
{
enqueZslBuildFrameParam(params, frameType);
submitZslReq(reqOutput, pPipelineContext, lastFrameNo);
}
// Build the IPipelineFrame from the ParsedAppRequest and related info.
android::sp
buildPipelineFrame(pPipelineFrame, params);//pPipelineFrame = PipelineBufferSetFrameControlImp
if ((frameType == eFRAMETYPE_PREDUMMY) || (frameType == eFRAMETYPE_POSTDUMMY) || (frameType == eFRAMETYPE_SUB))
pZslProcessor->setBufferEnqueCnt(pPipelineFrame->getFrameNo(), mZSLConfigStreamCnt, 0);
lastFrameNo = pPipelineFrame->getFrameNo();
pPipelineContext->queue(pPipelineFrame); // asynchronous: handled by PipelineContextImpl
}
构造 IPipelineFrame
alps/vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/model/utils/PipelineFrameBuilder.cpp
// Assembles an IPipelineFrame via a RequestBuilder: reprocess flag, DAG roots
// and edges, per-node image/meta IO maps, frame callback, then build().
auto buildPipelineFrame(
android::sp
BuildPipelineFrameInputParams const& in __unused
){
RequestBuilder builder; // the real work is delegated to an internal RequestBuilderImpl
builder.setReprocessFrame(in.bReprocessFrame);
builder.setRootNode( *in.pRootNodes );
builder.setNodeEdges( *in.pEdges );
// IOMap of Image/Meta
for ( auto key : *(in.pnodeSet) ) {
auto const& it_image = in.pnodeIOMapImage->find(key);
auto const& it_meta = in.pnodeIOMapMeta->find(key);
builder.setIOMap(
key,
(it_image !=in.pnodeIOMapImage->end() ) ? it_image->second : IOMapSet::empty(),
(it_meta !=in.pnodeIOMapMeta->end() ) ? it_meta->second : IOMapSet::empty()
);
}
sp
.updateFrameCallback(in.pCallback)
.build(in.requestNo, in.pPipelineContext);
out = pFrame;
}
alps/vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/pipeline/PipelineContextBuilders.cpp
// RequestBuilder delegates all work to a freshly allocated RequestBuilderImpl.
RequestBuilder::
RequestBuilder()
: mpImpl(new RequestBuilderImpl())
{
}
// Builds the IPipelineFrame through the context's PipelineContextImpl
// (constructRequest), passing the internal builder impl.
RequestBuilder::
build(
MUINT32 const requestNo,
sp
){
typedef PipelineContext::PipelineContextImpl PipelineContextImplT;
PipelineContextImplT* pContextImpl = pContext->getImpl();
sp
return pFrame;
}
alps/vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/pipeline/PipelineContextImpl.cpp
// Constructs the concrete IPipelineFrame (an IPipelineBufferSetFrameControl):
// creates the frame, builds its request-specific DAG from roots/edges, builds the
// frame-node map from the IO maps, wires everything into the frame, and
// finalizes its configuration.
// NOTE(review): several sp<...> declarations lost their template arguments in
// extraction — confirm against PipelineContextImpl.cpp.
PipelineContext::PipelineContextImpl::
constructRequest(RequestBuilderImpl* pBuilder, MUINT32 const requestNo){
MBOOL const& bReprocessFrame = pBuilder->mbReprocessFrame;
NodeIOMaps const& aImageIOMaps = pBuilder->mImageNodeIOMaps;
NodeIOMaps const& aMetaIOMaps = pBuilder->mMetaNodeIOMaps;
NodeEdgeSet const& aEdges = pBuilder->mNodeEdges;
NodeSet const& aRoots = pBuilder->mRootNodes;
wp
typedef IPipelineBufferSetFrameControl PipelineFrameT;
sp
requestNo,
frameNo,
bReprocessFrame,
aAppCallback, // IAppCallback
mpStreamConfig.get(), // IPipelineStreamBufferProvider
mpDispatcher // IPipelineNodeCallback
);
// Build the request-specific PipelineDAG from roots and edges.
sp
mpPipelineDAG.get(),
aRoots,
aEdges
);
sp
{
sp
construct_FrameNodeMapControl::Params params = {
.pImageNodeIOMaps = &aImageIOMaps,
.pMetaNodeIOMaps = &aMetaIOMaps,
.pReqDAG = pReqDAG.get(),
.pReqStreamInfoSet = pReqStreamInfoSet.get(),
.pMapControl = pFrameNodeMap.get()
};
construct_FrameNodeMapControl()(params);
pReqFrameNodeMap = pFrameNodeMap;
}
// Wire maps, DAG, stream info and settings into the frame, then freeze it.
pFrame->setPipelineNodeMap (mpPipelineNodeMap.get());
pFrame->setNodeMap (pReqFrameNodeMap);
pFrame->setPipelineDAG (pReqDAG);
pFrame->setStreamInfoSet (pReqStreamInfoSet);
pFrame->setPhysicalCameraSetting(aPhysicalCameraSettings);
pFrame->finishConfiguration();
return pFrame;
}
实现IPipelineFrame的功能
alps/vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/pipeline/PipelineBufferSetFrameControlImp.cpp
#define MAIN_CLASS_NAME PipelineBufferSetFrameControlImp
// Factory: creates the concrete IPipelineFrame implementation
// (PipelineBufferSetFrameControlImp).
// NOTE(review): "bReporcessFrame" is misspelled in the original parameter name.
IPipelineBufferSetFrameControl*
IPipelineBufferSetFrameControl::
create(
MUINT32 requestNo,
MUINT32 frameNo,
MBOOL bReporcessFrame,
android::wp
IPipelineStreamBufferProvider const* pBufferProvider,
android::wp
)
{
return new MAIN_CLASS_NAME(requestNo, frameNo, bReporcessFrame, pAppCallback, pBufferProvider, pNodeCallback);
}
处理拍照或者预览请求,返回的错误不会进行处理
alps/vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/pipeline/PipelineContextImpl.cpp
// Queues a frame into the pipeline: registers it as in-flight, then enqueues (or
// flushes, when mInFlush) it to each root node. Root nodes that reject with
// FAILED_TRANSACTION are retried in the second loop.
// NOTE(review): several declarations and both for-loop headers were truncated by
// extraction; as excerpted, the retry loop never advances `it` — the missing
// code presumably handles that. Confirm against PipelineContextImpl.cpp.
PipelineContext::PipelineContextImpl::
queue( sp
mpInFlightRequest->registerRequest(pFrame);
sp
Vector
Vector
{
// First pass over the root nodes.
for(size_t i=0; i
if( mInFlush ) {
err = pNode->flush(pFrame);
} else {
err = pNode->queue(pFrame);
if(err == FAILED_TRANSACTION){
vspPipelineNode.push_back(pNode);
}
}
}
}
// Retry nodes that could not accept the frame the first time.
while(vspPipelineNode.size() != 0){
{
// wake up and enque to root node which can not be enqued last time
Vector
while(it != vspPipelineNode.end()){
if( mInFlush ) {
err = (*it)->flush(pFrame);
} else {
err = (*it)->queue(pFrame);
}
}
}
}
}