I have been working on the MTK camera stack for a while and have accumulated some thoughts that I never got around to organizing. The consequence is that even though you have done all of this work, it is hard to explain clearly how it actually fits together. But if you sort out in your head what you have done and then write it down, it turns into your own understanding and truly becomes part of your accumulated knowledge. In this post I will first walk through the API1 video recording flow on MTK.
1.) The recording code on the app side looks basically like this:
package com.example.dcamera;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import android.Manifest;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
public class MainActivity extends AppCompatActivity{
SurfaceView surfaceview1;
byte[] mPreBuffer = new byte[3133440];
SurfaceHolder surfaceholder1, surfaceholder2, surfaceholder3;
String TAG = "DoubleCamera";
private Camera camera1 = null, camera2 = null, camera3 = null;
Camera.Parameters parameters;
Button cam1StopPreview, cam2StopPreview, cam1Recorder, cam2Recorder;
private MediaRecorder mMediaRecorder1, mMediaRecorder2;
/*
mCam1State == 0  not recording / recording stopped
mCam1State == 1  recording in progress
*/
private int mCam1State=0, mCam2State=0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d(TAG, "onCreate 1");
String[] permissionsToRequest = new String[5];
permissionsToRequest[0] = Manifest.permission.CAMERA;
permissionsToRequest[1] = Manifest.permission.RECORD_AUDIO;
permissionsToRequest[2] = Manifest.permission.READ_EXTERNAL_STORAGE;
permissionsToRequest[3] = Manifest.permission.WRITE_EXTERNAL_STORAGE;
permissionsToRequest[4] = Manifest.permission.ACCESS_COARSE_LOCATION;
ActivityCompat.requestPermissions(this, permissionsToRequest, 1);
Log.d(TAG, "onCreate 2");
setContentView(R.layout.activity_main);
surfaceview1 = (SurfaceView) findViewById(R.id.surfaceview1);
surfaceholder1 = surfaceview1.getHolder();
surfaceholder1.addCallback(new surfaceholderCallbackBack());
cam1StopPreview = (Button) findViewById(R.id.cam1StopPreview);
cam1Recorder = (Button) findViewById(R.id.cam1Recorder);
Log.d(TAG, "onCreate end");
}
@Override
protected void onDestroy() {
Log.e(TAG, "onDestroy");
if(camera1 != null)
{
camera1.stopPreview();
camera1.release();
}
if(camera2 != null)
{
camera2.stopPreview();
camera2.release();
}
super.onDestroy();
}
/**
* SurfaceHolder callback for the back (rear-facing) camera
*/
class surfaceholderCallbackBack implements SurfaceHolder.Callback, Camera.PreviewCallback {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Log.d(TAG, "onPreviewFrame start");
if (mPreBuffer == null) {
mPreBuffer = new byte[3133440];
}
// camera1.addCallbackBuffer(mPreBuffer);// add this buffer to the preview callback buffer queue
Log.d(TAG, "onPreviewFrame data len is "+data.length);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// Get the Camera object
int cameraCount = Camera.getNumberOfCameras();
Log.d(TAG, "surfaceholderCallbackBack cameraCount=" + cameraCount);
if (cameraCount > 0) {
camera1 = Camera.open(0);
Log.d(TAG, "surfaceholderCallbackBack 1");
try {
// Set the preview display
camera1.setPreviewDisplay(holder);
Camera.Parameters parameters = camera1.getParameters();
Log.d(TAG, "surfaceholderCallbackBack 2, parameters is: "+parameters);
if (MainActivity.this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) {
parameters.set("orientation", "portrait");
camera1.setDisplayOrientation(90);
parameters.setRotation(90);
} else {
parameters.set("orientation", "landscape");
parameters.set("preview-size", "640x480");
camera1.setDisplayOrientation(0);
parameters.setRotation(0);
}
parameters.set("preview-frame-rate", 30);
List<int[]> fpsList = parameters.getSupportedPreviewFpsRange();
if(fpsList != null && fpsList.size() > 0) {
int[] minFps = fpsList.get(0);
for (int[] fps: fpsList) {
Log.d(TAG, "fps is:"+fps[0]+"--"+fps[1]);
if(minFps[0] * minFps[1] > fps[0] * fps[1]) {
minFps = fps;
}
}
Log.d(TAG, "set fps :"+minFps[0]+"--"+minFps[1]);
//Note: setPreviewFpsRange() takes frame rate * 1000, e.g. 30 fps is passed as 30*1000
parameters.setPreviewFpsRange(minFps[0] , minFps[1]);
//setPreviewFrameRate() takes the actual frame rate
//parameters.setPreviewFrameRate(MAX_FPS);
}
//parameters.setPreviewFormat(ImageFormat.YUV_420_888);
camera1.addCallbackBuffer(mPreBuffer);
camera1.setPreviewCallbackWithBuffer((Camera.PreviewCallback) this);
camera1.setParameters(parameters);
// Start the camera preview
camera1.startPreview();
Log.d(TAG, "surfaceholderCallbackBack 3");
System.out.println("camera.startpreview");
} catch (IOException e) {
e.printStackTrace();
camera1.release();
System.out.println("camera.release");
Log.d(TAG, "surfaceholderCallbackBack IOException");
}
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
camera1.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
if (success) {
initCamera();// initialize the camera parameters
camera.cancelAutoFocus();// auto focus only works if this line is added
}
}
});
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
// Initial camera parameter setup
private void initCamera() {
parameters = camera1.getParameters();
parameters.setPictureFormat(PixelFormat.JPEG);
//parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);// 1. continuous focus
//setDispaly(parameters, camera1);
camera1.setParameters(parameters);
camera1.startPreview();
camera1.cancelAutoFocus();// 2. this line is required for continuous auto focus
}
// Control the correct display orientation of the image
private void setDispaly(Camera.Parameters parameters, Camera camera) {
if (Integer.parseInt(Build.VERSION.SDK) >= 8) {
setDisplayOrientation(camera, 90);
} else {
parameters.setRotation(90);
}
}
// Apply the correct display orientation
private void setDisplayOrientation(Camera camera, int i) {
Method downPolymorphic;
try {
downPolymorphic = camera.getClass().getMethod("setDisplayOrientation", new Class[]{int.class});
if (downPolymorphic != null) {
downPolymorphic.invoke(camera, new Object[]{i});
}
} catch (Exception e) {
Log.e("Came_e", "图像出错");
}
}
}
public void onClickButton(View view) {
switch (view.getId()) {
case R.id.cam1StopPreview:
{
Log.d(TAG, "cam1StopPreview, mCam1State="+mCam1State);
if(camera1 != null)
{
camera1.stopPreview();
camera1.release();
camera1 = null;
}
break;
}
case R.id.cam1Recorder:
{
Log.d(TAG, "onClickButton cam1Recorder 1, mCam1State="+mCam1State);
if (mCam1State == 0) {
mCam1State = 1;
cam1Recorder.setText("cam1_recording");
initializeRecorder(1);
Log.d(TAG, "onClickButton cam1Recorder 2");
try {
mMediaRecorder1.start(); // Recording is now started
Log.d(TAG, "onClickButton cam1Recorder 3");
} catch (RuntimeException e) {
Log.e(TAG, "Could not start mMediaRecorder1 ", e);
releaseMediaRecorder(1);
return;
}
} else if (mCam1State == 1) {
Log.d(TAG, "onClickButton cam1Recorder 4");
mCam1State = 0;
cam1Recorder.setText("cam1_Record");
stopVideoRecording(1);
Log.d(TAG, "onClickButton cam1Recorder 5");
}
}
break;
default:
break;
}
}
//For now two apps cannot record at the same time: one must finish before the other starts. Supporting that requires changes to the MediaRecorder logic.
private void initializeRecorder(int id) {
Log.d(TAG, "initializeRecorder start, camId="+id);
File sd = Environment.getExternalStorageDirectory();
String path=sd.getPath()+"/DCamera";
File file = new File(path);
if (!file.exists()) {
Log.d(TAG, "initializeRecorder "+path+" is no exist");
if (file.mkdir()) {
Log.d(TAG, "initializeRecorder "+path+" mkdir succes");
}
else
{
Log.d(TAG, "initializeRecorder "+path+" mkdir fail");
}
}
else
{
Log.d(TAG, "initializeRecorder "+path+" is exist");
}
if (id == 1) {
String filename = path + "/cam1.mp4";
File file1 = new File(filename);
if(file1.exists()){
file1.delete();
}
if(mMediaRecorder1 == null)
{
mMediaRecorder1 = new MediaRecorder();
}
Log.d(TAG, "initializeRecorder 1");
camera1.unlock();
mMediaRecorder1.setCamera(camera1);
//mMediaRecorder1.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mMediaRecorder1.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder1.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder1.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
//mMediaRecorder1.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mMediaRecorder1.setVideoSize(640, 480);
mMediaRecorder1.setVideoFrameRate(20);
mMediaRecorder1.setPreviewDisplay(surfaceholder1.getSurface());
mMediaRecorder1.setOutputFile(filename);
Log.d(TAG, "initializeRecorder 2");
try {
mMediaRecorder1.prepare();
Log.d(TAG, "initializeRecorder 3");
} catch (IOException e) {
Log.e(TAG, "prepare failed for " + filename, e);
releaseMediaRecorder(1);
throw new RuntimeException(e);
}
}
}
private void releaseMediaRecorder(int id) {
Log.i(TAG, "Releasing media recorder. id="+id);
if (id == 1) {
if (mMediaRecorder1 != null) {
mMediaRecorder1.reset();
mMediaRecorder1.release();
mMediaRecorder1 = null;
mCam1State = 0;
}
}
}
private void stopVideoRecording(int id) {
Log.d(TAG, "stopVideoRecording id="+id);
if (id == 1) {
try {
Log.d(TAG, "stopVideoRecording 1");
if(mMediaRecorder1 != null)
{
Log.d(TAG, "stopVideoRecording 2");
mMediaRecorder1.stop();
mMediaRecorder1.release();
Log.d(TAG, "stopVideoRecording 3");
}
camera1.lock();
Log.i(TAG, "stopVideoRecording 4, mMediaRecorder1");
} catch (RuntimeException e) {
Log.e(TAG, "stop mMediaRecorder1 fail", e);
}
}
}
}
2.) In the code above, Camera.open(0) is called first to open the camera with id 0. It goes to the open() function in frameworks\base\core\java\android\hardware\Camera.java:
public static Camera open(int cameraId) {
return new Camera(cameraId);
}
which then calls the following Camera constructor:
Camera(int cameraId) {
int err = cameraInitNormal(cameraId);
if (checkInitErrors(err)) {
if (err == -EACCES) {
throw new RuntimeException("Fail to connect to camera service");
} else if (err == -ENODEV) {
throw new RuntimeException("Camera initialization failed");
}
// Should never hit this.
throw new RuntimeException("Unknown camera error");
}
}
Next, let's look at cameraInitNormal():
private int cameraInitNormal(int cameraId) {
return cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_NORMAL_CONNECT);
}
and then cameraInitVersion():
private int cameraInitVersion(int cameraId, int halVersion) {
mShutterCallback = null;
mRawImageCallback = null;
mJpegCallback = null;
mPreviewCallback = null;
mPostviewCallback = null;
mUsingPreviewAllocation = false;
mZoomListener = null;
Looper looper;
if ((looper = Looper.myLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
} else if ((looper = Looper.getMainLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
} else {
mEventHandler = null;
}
return native_setup(new WeakReference<Camera>(this), cameraId, halVersion,
ActivityThread.currentOpPackageName());
}
3.) The native_setup above lands in the JNI method android_hardware_Camera_native_setup in \frameworks\base\core\jni\android_hardware_Camera.cpp:
static jint android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
jobject weak_this, jint cameraId, jint halVersion, jstring clientPackageName)
{
// Convert jstring to String16
const char16_t *rawClientName = reinterpret_cast<const char16_t*>(
    env->GetStringChars(clientPackageName, NULL));
jsize rawClientNameLen = env->GetStringLength(clientPackageName);
String16 clientName(rawClientName, rawClientNameLen);
env->ReleaseStringChars(clientPackageName,
    reinterpret_cast<const jchar*>(rawClientName));
sp<Camera> camera;
if (halVersion == CAMERA_HAL_API_VERSION_NORMAL_CONNECT) {
// Default path: hal version is don't care, do normal camera connect.
camera = Camera::connect(cameraId, clientName,
Camera::USE_CALLING_UID, Camera::USE_CALLING_PID);
} else {
jint status = Camera::connectLegacy(cameraId, halVersion, clientName,
Camera::USE_CALLING_UID, camera);
if (status != NO_ERROR) {
return status;
}
}
......
}
Because the halVersion we pass in defaults to CAMERA_HAL_API_VERSION_NORMAL_CONNECT, the Camera::connect() branch is taken.
4.) Next we reach frameworks\av\camera\CameraBase.cpp:
template <typename TCam, typename TCamTraits>
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
const String16& clientPackageName,
int clientUid, int clientPid)
{
ALOGV("%s: connect", __FUNCTION__);
sp<TCam> c = new TCam(cameraId);
sp<TCamCallbacks> cl = c;
const sp<::android::hardware::ICameraService> cs = getCameraService();
binder::Status ret;
if (cs != nullptr) {
TCamConnectService fnConnectService = TCamTraits::fnConnectService;
ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
clientPid, /*out*/ &c->mCamera);
}
if (ret.isOk() && c->mCamera != nullptr) {
IInterface::asBinder(c->mCamera)->linkToDeath(c);
c->mStatus = NO_ERROR;
} else {
ALOGW("An error occurred while connecting to camera %d: %s", cameraId,
(cs == nullptr) ? "Service not available" : ret.toString8().string());
c.clear();
}
return c;
}
getCameraService() above fetches a handle to the camera service; from there we move on into CameraService.cpp.
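As an aside, getCameraService() is a small helper in CameraBase.cpp. The trimmed-down sketch below shows what it does: look up the "media.camera" binder service and turn it into an ICameraService proxy. The real function also polls until the service is published and registers a death notification; that bookkeeping is omitted here, and the snippet only builds inside the frameworks/av tree.
#include <binder/IInterface.h>
#include <binder/IServiceManager.h>
#include <utils/String16.h>
#include <android/hardware/ICameraService.h>
// Simplified stand-in for CameraBase::getCameraService(): resolve the binder
// service named "media.camera" and cast it to the ICameraService interface.
static android::sp<android::hardware::ICameraService> getCameraServiceSketch() {
    android::sp<android::IServiceManager> sm = android::defaultServiceManager();
    android::sp<android::IBinder> binder = sm->getService(android::String16("media.camera"));
    if (binder == nullptr) {
        return nullptr; // cameraserver not registered yet; the real code retries
    }
    return android::interface_cast<android::hardware::ICameraService>(binder);
}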
5.)frameworks\av\services\camera\libcameraservice\CameraService.cpp
Status CameraService::connect(
const sp<ICameraClient>& cameraClient,
int cameraId,
const String16& clientPackageName,
int clientUid,
int clientPid,
/*out*/
sp<ICamera>* device) {
ATRACE_CALL();
Status ret = Status::ok();
String8 id = String8::format("%d", cameraId);
sp<Client> client = nullptr;
ret = connectHelper<ICameraClient,Client>(cameraClient, id,
CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, clientUid, clientPid, API_1,
/*legacyMode*/ false, /*shimUpdateOnly*/ false,
/*out*/client);
if(!ret.isOk()) {
logRejected(id, getCallingPid(), String8(clientPackageName),
ret.toString8());
return ret;
}
*device = client;
return ret;
}
6.)CameraService::connectHelper
template<class CALLBACK, class CLIENT>
Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
int halVersion, const String16& clientPackageName, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool legacyMode, bool shimUpdateOnly,
/*out*/sp<CLIENT>& device) {
....
sp<BasicClient> tmp = nullptr;
if(!(ret = makeClient(this, cameraCb, clientPackageName, cameraId, facing, clientPid,
clientUid, getpid(), legacyMode, halVersion, deviceVersion, effectiveApiLevel,
/*out*/&tmp)).isOk()) {
return ret;
}
client = static_cast<CLIENT*>(tmp.get());
LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state",
__FUNCTION__);
err = client->initialize(mCameraProviderManager);
....
}
7.)
Status CameraService::makeClient(const sp<CameraService>& cameraService,
const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId,
int facing, int clientPid, uid_t clientUid, int servicePid, bool legacyMode,
int halVersion, int deviceVersion, apiLevel effectiveApiLevel,
/*out*/sp<BasicClient>* client) {
if (halVersion < 0 || halVersion == deviceVersion) {
// Default path: HAL version is unspecified by caller, create CameraClient
// based on device version reported by the HAL.
switch(deviceVersion) {
case CAMERA_DEVICE_API_VERSION_1_0:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
ALOGD("CameraService::makeClient 1");
*client = new CameraClient(cameraService, tmp, packageName, cameraIdToInt(cameraId),
facing, clientPid, clientUid, getpid(), legacyMode);
} else { // Camera2 API route
ALOGW("Camera using old HAL version: %d", deviceVersion);
return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
"Camera device \"%s\" HAL version %d does not support camera2 API",
cameraId.string(), deviceVersion);
}
break;
....
}
8.) Since we are using API1 + HAL1, the CameraClient path is taken: frameworks\av\services\camera\libcameraservice\api1\CameraClient.cpp
status_t CameraClient::initialize(sp<CameraProviderManager> manager) {
int callingPid = getCallingPid();
status_t res;
LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);
// Verify ops permissions
res = startCameraOps();
if (res != OK) {
return res;
}
char camera_device_name[10];
snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
mHardware = new CameraHardwareInterface(camera_device_name);
res = mHardware->initialize(manager);
if (res != OK) {
ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
mHardware.clear();
return res;
}
......
return OK;
}
9.)frameworks\av\services\camera\libcameraservice\device1\CameraHardwareInterface.cpp
status_t CameraHardwareInterface::initialize(sp<CameraProviderManager> manager) {
ALOGI("Opening camera %s", mName.string());
status_t ret = manager->openSession(mName.string(), this, &mHidlDevice);
if (ret != OK) {
ALOGE("%s: openSession failed! %s (%d)", __FUNCTION__, strerror(-ret), ret);
}
return ret;
}
10.) The openSession above calls into openSession in frameworks\av\services\camera\libcameraservice\common\CameraProviderManager.cpp:
status_t CameraProviderManager::openSession(const std::string &id,
const sp<hardware::camera::device::V1_0::ICameraDeviceCallback>& callback,
/*out*/
sp<hardware::camera::device::V1_0::ICameraDevice> *session) {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
auto deviceInfo = findDeviceInfoLocked(id,
/*minVersion*/ {1,0}, /*maxVersion*/ {2,0});
if (deviceInfo == nullptr) return NAME_NOT_FOUND;
auto *deviceInfo1 = static_cast<ProviderInfo::DeviceInfo1*>(deviceInfo);
hardware::Return<Status> status = deviceInfo1->mInterface->open(callback);
if (!status.isOk()) {
ALOGE("%s: Transaction error opening a session for camera device %s: %s",
__FUNCTION__, id.c_str(), status.description().c_str());
return DEAD_OBJECT;
}
if (status == Status::OK) {
*session = deviceInfo1->mInterface;
}
return mapToStatusT(status);
}
11.) The deviceInfo1->mInterface used above is the HIDL device interface returned by MTK's vendor camera provider; it is handed out through getCameraDeviceInterface:
template
void
CameraProviderImpl::
getCameraDeviceInterface(
const hidl_string& cameraDeviceName,
InterfaceCallbackT _hidl_cb
)
{
::android::sp pBaseDevice(nullptr);
auto status = mManager->getDeviceInterface(cameraDeviceName, pBaseDevice);
//
auto pICameraDevice = ::android::sp(static_cast(pBaseDevice.get()));
if ( pBaseDevice == nullptr || pICameraDevice == nullptr || 0 != status ) {
MY_LOGE(
"DeviceName:%s pBaseDevice:%p pICameraDevice:%p status:%s(%d)",
cameraDeviceName.c_str(), pBaseDevice.get(), pICameraDevice.get(), ::strerror(-status), -status
);
}
_hidl_cb(mapToHidlCameraStatus(status), pICameraDevice);
}
12.)vendor\mediatek\proprietary\hardware\mtkcam\main\hal\devicemgr\CameraDeviceManagerBase.cpp
auto
CameraDeviceManagerBase::
getDeviceInterface(
const std::string& deviceName,
::android::sp& rpDevice
) -> ::android::status_t
{
RWLock::AutoRLock _l(mDataRWLock);
//
auto const& pInfo = mVirtEnumDeviceMap.valueFor(deviceName);
if ( pInfo == nullptr ) {
MY_LOGE("[%s] this device name is unknown", deviceName.c_str());
logLocked();
return BAD_VALUE;
}
//
auto const& pDevice = pInfo->mVirtDevice;
if ( pDevice == nullptr ) {
MY_LOGE("[%s] bad virtual device", deviceName.c_str());
logLocked();
return BAD_VALUE;
}
//
status_t status = pDevice->getDeviceInterfaceBase(rpDevice);
if ( OK != status ) {
MY_LOGE("[%s] unknown error", deviceName.c_str());
logLocked();
}
return status;
}
What this function does is look up an existing device from the mVirtEnumDeviceMap table. That device was added earlier: in the open() function of vendor\mediatek\proprietary\hardware\mtkcam\main\hal\device\1.x\device\CameraDevice1Base.cpp, startOpenDevice() is called, and along that path attachOpenDeviceLocked(pVirtualDevice) registers the device into the map.
When the device boots, the HAL code is loaded; at that point the open() function in CameraDevice1Base.cpp gets called and the cameraDevice is saved. What we fetch here is the device that was opened back then.
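To make this enumerate-then-look-up idea concrete, here is a small self-contained sketch of the same pattern. The names used here (DeviceRegistry, VirtualDevice, attach, lookup) are invented for illustration only; the real code uses mVirtEnumDeviceMap together with attachOpenDeviceLocked() and getDeviceInterface().
#include <iostream>
#include <map>
#include <memory>
#include <string>

struct VirtualDevice {                       // stand-in for the HAL device object
    explicit VirtualDevice(std::string name) : mName(std::move(name)) {}
    std::string mName;
};

class DeviceRegistry {
public:
    // Registered while the HAL enumerates cameras (the attachOpenDeviceLocked analogue).
    void attach(const std::string& deviceName, std::shared_ptr<VirtualDevice> dev) {
        mDevices[deviceName] = std::move(dev);
    }
    // Called later from the open path (the getDeviceInterface analogue): only a
    // device registered during enumeration can be handed back to the framework.
    std::shared_ptr<VirtualDevice> lookup(const std::string& deviceName) const {
        auto it = mDevices.find(deviceName);
        return it == mDevices.end() ? nullptr : it->second;
    }
private:
    std::map<std::string, std::shared_ptr<VirtualDevice>> mDevices;
};

int main() {
    DeviceRegistry registry;
    registry.attach("device@1.0/internal/0", std::make_shared<VirtualDevice>("back camera"));

    if (auto dev = registry.lookup("device@1.0/internal/0"))
        std::cout << "open ok: " << dev->mName << std::endl;
    else
        std::cout << "unknown device name" << std::endl;   // the BAD_VALUE branch above
    return 0;
}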
13.) Let's look at CameraDevice::open:
Return<Status>
CameraDevice::
open(const sp<ICameraDeviceCallback>& callback)
{
MY_LOGD("ENTRY: open: %s", mStaticDeviceInfo->mInstanceName.c_str());
//[1] Create CameraDevice1Base
mpDevice = CameraDevice1Base::createCameraDevice(mDeviceManager,
mStaticDeviceInfo->mInstanceName,
mInstanceId);
if( !mpDevice.get() ){
MY_LOGE("Create CameraDevice1Base failed!");
return Status::INTERNAL_ERROR;
}
return mpDevice->open(callback);
}
This simply calls straight through to CameraDevice1Base::open(). Let's look at that function next.
14.)CameraDevice1Base::open
Return<Status>
CameraDevice1Base::
open(const sp<ICameraDeviceCallback>& callback)
{
::android::Mutex::Autolock _lOpsLock(mOpsLock);
MY_LOGE("CameraDevice1Base::open ** start, mInstanceName=%s", mInstanceName.c_str());
MY_LOGI("+");
sp<JobMonitor::Helper> monitor = new JobMonitor::Helper(String8(__PRETTY_FUNCTION__), 10*1000);
//unlink to death notification for existed device callback
if ( mDeviceCallback != nullptr ) {
mDeviceCallback->unlinkToDeath(this);
mDeviceCallback = nullptr;
::memset(&mDeviceCallbackDebugInfo, 0, sizeof(mDeviceCallbackDebugInfo));
}
//link to death notification for device callback
if ( callback != nullptr ) {
hardware::Return<bool> linked = callback->linkToDeath(this, (uint64_t)this);
if (!linked.isOk()) {
MY_LOGE("Transaction error in linking to mDeviceCallback death: %s", linked.description().c_str());
} else if (!linked) {
MY_LOGW("Unable to link to mDeviceCallback death notifications");
}
callback->getDebugInfo([this](const auto& info){
mDeviceCallbackDebugInfo = info;
});
MY_LOGD("Link death to ICameraDeviceCallback %s", toString(mDeviceCallbackDebugInfo).c_str());
}
::android::status_t status = OK;
std::string const& instanceName = mInstanceName;
status = mDeviceManager->startOpenDevice(instanceName);
if ( OK != status ){
MY_LOGW("DeviceManager startOpenDevice failed");
mDeviceManager->updatePowerOnDone();
return mapToHidlCameraStatus(status);
}
{
// check if needs add log line count in stereo mode.
if ( 0 == ::strcmp(mDevName,"MtkStereo" ) )
setLogLevelToEngLoad(1, 1, 20000);
//
}
status = onOpenLocked();
if (OK != status){ //open failed
MY_LOGW("CameraDevice1Base onOpenLocked failed");
mDeviceManager->finishOpenDevice(instanceName, true/*cancel*/);
return mapToHidlCameraStatus(status);
}
else{ //open successfully
status = mDeviceManager->finishOpenDevice(instanceName, false/*cancel*/);
if( OK != status){
MY_LOGW("DeviceManager finishOpenDevice failed");
return mapToHidlCameraStatus(status);
}
}
//Set Device Callback
initHalPreviewWindow();
//update device list
{
Mutex::Autolock lock(gCameraDevice1Baselock);
MBOOL bIsFind = MFALSE;
for(list<wp<CameraDevice1Base>>::iterator it = glwpCameraDevice1Base.begin(); it != glwpCameraDevice1Base.end(); it++)
{
if( (*it).promote().get() != 0 &&
(*it).promote()->getInstanceId() == mInstanceId)
{
bIsFind = MTRUE;
MY_LOGE("Exist cameraId %d - %p, this - %p", mInstanceId, (*it).promote().get(), this);
break;
}
}
if(!bIsFind)
{
MY_LOGI("Add new cameraId %d - %p", mInstanceId, this);
glwpCameraDevice1Base.push_back(this);
}
}
setDeviceCallback(callback);
sSetDevConnect(instanceName, true);
MY_LOGD("CameraDevice1Base::open ** end");
return Status::OK;
}
15.) The function above mainly performs the open work, and it also calls one very important function: initHalPreviewWindow(). Let's see what it does:
void CameraDevice1Base::initHalPreviewWindow()
{
mHalPreviewWindow.cancel_buffer = sCancelBuffer;
mHalPreviewWindow.lock_buffer = sLockBuffer;
mHalPreviewWindow.dequeue_buffer = sDequeueBuffer;
mHalPreviewWindow.enqueue_buffer = sEnqueueBuffer;
mHalPreviewWindow.set_buffer_count = sSetBufferCount;
mHalPreviewWindow.set_buffers_geometry = sSetBuffersGeometry;
mHalPreviewWindow.set_crop = sSetCrop;
mHalPreviewWindow.set_timestamp = sSetTimestamp;
mHalPreviewWindow.set_usage = sSetUsage;
mHalPreviewWindow.set_swap_interval = sSetSwapInterval;
mHalPreviewWindow.get_min_undequeued_buffer_count =
sGetMinUndequeuedBufferCount;
}
This function only installs a set of function pointers. Who do these hooks serve? They serve the Surface passed down from the app. First look at the definition of mHalPreviewWindow in vendor\mediatek\proprietary\hardware\mtkcam\main\hal\device\1.x\device\CameraDevice1Base.h:
struct CameraPreviewWindow : public preview_stream_ops {
// Called when we expect buffer will be re-allocated
void cleanUpCirculatingBuffers();
android::Mutex mLock;
sp mPreviewCallback = nullptr;
std::unordered_map mCirculatingBuffers;
std::unordered_map mBufferIdMap;
} mHalPreviewWindow;
Notice that mHalPreviewWindow inherits from preview_stream_ops, and preview_stream_ops is the HAL-side view of the Surface the app passed down. If you are interested, go back and look at CameraClient::setPreviewTarget in \frameworks\av\services\camera\libcameraservice\api1\CameraClient.cpp. Once these function pointers are installed, the preview data can be written in real time to the app's view.
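The mechanism behind those hooks is the classic pattern of a C function-pointer table whose static trampolines forward into a C++ object. The self-contained sketch below uses simplified stand-in types (preview_stream_ops_sketch, HalPreviewWindowSketch); in the real code trampolines such as sDequeueBuffer/sEnqueueBuffer forward to the window object that wraps the Surface the app handed over via setPreviewDisplay().
#include <cstdio>

struct preview_stream_ops_sketch {               // stand-in for preview_stream_ops
    int (*dequeue_buffer)(preview_stream_ops_sketch* w, int* buffer_id);
    int (*enqueue_buffer)(preview_stream_ops_sketch* w, int buffer_id);
};

struct HalPreviewWindowSketch : preview_stream_ops_sketch {
    HalPreviewWindowSketch() {
        // Same idea as initHalPreviewWindow(): fill the ops table with trampolines.
        dequeue_buffer = sDequeueBuffer;
        enqueue_buffer = sEnqueueBuffer;
    }
    // In the real code these would talk to the ANativeWindow/preview callback that
    // wraps the SurfaceView the app passed down.
    int dequeueFromSurface(int* buffer_id) { *buffer_id = 42; return 0; }
    int enqueueToSurface(int buffer_id) { std::printf("display buffer %d\n", buffer_id); return 0; }

    static int sDequeueBuffer(preview_stream_ops_sketch* w, int* buffer_id) {
        return static_cast<HalPreviewWindowSketch*>(w)->dequeueFromSurface(buffer_id);
    }
    static int sEnqueueBuffer(preview_stream_ops_sketch* w, int buffer_id) {
        return static_cast<HalPreviewWindowSketch*>(w)->enqueueToSurface(buffer_id);
    }
};

int main() {
    HalPreviewWindowSketch window;
    preview_stream_ops_sketch* ops = &window;    // what the HAL pipeline sees
    int id = -1;
    ops->dequeue_buffer(ops, &id);               // HAL asks for a display buffer
    ops->enqueue_buffer(ops, id);                // HAL returns a filled buffer to the surface
    return 0;
}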
16.) That covers the open flow; now let's talk about recording. I won't trace recording down from the app; instead I'll start from the startRecordingMode function in frameworks\av\services\camera\libcameraservice\api1\CameraClient.cpp:
status_t CameraClient::startRecordingMode() {
LOG1("startRecordingMode");
status_t result = NO_ERROR;
// if recording has been enabled, nothing needs to be done
if (mHardware->recordingEnabled()) {
return NO_ERROR;
}
// if preview has not been started, start preview first
if (!mHardware->previewEnabled()) {
result = startPreviewMode();
if (result != NO_ERROR) {
return result;
}
}
// start recording mode
enableMsgType(CAMERA_MSG_VIDEO_FRAME);
sCameraService->playSound(CameraService::SOUND_RECORDING_START);
result = mHardware->startRecording();
if (result != NO_ERROR) {
ALOGE("mHardware->startRecording() failed with status %d", result);
}
return result;
}
17.)frameworks\av\services\camera\libcameraservice\device1\CameraHardwareInterface.cpp
status_t CameraHardwareInterface::startRecording()
{
ALOGV("%s(%s)", __FUNCTION__, mName.string());
if (CC_LIKELY(mHidlDevice != nullptr)) {
//!++
hardware::Return<Status> ret = mHidlDevice->startRecording();
if (!ret.isOk()) {
ALOGE("%s: Transaction error starting recording of camera device %s: %s",
__FUNCTION__, mName.string(), ret.description().c_str());
return DEAD_OBJECT;
}
return CameraProviderManager::mapToStatusT(ret);
//!--
}
return INVALID_OPERATION;
}
18.)vendor\mediatek\proprietary\hardware\mtkcam\main\hal\device\1.x\device\CameraDevice.cpp
Return<Status>
CameraDevice::
startRecording()
{
MY_LOGD("ENTRY: start recording");
if( !mpDevice.get() ){
MY_LOGE("mpDevice is NULL");
return Status::OPERATION_NOT_SUPPORTED;
}
return mpDevice->startRecording();
}
19.)vendor\mediatek\proprietary\hardware\mtkcam\main\hal\device\1.x\device\CameraDevice1Base.cpp
Return<Status>
CameraDevice1Base::
startRecording()
{
CAM_TRACE_CALL();
MY_LOGI("+");
sp<JobMonitor::Helper> monitor = new JobMonitor::Helper(String8(__PRETTY_FUNCTION__), 10*1000);
status_t status = OK;
if ( mpCamAdapter == 0 )
{
MY_LOGE("NULL Camera Adapter");
status = DEAD_OBJECT;
goto lbExit;
}
if ( ! onStartRecording() )
{
MY_LOGE("onStartRecording() fail");
status = INVALID_OPERATION;
goto lbExit;
}
// zsd -> default
if ( !strcmp(mpCamAdapter->getName(), MtkCameraParameters::APP_MODE_NAME_MTK_ZSD) )
{
MY_LOGW("invalid camera adapter: %s", mpCamAdapter->getName());
stopPreview();
mpParamsMgr->setForceHalAppMode(String8(MtkCameraParameters::APP_MODE_NAME_DEFAULT));
startPreview();
}
// startRecording in Camera Adapter.
{
CAM_TRACE_NAME("adapterStartRecording");
status = mpCamAdapter->startRecording();
if ( OK != status )
{
MY_LOGE("startRecording() in CameraAdapter returns: [%s(%d)]", ::strerror(-status), -status);
goto lbExit;
}
}
{
CAM_TRACE_NAME("clientStartRecording");
if ( mpCamClient != 0 )
{
// Get recording format & size.
// Set.
if ( ! mpCamClient->startRecording() )
{
status = INVALID_OPERATION;
goto lbExit;
}
}
// forward to registered clients
Vector >::const_iterator it;
for (it = vmpCamClient.begin(); it != vmpCamClient.end(); ++it)
{
(*it)->startRecording();
}
}
//
{
MINT32 enableFd = property_get_int32("debug.camera.VR.FD", -1);
if( enableFd > 0 )
{
sendCommand((CommandType)CAMERA_CMD_START_FACE_DETECTION, 0, 0);
}
else if( enableFd == 0 )
{
sendCommand((CommandType)CAMERA_CMD_STOP_FACE_DETECTION, 0, 0);
}
}
lbExit:
MY_LOGI("- status(%d)", status);
return mapToHidlCameraStatus(status);
}
/******************************************************************************
* Stop a previously started recording.
******************************************************************************/
Return<void>
CameraDevice1Base::
stopRecording()
{
CAM_TRACE_CALL();
MY_LOGI("+");
sp<JobMonitor::Helper> monitor = new JobMonitor::Helper(String8(__PRETTY_FUNCTION__), 10*1000);
MINT32 enableFd = property_get_int32("debug.camera.VR.FD", -1);
if( enableFd > 0 )
{
sendCommand((CommandType)CAMERA_CMD_STOP_FACE_DETECTION, 0, 0);
}
{
CAM_TRACE_NAME("adapterStopRecording");
if ( mpCamAdapter != 0 )
{
mpCamAdapter->stopRecording();
}
}
{
CAM_TRACE_NAME("clientStopRecording");
if ( mpCamClient != 0 )
{
mpCamClient->stopRecording();
}
// forward to registered clients
Vector >::const_iterator it;
for (it = vmpCamClient.begin(); it != vmpCamClient.end(); ++it)
{
(*it)->stopRecording();
}
}
// check if need default -> zsd
if ( mpCamAdapter != 0 )
{
String8 halappmode;
if ( mpParamsMgr->evaluateHalAppMode(halappmode) &&
halappmode != mpCamAdapter->getName() )
{
stopPreview();
startPreview();
}
}
MY_LOGI("-");
return Void();
}
There are two important lines above. The first is mpCamAdapter->startRecording(), which goes into vendor\mediatek\proprietary\hardware\mtkcam\legacy\platform\mt6761\v1\adapter\MtkDefault\MtkDefaultCamAdapter.Record.cpp:
status_t
CamAdapter::
startRecording()
{
status_t status = OK;
//
if(recordingEnabled())
{
MY_LOGW("Recording has been started");
}
else
{
int32_t vdoWidth = 0, vdoHeight = 0, fps = mpParamsMgr->getInt(CameraParameters::KEY_PREVIEW_FRAME_RATE);
mpParamsMgr->getVideoSize(&vdoWidth, &vdoHeight);
MY_LOGD("VDO:FPS(%d), CurSize(%dx%d), LastSize(%dx%d)",
fps,
vdoWidth,
vdoHeight,
mLastVdoWidth,
mLastVdoHeight);
//
if( mLastVdoWidth != vdoWidth ||
mLastVdoHeight != vdoHeight)
{
MY_LOGD("mb4K2KVideoRecord(%d)",mb4K2KVideoRecord);
if( ( mb4K2KVideoRecord == MFALSE &&
vdoWidth*vdoHeight > IMG_1080P_SIZE) ||
( mb4K2KVideoRecord == MTRUE &&
vdoWidth*vdoHeight <= IMG_1080P_SIZE))
{
mpStateManager->getCurrentState()->onStopPreview(this);
if(vdoWidth*vdoHeight > IMG_1080P_SIZE)
{
mb4K2KVideoRecord = MTRUE;
}
else
{
mb4K2KVideoRecord = MFALSE;
}
mpStateManager->getCurrentState()->onStartPreview(this);
//
if(mPipStartPreviewFail)
{
return status;
}
}
mLastVdoWidth = vdoWidth;
mLastVdoHeight = vdoHeight;
}
//
status = mpStateManager->getCurrentState()->onStartRecording(this);
if ( OK != status ) {
goto lbExit;
}
}
//
lbExit:
if ( OK == status ) {
CamManager* pCamMgr = CamManager::getInstance();
pCamMgr->setRecordingHint(true);
}
return status;
}
This function mainly fetches the recording size and then sets some HAL-side state. For example, pCamMgr->setRecordingHint(true) calls setRecordingHint in vendor\mediatek\proprietary\hardware\mtkcam\utils\hw\CamManager.cpp, which sets mbRecord to true. The next time preview is started, startPreview() in vendor\mediatek\proprietary\hardware\mtkcam\main\hal\device\1.x\device\CameraDevice1Base.cpp calls pCamMgr->getPermission() to check whether recording is currently in progress; if it is, preview cannot be opened again:
Return
CameraDevice1Base::
startPreview()
{
....
// Check Permission.
if ( ! pCamMgr->getPermission() )
{
MY_LOGE("Cannot start preview ... Permission denied");
sp<IFrameworkCBThread> spFrameworkCBThread = IFrameworkCBThread::createInstance(getInstanceId(),mpCamMsgCbInfo);
spFrameworkCBThread->init();
IFrameworkCBThread::callback_data cbData;
cbData.callbackType = IFrameworkCBThread::CALLBACK_TYPE_NOTIFY;
cbData.type = CAMERA_MSG_ERROR;
cbData.ext1 = CAMERA_ERROR_SERVER_DIED;
cbData.ext2 = 0;
spFrameworkCBThread->postCB(cbData);
spFrameworkCBThread->uninit();
spFrameworkCBThread = NULL;
status = OK;
goto lbExit;
}
....
}
getPermission() is defined in vendor\mediatek\proprietary\hardware\mtkcam\utils\hw\CamManager.cpp:
bool
CamManager::
getPermission() const
{
Mutex::Autolock _l(mLockMtx);
//
MY_LOGE("OpenId.size(%zu), mbRecord(%d), mbAvailable(%d), mbStereo(%d), 0:fps(%d); 1:fps(%d)",
mvOpenId.size(), mbRecord, mbAvailable, mbStereo, getFrameRate(0), getFrameRate(1));
return !mbRecord && mbAvailable && !mbStereo;
}
20.) Now for the other important line in CameraDevice1Base::startRecording(): mpCamClient->startRecording(). It is defined in the CamClient::startRecording() function under vendor\mediatek\proprietary\hardware\mtkcam\middleware\v1\client\CamClient\:
bool
CamClient::
startRecording()
{
if ( mpFDClient != 0 )
{
mpFDClient->startRecording();
}
#if '1'==MTKCAM_HAVE_OT_CLIENT
if ( mpOTClient != 0 )
{
mpOTClient->startRecording();
}
#endif
return ( mpRecordClient != 0 )
? mpRecordClient->startRecording()
: false
;
}
21.) The mpRecordClient above is an sp<IRecordClient>; it is created in CamClient::init():
bool
CamClient::
init()
{
....
mpRecordClient = IRecordClient::createInstance(mpParamsMgr);
if ( mpRecordClient == 0 || ! mpRecordClient->init() )
{
ret = false;
goto lbExit;
}
....
}
22.) IRecordClient::createInstance is defined in vendor\mediatek\proprietary\hardware\mtkcam\middleware\v1\client\CamClient\Record\RecordClient.Platform.cpp:
sp<IRecordClient>
IRecordClient::
createInstance(sp<IParamsManager> pParamsMgr)
{
return new RecordClientImp(pParamsMgr);
}
RecordClientImp::
RecordClientImp(sp<IParamsManager> pParamsMgr)
: RecordClient(pParamsMgr)
, mImgStrides{16,16,16}
, mImgImgFormat(MtkCameraParameters::PIXEL_FORMAT_YUV420P)
{
MY_LOGD("Img stride (%d/%d/%d), fmt(%s) if it is normal mode, and please ignore this if it is meta mode.",
mImgStrides[0],
mImgStrides[1],
mImgStrides[2],
mImgImgFormat.string());
}
From this we can see that the mpCamClient->startRecording() call in CameraDevice1Base::startRecording() actually ends up in RecordClient::startRecording().
23.)vendor\mediatek\proprietary\hardware\mtkcam\middleware\v1\client\CamClient\Record\RecordClient.cpp
bool
RecordClient::
startRecording()
{
....
ret = onStateChanged();
....
}
In the RecordClient module, the frame data flows through mpImgBufQueue. It is declared in RecordClient.h as a strong pointer (sp) to the image buffer queue, and it is initialized in RecordClient::init():
bool
RecordClient::
init()
{
bool ret = false;
status_t status = NO_ERROR;
//
MY_LOGD("+");
//
//
mpImgBufQueue = new ImgBufQueue(IImgBufProvider::eID_REC_CB, "RecCB@ImgBufQue");
if ( mpImgBufQueue == 0 )
{
MY_LOGE("Fail to new ImgBufQueue");
goto lbExit;
}
//
//
status = run("CamClient@Record");
if ( OK != status )
{
MY_LOGE("Fail to run thread, status[%s(%d)]", ::strerror(-status), -status);
goto lbExit;
}
//
//
ret = true;
lbExit:
MY_LOGD("-");
return ret;
}
bool
RecordClient::
onStateChanged()
{
bool ret = true;
//
if ( isEnabledState() )
{
postCommand(Command(Command::eID_WAKEUP));
}
//
return ret;
}
24.) An eID_WAKEUP command is posted above. Where does it go? Following postCommand(), we find that it is defined in vendor\mediatek\proprietary\hardware\mtkcam\middleware\v1\client\CamClient\Record\RecordClient.Thread.cpp; after the command is posted there, it is received by threadLoop() in the same file:
bool
RecordClient::
threadLoop()
{
Command cmd;
if ( getCommand(cmd) )
{
switch (cmd.eId)
{
case Command::eID_WAKEUP:
onClientThreadLoop(cmd);
break;
//
case Command::eID_EXIT:
mbThreadExit = true;
default:
MY_LOGD("Command::%s", cmd.name());
break;
}
}
//
MY_LOGD("-");
return true;
}
25.)
void
RecordClient::
onClientThreadLoop(Command const& rCmd)
{
sp pBufMgr = NULL;
sp pBufQueue = NULL;
uint32_t i,size,releaseFrameCnt = 0,waitFrameCnt = 0;
//
if(!initBuffers())
{
MY_LOGE("initBuffers fail");
return;
}
// (1) Get references to pool/queue before starting, so that nothing will be free during operations.
{
Mutex::Autolock _l(mModuleMtx);
//
pBufMgr = mpImgBufMgr;
pBufQueue = mpImgBufQueue;
if ( pBufMgr == 0 || pBufQueue == 0 || ! isEnabledState() )
{
MY_LOGW("pBufMgr(%p), pBufQueue(%p), isEnabledState(%d)", pBufMgr.get(), pBufQueue.get(), isEnabledState());
return;
}
}
// (2) stop & clear all buffers so that we won't deque any undefined buffer.
pBufQueue->stopProcessor();
// (3) Prepare all TODO buffers.
if ( ! prepareAllTodoBuffers(pBufQueue, pBufMgr) )
{
return;
}
// (4) Start
if ( ! pBufQueue->startProcessor() )
{
return;
}
// (5) Do until all wanted messages are disabled.
::android_atomic_write(1, &mIsWaitBufBack);
while (1)
{
// (.1)
bool blocking = (mAllocatedExtraRecBufNum >= mExtraRecBufNum );
if( !waitAndHandleReturnBuffers(pBufQueue, blocking) )
{
if( !blocking )
{
int32_t index = pBufMgr->allocateExtraBuffer(mpCamMsgCbInfo->mCbCookie);
if( index >= 0 )
{
initBufferInfo(index);
++mAllocatedExtraRecBufNum;
MY_LOGI("Alloc %d, Total %d",
mRecBufNum + mAllocatedExtraRecBufNum,
mExtraRecBufNum+1);
}
}
}
// (.2) break if disabled.
if ( ! isEnabledState() || ! pBufQueue->isProcessorRunning())
{
MY_LOGI("Record client disabled, Rec(%d) BufQueueRunning(%d)",
::android_atomic_release_load(&mIsRecStarted),
pBufQueue->isProcessorRunning());
{
Mutex::Autolock _lock(mModuleMtx);
uint32_t emptyCount = 0, enqCount = 0, fillCount = 0, cbCount = 0;
//
for(i = 0; i < mvRecBufInfo.size(); i++)
{
if(mvRecBufInfo[i].Sta == REC_BUF_STA_EMPTY)
{
emptyCount++;
}
else
if(mvRecBufInfo[i].Sta == REC_BUF_STA_ENQUE)
{
enqCount++;
}
else
if(mvRecBufInfo[i].Sta == REC_BUF_STA_FILL)
{
fillCount++;
}
else
if(mvRecBufInfo[i].Sta == REC_BUF_STA_CB)
{
cbCount++;
}
}
//
MY_LOGI("buf:Emp(%d),Enq(%d),Fill(%d),CB(%d)",
emptyCount,
enqCount,
fillCount,
cbCount);
}
//
break;
}
// (.3) re-prepare all TODO buffers, if possible,
// since some DONE/CANCEL buffers return.
prepareAllTodoBuffers(pBufQueue, pBufMgr);
}
// (6) stop.
MY_LOGD("pauseProcessor");
pBufQueue->pauseProcessor();
MY_LOGD("flushProcessor");
pBufQueue->flushProcessor();
MY_LOGD("stopProcessor");
pBufQueue->stopProcessor();
//
// (7) Cancel all un-returned buffers.
cancelAllUnreturnBuffers();
::android_atomic_write(0, &mIsWaitBufBack);
//
while(1)
{
{
Mutex::Autolock _lock(mModuleMtx);
//
size = mvRecBufInfo.size();
//
for(i=0; i (mRecBufNum + mAllocatedExtraRecBufNum) )
{
MY_LOGW("Timeout:Force to free buffer!");
break;
}
else
if(android_atomic_release_load(&mbForceReleaseBuf))
{
MY_LOGW("Re-record:Force to free buffer!");
android_atomic_write(0, &mbForceReleaseBuf);
break;
}
else
{
usleep(CB_ADDR_WAIT_TIME);
}
}
//
uninitBuffers();
}
In onClientThreadLoop(), prepareAllTodoBuffers() prepares the buffers that will receive the data. Its first argument is the mpImgBufQueue created in init(), assigned here to pBufQueue; inside prepareAllTodoBuffers(), enqueProcessor() pushes the current frame buffer into mpImgBufQueue:
bool
RecordClient::
prepareAllTodoBuffers(spconst& rpBufQueue, spconst& rpBufMgr)
{
......
ret = rpBufQueue->enqueProcessor(
ImgBufQueNode(pCameraImgBuf, ImgBufQueNode::eSTATUS_TODO));
......
return ret;
}
Back in onClientThreadLoop(), pBufQueue->startProcessor() is then called to kick off processing, and waitAndHandleReturnBuffers() handles the returned buffers: it fetches the freshly filled frame buffers with rpBufQueue->dequeProcessor(vQueNode) and finally hands them to handleReturnBuffers(). waitAndHandleReturnBuffers() is defined in vendor\mediatek\proprietary\hardware\mtkcam\middleware\v1\client\CamClient\Record\RecordClient.BufOps.cpp.
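Before reading waitAndHandleReturnBuffers(), it may help to see the whole TODO/DONE buffer loop in one place. The following is a simplified, self-contained sketch of that producer/consumer pattern; the types here are stand-ins and do not match the real ImgBufQueue/ImgBufQueNode interfaces.
#include <cstdio>
#include <deque>
#include <vector>

enum class BufStatus { TODO, DONE };

struct BufNode {              // stand-in for ImgBufQueNode
    int index;
    BufStatus status;
};

struct BufQueueSketch {       // stand-in for mpImgBufQueue
    std::deque<BufNode> todo; // empty buffers waiting to be filled by the pipeline
    std::deque<BufNode> done; // filled buffers waiting for the record client

    void enqueProcessor(BufNode node) { todo.push_back(node); }      // prepareAllTodoBuffers()
    void process() {                                                 // the preview/record pipeline
        while (!todo.empty()) {
            BufNode n = todo.front(); todo.pop_front();
            n.status = BufStatus::DONE;                              // "fill" the buffer
            done.push_back(n);
        }
    }
    bool dequeProcessor(std::vector<BufNode>& out) {                 // waitAndHandleReturnBuffers()
        while (!done.empty()) { out.push_back(done.front()); done.pop_front(); }
        return !out.empty();
    }
};

int main() {
    BufQueueSketch queue;
    for (int i = 0; i < 3; ++i)
        queue.enqueProcessor({i, BufStatus::TODO});   // prepare all TODO buffers

    queue.process();                                  // startProcessor(): pipeline fills them

    std::vector<BufNode> returned;
    if (queue.dequeProcessor(returned))               // take DONE buffers back
        for (const BufNode& n : returned)
            std::printf("callback for buffer %d\n", n.index);   // handleReturnBuffers()
    return 0;
}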
26.)
bool
RecordClient::
waitAndHandleReturnBuffers(spconst& rpBufQueue, bool blocking)
{
bool ret = false;
uint32_t i,size;
Vector<ImgBufQueNode> vQueNode;
//
MY_LOGD_IF(ENABLE_LOG_PER_FRAME, "+ blocking(%d)", blocking);
//
{
Mutex::Autolock _l(mModuleMtx);
//
size = mvRecBufInfo.size();
for(i=0; iisProcessorQueueEmpty())
{
MY_LOGD("No buffer is done");
goto lbExit;
}
//
rpBufQueue->dequeProcessor(vQueNode);
if ( vQueNode.empty() ) {
MY_LOGW("vQueNode.empty()");
usleep(EMPTY_QUE_WAIT_TIME);
goto lbExit;
}
//
// (2) handle buffers dequed from processor.
ret = handleReturnBuffers(vQueNode);
lbExit:
//
MY_LOGD_IF(ENABLE_LOG_PER_FRAME, "- ret(%d)", ret);
return ret;
}
27.) In waitAndHandleReturnBuffers(), rpBufQueue->dequeProcessor(vQueNode) fetches the current frames, which are then processed by handleReturnBuffers():
bool
RecordClient::
handleReturnBuffers(Vector<ImgBufQueNode>const& rvQueNode)
{
uint32_t i,j;
//MY_LOGD_IF(ENABLE_LOG_PER_FRAME, "+");
//
// (1) Lock
//Mutex::Autolock _l(mModuleMtx);
//
// (2) Remove from List and peform callback, one by one.
int32_t const queSize = rvQueNode.size();
for (i = 0; i < queSize; i++)
{
ImgBufQueNode const& rQueNode = rvQueNode[i];
spconst& rpQueImgBuf = rQueNode.getImgBuf(); // ImgBuf in Queue.
//
if(!(rvQueNode[i].isDONE()))
{
MY_LOGW("rvQueNode idx(%d) is not done",i);
{
Mutex::Autolock _l(mModuleMtx);
//
for(j=0; jgetIonFd())
{
mvRecBufInfo.editItemAt(j).Sta = REC_BUF_STA_EMPTY;
//
if(mbMetaMode)
{
mpImgBufMgr->getBuf(j)->getGrallocBuffer()->unlock();
}
break;
}
}
}
continue;
}
//
if(rpQueImgBuf == 0)
{
MY_LOGW("i(%d):rpQueImgBuf is NULL",i);
continue;
}
//
{
Mutex::Autolock _l(mModuleMtx);
//
for(j=0; jgetIonFd())
{
mvRecBufInfo.editItemAt(j).Sta = REC_BUF_STA_FILL;
//
if(mbMetaMode)
{
mpImgBufMgr->getBuf(j)->configPhyAddr();
mpImgBufMgr->getBuf(j)->getGrallocBuffer()->unlock();
if(mvRecBufInfo[j].PhyAddr == 0)
{
mvRecBufInfo.editItemAt(j).PhyAddr = (MINTPTR)(mpImgBufMgr->getBuf(j)->getPhyAddr());
}
}
break;
}
}
}
}
//
if(j == mvRecBufInfo.size())
{
MY_LOGE("Can't find FD(%d)/VA(%p)",
rpQueImgBuf->getIonFd(),
(MUINTPTR)(rpQueImgBuf->getVirAddr()));
return false;
}
//
MY_LOGD_IF(
0,
"CB:i(%d/%d),Idx(%d),Sta(%d),Info(%p/%d.%06d)",
i,
queSize-1,
j,
rQueNode.getStatus(),
(MUINTPTR)(rpQueImgBuf->getVirAddr()),
(uint32_t)((rpQueImgBuf->getTimestamp()/1000)/1000000),
(uint32_t)((rpQueImgBuf->getTimestamp()/1000)%1000000)
);
//
if(mpExtImgProc != NULL)
{
if(mpExtImgProc->getImgMask() & ExtImgProc::BufType_Record)
{
IExtImgProc::ImgInfo img;
//
img.bufType = ExtImgProc::BufType_Record;
img.format = rpQueImgBuf->getImgFormat();
img.width = rpQueImgBuf->getImgWidth();
img.height = rpQueImgBuf->getImgHeight();
img.stride[0] = rpQueImgBuf->getImgWidthStride(0);
img.stride[1] = rpQueImgBuf->getImgWidthStride(1);
img.stride[2] = rpQueImgBuf->getImgWidthStride(2);
img.virtAddr = (MUINTPTR)(rpQueImgBuf->getVirAddr());
img.bufSize = rpQueImgBuf->getBufSize();
//
mpExtImgProc->doImgProc(img);
}
}
//
//
if(!performRecordCallback(j, mpImgBufMgr->getBuf(j), rQueNode.getCookieDE()))
{
Mutex::Autolock _l(mModuleMtx);
mvRecBufInfo.editItemAt(j).Sta = REC_BUF_STA_EMPTY;
}
}
//
//MY_LOGD_IF(ENABLE_LOG_PER_FRAME, "-");
return true;
}
28.) In the function above, once a frame is obtained it is handed to performRecordCallback(), which calls it back up to the app/MediaRecorder side:
bool
RecordClient::
performRecordCallback(int32_t bufIdx, spconst& pCameraImgBuf, int32_t const msgType)
{
....
#ifdef ENABLE_DEFAULT_TIMEAMPS
addVideoTimestamps(pCameraImgBuf);
#endif
mpCamMsgCbInfo->mDataCbTimestamp(
(nsecs_t)pCameraImgBuf->getTimestamp(),
(int32_t)CAMERA_MSG_VIDEO_FRAME,
pCameraImgBuf->get_camera_memory(),
pCameraImgBuf->getBufIndex(),
mpCamMsgCbInfo->mCbCookie
);
....
}
This is where the recording frame callback happens. If we want to add features such as beautification or a time watermark, they can be added right before the mDataCbTimestamp callback.
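For instance, a hypothetical watermark hook could live in performRecordCallback() right before mDataCbTimestamp() is invoked. The sketch below only illustrates the idea on a fake YUV420 buffer; drawWatermark() is an invented helper, not an existing MTK or Android API.
#include <cstdint>
#include <cstdio>
#include <vector>

// Darken an 8x8 block in the Y plane as a stand-in for drawing a timestamp/logo.
static void drawWatermark(uint8_t* yPlane, int width, int height, int x0, int y0) {
    for (int y = y0; y < y0 + 8 && y < height; ++y)
        for (int x = x0; x < x0 + 8 && x < width; ++x)
            yPlane[y * width + x] = 16;   // near-black in video-range YUV
}

int main() {
    const int width = 640, height = 480;
    std::vector<uint8_t> yuv(width * height * 3 / 2, 128);   // fake YUV420 frame

    // Inside performRecordCallback() one could modify the frame like this,
    // right before mDataCbTimestamp(...) hands it to the encoder:
    drawWatermark(yuv.data(), width, height, 16, 16);

    std::printf("Y(16,16) after watermark = %d\n", yuv[16 * width + 16]);
    return 0;
}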
That basically wraps up the recording flow. Preview and capture flows will be added in later posts.