目录
- 为什么要使用 Camera2
- 重要功能
为什么要使用 Camera2
相比 Camera1 ,Camera2 的 api 功能更全,控制的粒度更细。单论获取摄像头采集的原始数据:Camera1 获取的是 NV21 格式的 YUV 数据,而 Camera2 获取的则是 YUV_420_888,这是因为底层会对 NV21 做格式转换,为开发者省去一步操作
Camera2 最低兼容到 Android 5.0(API 21)。由于使用 Camera2 实现的代码量大大增加,Google 后面又推出了 CameraX(Jetpack)来简化 api
相机 app 大多使用 Camera2 来开发
基础功能
- 开启摄像头
- 获取预览
开启摄像头
- CameraManager 系统层相机服务
- CameraCharacteristics 摄像头参数
CameraCharacteristics.LENS_FACING :
获取摄像头方向。前置摄像头(LENS_FACING_FRONT)或 后置摄像头(LENS_FACING_BACK)
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL:
获取当前设备支持的相机特性
CameraCharacteristics.SENSOR_ORIENTATION:
获取摄像头方向
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP:
获取StreamConfigurationMap,它是管理摄像头支持的所有输出格式和尺寸
CameraCharacteristics.FLASH_INFO_AVAILABLE:
是否支持闪光灯
CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT:
同时检测到人脸的数量
CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES:
相机支持的人脸检测模式
获取预览
- ImageReader 获得预览数据回调
- CaptureRequest.Builder 设置摄像头参数,如自动对焦、开启闪光灯,建立会话前设置
- CameraCaptureSession 摄像头会话,获取数据
CameraCaptureSession
- capture() 获取一次,常用于单张拍照
- setRepeatingRequest() 是重复请求获取图像数据,常用于预览或连拍
实践代码
/**
 * Singleton helper that opens the first camera via the Camera2 API, renders
 * preview into a {@link TextureView} and delivers YUV_420_888 plane data to an
 * {@link ICamera2Callback} on a dedicated background thread.
 *
 * Thread-safety: {@link #getInstance()} uses double-checked locking;
 * {@link #openCamera} and {@link #release} are synchronized. All camera
 * callbacks run on the internal {@link HandlerThread}.
 */
public class Camera2Helper {
    private final static String TAG = "Camera2Helper";
    // volatile so the double-checked locking in getInstance() publishes safely.
    private static volatile Camera2Helper INSTANCE;
    private Application mGlobalContext;
    private TextureView mTextureView;
    // Preview resolution chosen by getBestSupportedSize().
    private Size mPreviewSize;
    private CameraDevice.StateCallback mDeviceStateCallback;
    private CameraDevice mCameraDevice;
    // Background thread/handler: camera callbacks and frame delivery run here.
    private HandlerThread mBgThread;
    private Handler mBgHandler;
    // Optional on-screen view size used to match aspect ratio; may stay null.
    private Point mPreviewViewSize;
    private CaptureRequest.Builder mRequestBuilder;
    private CameraCaptureSession mCaptureSession;
    private CameraCaptureSession.StateCallback mSessionCallback;
    // Delivers YUV_420_888 frames; 2 buffers (one filling, one being consumed).
    private ImageReader mImageReader;
    private ICamera2Callback mICamera2Callback;

    private Camera2Helper() {
    }

    /**
     * Returns the process-wide instance. Double-checked locking fixes the
     * original racy lazy initialization (two threads could each create one).
     */
    public static Camera2Helper getInstance() {
        if (INSTANCE == null) {
            synchronized (Camera2Helper.class) {
                if (INSTANCE == null) {
                    INSTANCE = new Camera2Helper();
                }
            }
        }
        return INSTANCE;
    }

    /**
     * Opens the first reported camera and starts preview.
     *
     * @param applicationContext application context used to query services/permissions
     * @param textureView        view that will display the preview
     * @param camera2Callback    receives Y/U/V plane bytes for every frame
     */
    public synchronized void openCamera(Application applicationContext, TextureView textureView, ICamera2Callback camera2Callback) {
        mTextureView = textureView;
        mGlobalContext = applicationContext;
        mICamera2Callback = camera2Callback;
        // System-level camera service.
        CameraManager cameraManager = (CameraManager) applicationContext.getSystemService(Context.CAMERA_SERVICE);
        try {
            String[] cameraIds = cameraManager.getCameraIdList();
            // getCameraIdList() never returns null, but it can be empty —
            // the original cameraIds[0] would have thrown on such devices.
            if (cameraIds.length == 0) {
                Log.e(TAG, "no camera available on this device");
                return;
            }
            String cameraId = cameraIds[0];
            CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
            // All output formats and sizes the camera supports.
            StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                Log.e(TAG, "no StreamConfigurationMap for camera " + cameraId);
                return;
            }
            // LENS_FACING returns a boxed Integer that may be null on broken
            // HALs — guard before unboxing to avoid an NPE.
            Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraMetadata.LENS_FACING_FRONT) {
                Log.e(TAG, "前置摄像头");
            } else {
                Log.e(TAG, "后置摄像头");
            }
            // Pick the best preview size among the SurfaceTexture output sizes.
            mPreviewSize = getBestSupportedSize(new ArrayList<>(Arrays.asList(map.getOutputSizes(SurfaceTexture.class))));
            if (ActivityCompat.checkSelfPermission(applicationContext, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                Log.e(TAG, "CAMERA permission not granted");
                return;
            }
            mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.YUV_420_888, 2);
            mBgThread = new HandlerThread(TAG);
            mBgThread.start();
            mBgHandler = new Handler(mBgThread.getLooper());
            // Frames and camera callbacks are delivered on the background thread.
            mImageReader.setOnImageAvailableListener(new OnImageAvailableListenerImpl(), mBgHandler);
            cameraManager.openCamera(cameraId, getStateCallback(), mBgHandler);
        } catch (Exception e) {
            // Keep the stack trace; e.getMessage() may be null and the
            // original Log.e(TAG, e.getMessage()) would then crash.
            Log.e(TAG, "openCamera failed", e);
        }
    }

    /**
     * Stops preview and releases the session, device, reader and background
     * thread. New, backward-compatible API: without it every resource opened
     * by {@link #openCamera} leaked for the process lifetime. Safe to call
     * repeatedly.
     */
    public synchronized void release() {
        if (mCaptureSession != null) {
            mCaptureSession.close();
            mCaptureSession = null;
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (mImageReader != null) {
            mImageReader.close();
            mImageReader = null;
        }
        if (mBgThread != null) {
            mBgThread.quitSafely();
            mBgThread = null;
            mBgHandler = null;
        }
        mICamera2Callback = null;
    }

    /** Device lifecycle callback: starts the preview session once opened. */
    private CameraDevice.StateCallback getStateCallback() {
        mDeviceStateCallback = new CameraDevice.StateCallback() {
            @Override
            public void onOpened(@NonNull CameraDevice camera) {
                mCameraDevice = camera;
                Log.e(TAG, "摄像头打开");
                // Device is ready — a capture session can now be created.
                preview();
            }

            @Override
            public void onDisconnected(@NonNull CameraDevice camera) {
                // Close the delivered device, not the cached field: the field
                // is null if this fires before onOpened (original NPE'd here).
                camera.close();
                mCameraDevice = null;
                Log.e(TAG, "onDisconnected");
            }

            @Override
            public void onError(@NonNull CameraDevice camera, int error) {
                camera.close();
                mCameraDevice = null;
                // Include the error code so failures are diagnosable.
                Log.e(TAG, "onError: " + error);
            }
        };
        return mDeviceStateCallback;
    }

    /**
     * Builds the preview request and creates the capture session. Both the
     * TextureView surface and the ImageReader surface must be request targets
     * AND session outputs, otherwise the framework throws
     * "CaptureRequest contains unconfigured Input/Output Surface!".
     */
    private void preview() {
        if (mTextureView == null) {
            return;
        }
        SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
        if (surfaceTexture == null) {
            // TextureView not attached/available yet — creating a Surface
            // from a null SurfaceTexture would throw.
            Log.e(TAG, "SurfaceTexture not available yet");
            return;
        }
        surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        Surface surface = new Surface(surfaceTexture);
        try {
            mRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            // Continuous auto-focus, appropriate for preview/still use.
            mRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            mRequestBuilder.addTarget(surface);
            mRequestBuilder.addTarget(mImageReader.getSurface());
            mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), getSessionCallback(), mBgHandler);
        } catch (Exception e) {
            Log.e(TAG, "preview failed", e);
        }
    }

    /** Session callback: starts the repeating preview request once configured. */
    private CameraCaptureSession.StateCallback getSessionCallback() {
        mSessionCallback = new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(@NonNull CameraCaptureSession session) {
                // Session is live — a repeating request yields continuous frames
                // (capture() would be the single-shot variant for stills).
                mCaptureSession = session;
                try {
                    mCaptureSession.setRepeatingRequest(mRequestBuilder.build(),
                            new CameraCaptureSession.CaptureCallback() {
                            }, mBgHandler);
                } catch (CameraAccessException e) {
                    Log.e(TAG, "setRepeatingRequest failed", e);
                }
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                // Surfaces were rejected (size/format mismatch). Log instead
                // of the original silent swallow.
                Log.e(TAG, "onConfigureFailed");
            }
        };
        return mSessionCallback;
    }

    /**
     * Picks the supported size between 1280x720 and 1920x1080 whose aspect
     * ratio is closest to the preview view's (or to the largest candidate's
     * when the view size is unknown).
     *
     * @param sizes supported output sizes; must be non-empty
     * @return best matching size, or the first supported size if none fit the bounds
     */
    private Size getBestSupportedSize(List<Size> sizes) {
        Point maxPreviewSize = new Point(1920, 1080);
        Point minPreviewSize = new Point(1280, 720);
        // Fallback when filtering removes everything.
        Size defaultSize = sizes.get(0);
        Size[] tempSizes = sizes.toArray(new Size[0]);
        // Sort descending by width, then height. Integer.compare also fixes the
        // original comparator, which never returned 0 and so violated the
        // Comparator contract for equal sizes.
        Arrays.sort(tempSizes, new Comparator<Size>() {
            @Override
            public int compare(Size o1, Size o2) {
                int byWidth = Integer.compare(o2.getWidth(), o1.getWidth());
                return byWidth != 0 ? byWidth : Integer.compare(o2.getHeight(), o1.getHeight());
            }
        });
        sizes = new ArrayList<>(Arrays.asList(tempSizes));
        // Drop candidates outside the [min, max] bounds (iterate backwards so
        // removal does not shift unvisited indices).
        for (int i = sizes.size() - 1; i >= 0; i--) {
            Size s = sizes.get(i);
            if (s.getWidth() > maxPreviewSize.x || s.getHeight() > maxPreviewSize.y
                    || s.getWidth() < minPreviewSize.x || s.getHeight() < minPreviewSize.y) {
                sizes.remove(i);
            }
        }
        if (sizes.isEmpty()) {
            return defaultSize;
        }
        Size bestSize = sizes.get(0);
        float previewViewRatio;
        if (mPreviewViewSize != null) {
            previewViewRatio = (float) mPreviewViewSize.x / (float) mPreviewViewSize.y;
        } else {
            previewViewRatio = (float) bestSize.getWidth() / (float) bestSize.getHeight();
        }
        // Normalize to the <= 1 form so it compares against height/width below.
        if (previewViewRatio > 1) {
            previewViewRatio = 1 / previewViewRatio;
        }
        for (Size s : sizes) {
            float ratio = s.getHeight() / (float) s.getWidth();
            float bestRatio = bestSize.getHeight() / (float) bestSize.getWidth();
            if (Math.abs(ratio - previewViewRatio) < Math.abs(bestRatio - previewViewRatio)) {
                bestSize = s;
            }
        }
        return bestSize;
    }

    /** Receives YUV_420_888 frames from the ImageReader on the background thread. */
    private class OnImageAvailableListenerImpl implements ImageReader.OnImageAvailableListener {
        // Plane buffers allocated once from the first frame and reused after.
        private byte[] y;
        private byte[] u;
        private byte[] v;

        @Override
        public void onImageAvailable(ImageReader reader) {
            // Always drain the reader, even with no consumer: the original
            // returned before acquiring, which fills the buffer queue and
            // permanently stalls frame delivery.
            Image image = reader.acquireNextImage();
            if (image == null) {
                // Another callback already drained the queue.
                return;
            }
            try {
                if (mICamera2Callback == null) {
                    return;
                }
                Image.Plane[] planes = image.getPlanes();
                if (y == null) {
                    // remaining() == limit - position: bytes available in a
                    // freshly acquired plane buffer.
                    y = new byte[planes[0].getBuffer().remaining()];
                    u = new byte[planes[1].getBuffer().remaining()];
                    v = new byte[planes[2].getBuffer().remaining()];
                }
                // Only forward the frame when the plane layout still matches the
                // cached buffers; the original invoked the callback even on a
                // mismatch, handing it stale/zeroed data.
                if (planes[0].getBuffer().remaining() == y.length) {
                    planes[0].getBuffer().get(y);
                    planes[1].getBuffer().get(u);
                    planes[2].getBuffer().get(v);
                    mICamera2Callback.onPreview(y, u, v, mPreviewSize, planes[0].getRowStride());
                }
            } finally {
                // Release the Image in all paths or the reader runs out of buffers.
                image.close();
            }
        }
    }
}
遇到的问题
- 无法获取到 ImageReader 中的 YUV 数据
原因: 在创建会话时,只添加了 TextureView 的 Surface 作为一路输出 Target,未将 ImageReader.getSurface() 也添加为输出 Target
解决方案:
(CaptureRequest.Builder)mRequestBuilder.addTarget(mImageReader.getSurface());
-
报异常 IllegalArgumentException: CaptureRequest contains unconfigured Input/Output Surface!
相关链接
Android 多线程之HandlerThread 完全详解
Android Camera2 之 CameraCaptureSession 详解
camera2拍照报错集锦宝典