Android Audio and Video Development: Using and Encapsulating Camera, Camera2 and CameraX

Preface

  • Source code:
    https://github.com/Peakmain/Video_Audio/tree/master/app/src/main/java/com/peakmain/video_audio/utils
  • My Jianshu: https://www.jianshu.com/u/3ff32f5aea98
  • My GitHub: https://github.com/peakmain

Basics

  • Before Android 5.0 we used the Camera API. It is simple to use but has been deprecated. Android 5.0 introduced Camera2, which is far more verbose, so CameraX appeared later; CameraX is in fact just a wrapper around Camera2.
  • Android's camera coordinate system
    • The sensor's orientation differs from the screen's: the image sensor's coordinate system has to be rotated 90° counter-clockwise to line up with the screen's coordinates.
    • As a result, the raw preview we see on screen is rotated 90° counter-clockwise.
    • So we rotate the image 90° clockwise to display a correctly oriented picture.


      (figure: sensor vs. screen coordinate orientation)
  • The camera preview delivers NV21 frames, but most consumers (e.g. hardware encoders) expect NV12, so we usually have to convert NV21 to NV12. Both are YUV 4:2:0 formats.
  • Difference between NV21 and NV12 (a conversion sketch follows this list):
    • NV12: YYYYYYYY UVUV... => YUV420SP
    • NV21: YYYYYYYY VUVU... => YUV420SP
      The only real difference is the interleaving order of the U and V bytes.
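
The conversion itself is just a byte shuffle. The repo uses FileUtils.nv21toNV12 for this; its body is not listed in this article, so here is a minimal Kotlin sketch of what such a conversion looks like (the function name and signature are illustrative, not the repo's API):

fun nv21ToNv12(nv21: ByteArray, width: Int, height: Int): ByteArray {
    val frameSize = width * height
    val nv12 = ByteArray(nv21.size)
    // The Y plane is identical in both formats
    System.arraycopy(nv21, 0, nv12, 0, frameSize)
    // The chroma plane: NV21 stores VU pairs, NV12 stores UV pairs, so swap each pair
    var i = frameSize
    while (i + 1 < nv21.size) {
        nv12[i] = nv21[i + 1]      // U
        nv12[i + 1] = nv21[i]      // V
        i += 2
    }
    return nv12
}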

Encapsulating and Using Camera

  • Since the Camera API is simple and already deprecated, we will only walk through it quickly
  • Camera.open opens the camera: 0 is the back camera, 1 is the front camera
  • Camera.getParameters() returns the camera parameters, such as the preview size
  • The callback buffer must be width * height * 3 / 2 bytes, because Y:U:V = 4:1:1 and the Y plane alone is width * height bytes (e.g. a 1920×1080 preview needs 1920 × 1080 × 3 / 2 = 3,110,400 bytes)
  • setPreviewCallbackWithBuffer registers the preview callback; frames arrive in onPreviewFrame
  • startPreview: start the preview
public class CameraSurface extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
    private Camera mCamera;
    private Camera.Size size;
    byte[] mBuffer;

    public CameraSurface(Context context) {
        this(context, null);
    }

    public CameraSurface(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CameraSurface(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        getHolder().addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        startPreview();
    }

    private void startPreview() {
        // Open the back-facing camera
        mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
        // Query the camera parameters, e.g. the preview size
        Camera.Parameters parameters = mCamera.getParameters();
        size = parameters.getPreviewSize();
        try {
            mCamera.setPreviewDisplay(getHolder());
            mCamera.setDisplayOrientation(90);
            mBuffer = new byte[size.width * size.height * 3 / 2];
            mCamera.addCallbackBuffer(mBuffer);
            mCamera.setPreviewCallbackWithBuffer(this);
            mCamera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }


    private volatile boolean isCapture;

    public void startCapture() {
        isCapture = true;
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (isCapture) {
            // Convert the NV21 preview frame to NV12 and rotate it for portrait orientation
            byte[] nv12 = FileUtils.nv21toNV12(data);
            FileUtils.portraitData2Raw(nv12, mBuffer, size.width, size.height);
            isCapture = false;
            capture(mBuffer);
            ToastUtils.showLong("Saved successfully");
        }
        // Hand the buffer back so the camera can reuse it for the next frame
        mCamera.addCallbackBuffer(data);
    }

    int index = 0;

    private void capture(byte[] bytes) {
        String fileName = "Camera_" + index++ + ".jpg";
        File sdRoot = Environment.getExternalStorageDirectory();
        File pictureFile = new File(sdRoot, fileName);
        if (!pictureFile.exists()) {
            try {
                pictureFile.createNewFile();
                FileOutputStream fileOutputStream = new FileOutputStream(pictureFile);
                // Wrap the raw data in a YuvImage (width and height are swapped because the frame was rotated)
                YuvImage image = new YuvImage(bytes, ImageFormat.NV21, size.height, size.width, null);
                image.compressToJpeg(
                        new Rect(0, 0, image.getWidth(), image.getHeight()),
                        100, fileOutputStream);
                fileOutputStream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }


    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // Release the camera so other apps can use it
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }
}
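
onPreviewFrame above relies on FileUtils.portraitData2Raw to rotate the converted frame into portrait orientation (which is also why width and height are swapped when the JPEG is written). That helper lives in the repo and is not listed here; the following Kotlin sketch only illustrates the idea of rotating a YUV420SP (NV12/NV21) frame 90° clockwise, it is not the repo's implementation:

fun rotateYuv420spClockwise90(input: ByteArray, output: ByteArray, width: Int, height: Int) {
    // Both buffers must be width * height * 3 / 2 bytes
    val frameSize = width * height
    var k = 0
    // Rotate the Y plane: read each source column bottom-up and write it as an output row
    for (x in 0 until width) {
        for (y in height - 1 downTo 0) {
            output[k++] = input[y * width + x]
        }
    }
    // Rotate the interleaved chroma plane; each 2-byte UV/VU pair moves as one unit
    for (x in 0 until width step 2) {
        for (y in height / 2 - 1 downTo 0) {
            output[k++] = input[frameSize + y * width + x]
            output[k++] = input[frameSize + y * width + x + 1]
        }
    }
}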

Using and Encapsulating Camera2

  • Flow
    To make the flow easier to remember, I drew a flow chart.


    (figure: Camera2 preview flow chart)
  • The wrapped start() code
  fun start(textureView: TextureView?, cameraId: Int) {
        mTextureView = textureView
        //CameraManager is the system service that manages the camera devices
        val cameraManager =
            mContext.getSystemService(Context.CAMERA_SERVICE) as CameraManager
        val characteristics: CameraCharacteristics
        try {
            //Note: the LENS_FACING constants ("0" / "1") are used here directly as camera ID
            //strings; in general the IDs should come from cameraManager.cameraIdList
            characteristics = if (cameraId == 0) {
                cameraManager.getCameraCharacteristics("" + CameraCharacteristics.LENS_FACING_FRONT)
            } else {
                cameraManager.getCameraCharacteristics("" + CameraCharacteristics.LENS_FACING_BACK)
            }
            //Describes all output formats and sizes the camera supports
            val map =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
            //Pick the best-matching preview size
            mPreviewSize = getBestSupportedSize(
                ArrayList(
                    listOf(
                        *map.getOutputSizes(
                            SurfaceTexture::class.java
                        )
                    )
                )
            )
            mImageReader = ImageReader.newInstance(
                mPreviewSize.width, mPreviewSize.height
                , ImageFormat.YUV_420_888, 2
            )
            mBackgroundThread = HandlerThread("Camera2Helper")
            mBackgroundThread?.start()
            mBackgroundHandler = Handler(mBackgroundThread!!.looper)
            //Callback that delivers each captured frame
            mImageReader.setOnImageAvailableListener(
                OnImageAvailableListenerImpl(),
                mBackgroundHandler
            )
            if (ActivityCompat.checkSelfPermission(
                    mContext,
                    Manifest.permission.CAMERA
                ) != PackageManager.PERMISSION_GRANTED
            ) {
                return
            }
            if (cameraId == 0) {
                cameraManager.openCamera(
                    "" + CameraCharacteristics.LENS_FACING_FRONT,
                    mDeviceStateCallback,
                    mBackgroundHandler
                )
            } else {
                cameraManager.openCamera(
                    "" + CameraCharacteristics.LENS_FACING_BACK,
                    mDeviceStateCallback,
                    mBackgroundHandler
                )
            }
        } catch (e: CameraAccessException) {
            e.printStackTrace()
        }
    }
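
getBestSupportedSize is a small private helper in the repo that picks a preview size from the list returned by map.getOutputSizes; it is not listed above. A minimal sketch of that kind of selection, here simply preferring the smallest supported size that still covers a target resolution (the explicit target parameters are added for illustration and are not the repo's signature):

// Size is android.util.Size
private fun getBestSupportedSize(sizes: List<Size>, targetWidth: Int, targetHeight: Int): Size {
    // Sort by area and take the smallest size that still covers the target,
    // falling back to the largest supported size
    val sorted = sizes.sortedBy { it.width * it.height }
    return sorted.firstOrNull { it.width >= targetWidth && it.height >= targetHeight }
        ?: sorted.last()
}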

setOnImageAvailableListener delivers each new frame; here we copy the YUV planes and forward them through our own listener.

    private inner class OnImageAvailableListenerImpl : OnImageAvailableListener {
        private var y: ByteArray? = null
        private var u: ByteArray? = null
        private var v: ByteArray? = null
        override fun onImageAvailable(reader: ImageReader) {
            val image = reader.acquireNextImage()
            val planes = image.planes
            //Lazily allocate the y/u/v arrays on the first frame
            if (y == null) {
                y = ByteArray((planes[0].buffer.limit() - planes[0].buffer.position()))
                u = ByteArray(
                    planes[1].buffer.limit() - planes[1].buffer.position()
                )
                v = ByteArray(
                    planes[2].buffer.limit() - planes[2].buffer.position()
                )
            }
            if (image.planes[0].buffer.remaining() == y?.size) {
                //Copy each plane into its own array
                planes[0].buffer.get(y!!)
                planes[1].buffer.get(u!!)
                planes[2].buffer.get(v!!)
            }
            mCamera2Listener?.invoke(y, u, v, mPreviewSize, planes[0].rowStride)
            image.close()
        }
    }
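
The listener only forwards the three plane copies, the preview size and planes[0].rowStride. Whoever consumes them usually wants a single NV21 buffer again, and with YUV_420_888 that means dealing with rowStride padding and the chroma pixelStride. Below is a hedged Kotlin sketch of stitching the arrays back into NV21 under the common, but not guaranteed, assumptions that there is no row padding (rowStride == width) and that the chroma planes are interleaved with pixelStride == 2:

fun planesToNv21(y: ByteArray, u: ByteArray, v: ByteArray, width: Int, height: Int): ByteArray {
    val nv21 = ByteArray(width * height * 3 / 2)
    // The Y plane goes first, unchanged (assumes rowStride == width, i.e. no padding)
    System.arraycopy(y, 0, nv21, 0, width * height)
    // With pixelStride == 2, every second byte of the copied V plane is a V sample and the
    // matching U sample sits at the same offset in the copied U plane, so the VU pairs of
    // NV21 can be rebuilt by stepping through both arrays two bytes at a time.
    var out = width * height
    var i = 0
    while (i < v.size && out + 1 < nv21.size) {
        nv21[out++] = v[i]
        nv21[out++] = u[i]
        i += 2
    }
    return nv21
}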
  • mDeviceStateCallback: the state callback invoked when the camera device is opened
    private val mDeviceStateCallback: CameraDevice.StateCallback =
        object : CameraDevice.StateCallback() {
            override fun onOpened(camera: CameraDevice) {
                mCameraDevice = camera
                createCameraPreviewSession()
            }

            override fun onDisconnected(camera: CameraDevice) {
                camera.close()
                mCameraDevice = null
            }

            override fun onError(camera: CameraDevice, error: Int) {
                camera.close()
                mCameraDevice = null
            }
        }
  • Create the camera preview session
    private fun createCameraPreviewSession() {
        try {
            val texture = mTextureView!!.surfaceTexture
            //Set the default buffer size to the preview size
            texture.setDefaultBufferSize(mPreviewSize.width, mPreviewSize.height)
            val surface = Surface(texture)
            // Create the CaptureRequest.Builder needed for the preview
            mPreviewRequestBuilder =
                mCameraDevice!!.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
            //Enable continuous auto-focus
            mPreviewRequestBuilder.set(
                CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE
            )
            mPreviewRequestBuilder.addTarget(surface)
            mPreviewRequestBuilder.addTarget(mImageReader.surface)
            //The session manages both the preview requests and the capture requests
            mCameraDevice!!.createCaptureSession(
                listOf(
                    surface,
                    mImageReader.surface
                ), mCaptureStateCallback, mBackgroundHandler
            )
        } catch (e: CameraAccessException) {
            e.printStackTrace()
        }
    }
  • Once the session is configured, setRepeatingRequest issues the repeating preview request. You could add still-capture handling in the CaptureCallback here, but since this series is about audio/video I leave it as an empty implementation.
    private val mCaptureStateCallback: CameraCaptureSession.StateCallback =
        object : CameraCaptureSession.StateCallback() {
            override fun onConfigured(session: CameraCaptureSession) {
                if (null == mCameraDevice) {
                    return
                }
                mCaptureSession = session
                try {
                    mCaptureSession!!.setRepeatingRequest(
                        mPreviewRequestBuilder.build(),
                        object : CaptureCallback() {},
                        mBackgroundHandler
                    )
                } catch (e: CameraAccessException) {
                    e.printStackTrace()
                }
            }

            override fun onConfigureFailed(session: CameraCaptureSession) {}
        }
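
The snippets above only cover starting the preview. When the helper is no longer needed, everything created in start() should be released as well; a minimal stop() sketch, assuming the fields are declared as they are used above (nullable session/device, the ImageReader and the background HandlerThread):

fun stop() {
    // Release in roughly the reverse order of creation
    mCaptureSession?.close()
    mCaptureSession = null
    mCameraDevice?.close()
    mCameraDevice = null
    mImageReader.close()
    // Stop the background thread that delivered the camera and image callbacks
    mBackgroundThread?.quitSafely()
    mBackgroundThread = null
}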

Using and Encapsulating CameraX

  • Add the dependencies
    implementation "androidx.camera:camera-core:1.0.0-alpha05"
    implementation "androidx.camera:camera-camera2:1.0.0-alpha05"
  • The CameraX API is much simpler
    • CameraX.bindToLifecycle
      • The first parameter is a LifecycleOwner; passing the activity is usually enough
      • The following parameters are UseCases, and several can be passed
        • Preview provides the preview output callback
        • ImageAnalysis provides the frame-analysis callback
    • setOnPreviewOutputUpdateListener: override onUpdated, mainly to re-attach the TextureView so that switching lenses does not throw an error
    • setAnalyzer sets the analyzer that receives and analyzes each frame; from there we forward the image data through our own callback
      Full code:
class CameraXHelper(
    private var mLifecycleOwner: LifecycleOwner? = null,
    private var mTextureView: TextureView
) :
    OnPreviewOutputUpdateListener, ImageAnalysis.Analyzer {
    private val mHandlerThread: HandlerThread = HandlerThread("CameraXHelper")
    var width = SizeUtils.screenWidth
    var height = SizeUtils.screenHeight

    //Use the back camera
    private val currentFacing = LensFacing.BACK
    fun startCamera() {
        if (mLifecycleOwner == null) {
            return
        }
        CameraX.bindToLifecycle(mLifecycleOwner, preView, analysis)
    }

    //Preview use case; setTargetResolution sets the preview resolution
    private val preView: Preview
        get() {
            val previewConfig =
                PreviewConfig.Builder().setTargetResolution(Size(width, height))
                    .setLensFacing(currentFacing).build()
            val preview = Preview(previewConfig)
            preview.onPreviewOutputUpdateListener = this
            return preview
        }

    private val analysis: ImageAnalysis
        get() {
            val imageAnalysisConfig = ImageAnalysisConfig.Builder()
                .setCallbackHandler(Handler(mHandlerThread.looper))
                .setLensFacing(currentFacing)
                .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
                .setTargetResolution(Size(width, height))
                .build()
            val imageAnalysis = ImageAnalysis(imageAnalysisConfig)
            imageAnalysis.analyzer = this
            return imageAnalysis
        }

    override fun onUpdated(output: PreviewOutput) {
        val surfaceTexture = output.surfaceTexture
        //Avoid errors when switching lenses
        if (mTextureView.surfaceTexture !== surfaceTexture) {
            if (mTextureView.isAvailable) {
                // Detach and re-attach the TextureView; otherwise switching cameras throws an error
                val parent = mTextureView.parent as ViewGroup
                parent.removeView(mTextureView)
                parent.addView(mTextureView, 0)
                parent.requestLayout()
            }
            mTextureView.surfaceTexture = surfaceTexture
        }
    }

    private val lock = ReentrantLock()
    private var y: ByteArray? = null
    private var u: ByteArray? = null
    private var v: ByteArray? = null
    override fun analyze(image: ImageProxy, rotationDegrees: Int) {
        lock.lock()
        try {
            val planes = image.planes
            //Lazily allocate the y/u/v arrays on the first frame
            if (y == null) {
                y = ByteArray(
                    planes[0].buffer.limit() - planes[0].buffer.position()
                )
                u = ByteArray(
                    planes[1].buffer.limit() - planes[1].buffer.position()
                )
                v = ByteArray(
                    planes[2].buffer.limit() - planes[2].buffer.position()
                )
            }
            if (image.planes[0].buffer.remaining() == y!!.size) {
                //Copy each plane into its own array
                planes[0].buffer.get(y!!)
                planes[1].buffer.get(u!!)
                planes[2].buffer.get(v!!)
                val stride = planes[0].rowStride
                val size = Size(image.width, image.height)
                if (cameraXListener != null) {
                    cameraXListener!!.invoke(y, u, v, size, stride)
                }
            }
        } catch (e: Exception) {
            e.printStackTrace()
        } finally {
            lock.unlock()
        }
    }

    private var cameraXListener: ((ByteArray?, ByteArray?, ByteArray?, Size, Int) -> Unit)? = null

    fun setCameraXListener(cameraXListener: ((ByteArray?, ByteArray?, ByteArray?, Size, Int) -> Unit)) {
        this.cameraXListener = cameraXListener
    }

    init {
        //The analyzer callbacks are delivered on this worker thread
        mHandlerThread.start()
    }
}
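
Finally, a usage sketch from an Activity (the layout and view ids are hypothetical; runtime permission handling is omitted):

class PreviewActivity : AppCompatActivity() {
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_preview)          // hypothetical layout
        val textureView = findViewById<TextureView>(R.id.texture_view)
        // Bind the helper to the Activity's lifecycle
        val cameraXHelper = CameraXHelper(this, textureView)
        cameraXHelper.setCameraXListener { y, u, v, size, rowStride ->
            // The Y/U/V planes of every analyzed frame arrive here on the helper's worker thread
        }
        // Assumes the CAMERA permission has already been granted
        cameraXHelper.startCamera()
    }
}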
