自定义音视频基类六:ImageReader+GPUImage+MediaCodec

音视频这块从一个懵懵懂懂的新手到现在也快一个月了,越是学习越是感受到音视频开发的博大精深,每进一步都要花费很多精力,这篇文章是在前面学习的基础上对自己的总结,实现视频的采集+渲染+编码的基类

现在9102下半年了,5.0以下的手机已经不多了,对音视频开发来说最低版本可以定为5.0了(个人意见如有错误请指正),便不再适配5.0以下手机,如果想适配5.0以下手机可以看看前面的几篇文章,自己修改一下

废话不多说,首先介绍一下我总结的视频开发流程:

1、初始化摄像头和编码器

2、开启摄像头

3、ImageReader获得摄像头返回的视频数据

4、将Image视频数据转成ByteArray

5、初始化渲染组件(GPUImage)

6、渲染原始视频数据并显示到屏幕上

7、然后将渲染后的数据导入MediaCodec创建的inputSurface

8、硬编码渲染的数据

 

1、初始化摄像头和编码器

 

/**
 * Initializes Camera2: grabs the system camera service, picks the camera id
 * at position `index` and computes the preview size.
 *
 * NOTE(review): `index`, `getSurfaceSize()` and the assigned properties are
 * declared elsewhere in the class — presumably `index` selects front/back
 * camera; verify against the switch-camera logic.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun initCamera2() {
    cameraManager = application.getSystemService(Context.CAMERA_SERVICE) as CameraManager
    cameraIdList = cameraManager.cameraIdList
    cameraId = cameraIdList[index]

    previewSize = getSurfaceSize()
}

/**
 * Creates and starts the H.264 ("video/avc") surface-input encoder.
 *
 * MediaCodec call order matters:
 *   setCallback -> configure -> obtain/set input surface -> start
 * The async callback must be installed BEFORE configure(), and the input
 * surface must be obtained after configure() but before start().
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun createSurfaceVideoEncoder() {
    // Video encoder.
    videoEncoder = MediaCodec.createEncoderByType("video/avc")

    // FIX: the async callback must be set before configure() per MediaCodec
    // docs; the original set it afterwards.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        videoEncoder.setCallback(mediaCodecCallback, getBackgroundHandler())
    } else {
        videoEncoder.setCallback(mediaCodecCallback)
    }

    // Create the video MediaFormat.
    val videoFormat =
        MediaFormat.createVideoFormat("video/avc", previewSize.width, previewSize.height)

    // Surface input: the encoder reads frames directly from inputSurface.
    videoFormat.setInteger(
        MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
    )
    // Target bit rate in bps — mandatory for encoders.
    videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1250000)
    // Frame rate — mandatory for video encoders.
    videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25)
    // Key-frame (I-frame) interval in seconds.
    videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
    // BITRATE_MODE_CBR = constant rate, BITRATE_MODE_CQ = constant quality,
    // BITRATE_MODE_VBR = variable rate (complex scenes get more bits).
    videoFormat.setInteger(
        MediaFormat.KEY_BITRATE_MODE,
        MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR
    )
    // FIX: removed the original KEY_COMPLEXITY setting — it passed
    // BITRATE_MODE_VBR, a constant from an unrelated enum. KEY_COMPLEXITY
    // expects a value from the codec's complexity range and is mainly
    // meaningful in CQ mode, so the bogus line has been dropped.

    videoEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)

    // Obtain the input Surface (persistent variant, reusable across codec
    // instances, on M+).
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        inputSurface = MediaCodec.createPersistentInputSurface()
        videoEncoder.setInputSurface(inputSurface)
    } else {
        inputSurface = videoEncoder.createInputSurface()
    }

    paint = Paint()

    videoEncoder.start()
}

2、开启摄像头

/**
 * Opens the camera if the CAMERA permission is granted; otherwise shows a
 * dialog that can send the user to the app's settings page to grant it.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun openCamera() {
    val cameraPermission =
        ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
    if (cameraPermission == PackageManager.PERMISSION_GRANTED) {
        cameraManager.openCamera(cameraId, callback, getBackgroundHandler())
        return
    }

    // Permission missing: explain and offer a jump to the app settings page.
    AlertDialog.Builder(this)
        .setTitle("开启相机失败")
        .setMessage("缺少开启相机的权限")
        .setCancelable(false)
        .setNegativeButton("取消") { _, _ -> }
        .setPositiveButton("授权") { _, _ ->
            val settingsIntent = Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS)
            settingsIntent.data = Uri.parse("package:$packageName")
            startActivity(settingsIntent)
        }
        .show()
}

/**
 * CameraDevice.StateCallback: sets up the ImageReader pipeline and capture
 * session once the camera opens, and releases resources on error/close.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private val callback = object : CameraDevice.StateCallback() {
    // Called on successful open; we receive the CameraDevice instance.
    override fun onOpened(camera: CameraDevice) {
        cameraDevice = camera

        // Up to 5 frames may be queued before the listener must drain them.
        imageReader = ImageReader.newInstance(
            previewSize.width,
            previewSize.height,
            ImageFormat.YUV_420_888,
            5
        )
        imageReader.setOnImageAvailableListener(
            onImageAvailableListener,
            getBackgroundHandler()
        )
        imageSurface = imageReader.surface

        previewCaptureRequest = camera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD)
        previewCaptureRequest.addTarget(imageSurface)

        // Create a capture session targeting only the ImageReader surface.
        camera.createCaptureSession(
            arrayListOf(imageSurface),
            mSessionCallback,
            getBackgroundHandler()
        )
    }

    // Called when the camera is no longer usable (e.g. taken by another app).
    // FIX: the Android docs require closing the device here; the original
    // left this empty, leaking the camera handle.
    override fun onDisconnected(camera: CameraDevice) {
        camera.close()
    }

    // Called when opening fails; `error` carries the reason. Release everything.
    override fun onError(camera: CameraDevice, error: Int) {
        camera.close()
        showError(error)
        releaseBackgroundThread()
    }

    // Called once the camera is closed: tear down session and reader resources.
    override fun onClosed(camera: CameraDevice) {
        super.onClosed(camera)
        cameraCaptureSession?.close()
        imageReader.close()
        imageSurface.release()
    }
}

/**
 * Capture-session state callback: on success, caches the session and starts
 * the repeating preview request; on failure, closes the session and notifies
 * the user.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
val mSessionCallback = object : CameraCaptureSession.StateCallback() {

    // Session ready: start streaming frames into the ImageReader surface.
    override fun onConfigured(session: CameraCaptureSession) {
        cameraCaptureSession = session
        session.setRepeatingRequest(previewCaptureRequest.build(), null, getBackgroundHandler())
    }

    // Session could not be configured.
    override fun onConfigureFailed(session: CameraCaptureSession) {
        session.close()
        showToast("创建Session失败")
    }
}

3、ImageReader获得摄像头返回的视频数据

/**
 * Receives YUV_420_888 frames from the ImageReader, converts each to an
 * NV21 ByteArray and forwards it via [sendVideoData].
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private val onImageAvailableListener = ImageReader.OnImageAvailableListener { reader ->
    // FIX: acquireNextImage() can return null when no frame is queued —
    // the original would NPE on that.
    val image = reader.acquireNextImage() ?: return@OnImageAvailableListener
    try {
        val byteArray = getDataFromImage(image, NV21)
        sendVideoData(byteArray, image.width, image.height)
    } finally {
        // FIX: always close the Image, even if conversion throws — otherwise
        // the reader's buffer queue (maxImages = 5) fills up and stalls.
        image.close()
    }
}

// Hands one converted frame (NV21 here) to the subclass for rendering/encoding.
abstract fun sendVideoData(data:ByteArray, width:Int, height:Int)

4、将Image视频数据转成ByteArray

 

/**
 * Copies a YUV_420_888 [Image] into one tightly packed ByteArray in either
 * I420 (Y + U + V planar) or NV21 (Y + interleaved VU) layout, honoring the
 * image's crop rect, rowStride and pixelStride.
 *
 * @param image       camera frame; format must be YUV_420_888 (3 planes)
 * @param colorFormat target layout, I420 or NV21 (class-level constants)
 * @return packed pixel data of size width*height*bitsPerPixel/8
 * @throws IllegalArgumentException for any other colorFormat
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private fun getDataFromImage(image:Image, colorFormat:Int):ByteArray {
    require(!(colorFormat != I420 && colorFormat != NV21)) { "only support I420 and NV21" }
    val crop = image.cropRect
    val format = image.format
    val width = crop.width()
    val height = crop.height()
    val planes = image.planes;
    // For YUV_420_888 this is width*height*3/2 bytes.
    val data =  ByteArray(width * height * ImageFormat.getBitsPerPixel(format) / 8)
    // Scratch row buffer for planes whose pixelStride > 1.
    val rowData = ByteArray(planes[0].rowStride)

    // channelOffset: where this plane's samples start in `data`;
    // outputStride: gap between consecutive samples (2 = interleave).
    var channelOffset = 0
    var outputStride = 1
    for (i in planes.indices){
        when(i){
            0 ->{
                // Y plane: always packed first.
                channelOffset = 0
                outputStride = 1
            }
            1 ->{
                // U plane: planar after Y for I420; interleaved at odd
                // positions (after V) for NV21.
                if (colorFormat == I420) {
                    channelOffset = width * height
                    outputStride = 1
                } else if (colorFormat == NV21) {
                    channelOffset = width * height + 1
                    outputStride = 2
                }
            }
            2 ->{
                // V plane: after Y+U (1.25*w*h) for I420; interleaved at even
                // positions right after Y for NV21.
                if (colorFormat == I420) {
                    channelOffset = (width * height * 1.25).toInt()
                    outputStride = 1
                } else if (colorFormat == NV21) {
                    channelOffset = width * height;
                    outputStride = 2
                }
            }
        }
        val buffer = planes[i].buffer
        val rowStride = planes[i].rowStride
        val pixelStride = planes[i].pixelStride
        // Chroma planes are subsampled 2x in both dimensions.
        val shift = if (i == 0) 0 else 1
        val w = width shr shift
        val h = height shr shift
        // Seek to the first sample inside the crop rect.
        buffer.position(rowStride * (crop.top shr shift) + pixelStride * (crop.left shr shift))
        for (row in 0 until h) {
            val length: Int
            if (pixelStride == 1 && outputStride == 1) {
                // Fast path: plane row is already packed — bulk copy.
                length = w
                buffer.get(data, channelOffset, length)
                channelOffset += length
            } else {
                // Slow path: copy the strided row, then pick every
                // pixelStride-th byte into the output at outputStride spacing.
                length = (w - 1) * pixelStride + 1
                buffer.get(rowData, 0, length)
                for (col in 0 until w) {
                    data[channelOffset] = rowData[col * pixelStride]
                    channelOffset += outputStride
                }
            }
            // Skip the row padding (rowStride may exceed the pixel data);
            // the last row may not have padding, hence the guard.
            if (row < h - 1) {
                buffer.position(buffer.position() + rowStride - length)
            }
        }
    }
    return data
}

参考自https://www.polarxiong.com/archives/Android-YUV_420_888%E7%BC%96%E7%A0%81Image%E8%BD%AC%E6%8D%A2%E4%B8%BAI420%E5%92%8CNV21%E6%A0%BC%E5%BC%8Fbyte%E6%95%B0%E7%BB%84.html

5、初始化渲染组件(GPUImage)

GPUImage参考自https://blog.csdn.net/afei__/article/details/99313826

class ShortVideoActivity : BaseShortVideoActivity() {

    override fun getLayoutId(): Int = R.layout.activity_short_video

    override fun init() {
        super.init()
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) return

        // Camera rotation for the renderer.
        updateGPUImageRotate()
        // Continuous render mode: the renderer redraws the scene repeatedly.
        gpuImageView.setRenderMode(GPUImageView.RENDERMODE_CONTINUOUSLY)
        // Apply a grayscale filter.
        gpuImageView.filter = GPUImageGrayscaleFilter()

        // Front/back camera switch also needs a rotation update.
        switchCamera.setOnClickListener {
            switchCamera()
            updateGPUImageRotate()
        }
    }

    /**
     * Updates the GPUImage rotation/mirroring to match the current camera.
     */
    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    private fun updateGPUImageRotate() {
        val rotation = getRotation(getCameraOrientation())
        var flipHorizontal = false
        var flipVertical = false
        // The front camera preview must be mirrored.
        if (isFrontCamera()) {
            if (rotation == Rotation.NORMAL || rotation == Rotation.ROTATION_180) {
                flipHorizontal = true
            } else {
                flipVertical = true
            }
        }
        gpuImageView.gpuImage.setRotation(rotation, flipHorizontal, flipVertical)
    }

    /**
     * Maps a sensor orientation in degrees to a [Rotation] constant.
     */
    private fun getRotation(orientation: Int): Rotation = when (orientation) {
        90 -> Rotation.ROTATION_90
        180 -> Rotation.ROTATION_180
        270 -> Rotation.ROTATION_270
        else -> Rotation.NORMAL
    }

    /**
     * Resizes gpuImageView so its aspect ratio matches the camera preview.
     */
    override fun onWindowFocusChanged(hasFocus: Boolean) {
        super.onWindowFocusChanged(hasFocus)
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) return

        val viewWidth = gpuImageView.width
        val viewHeight = gpuImageView.height
        val previewRatio = previewSize.width.toFloat() / previewSize.height
        val viewRatio = viewHeight.toFloat() / viewWidth
        when {
            // View is too wide: shrink its width to fit the preview ratio.
            previewRatio > viewRatio -> {
                val inverseRatio = previewSize.height.toFloat() / previewSize.width
                val lp = gpuImageView.layoutParams
                lp.width = (viewHeight * inverseRatio + .5).toInt()
                gpuImageView.layoutParams = lp
            }
            // View is too tall: shrink its height instead.
            previewRatio < viewRatio -> {
                val lp = gpuImageView.layoutParams
                lp.height = (viewWidth * previewRatio + .5).toInt()
                gpuImageView.layoutParams = lp
            }
        }
    }

}

6、渲染原始视频数据并显示到屏幕上

 

/**
 * Pushes one converted frame into the GPUImage renderer for on-screen
 * display, then captures the rendered bitmap and forwards it to the encoder
 * surface via [drawBitmap].
 */
override fun sendVideoData(data:ByteArray, width:Int, height:Int) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) return
    gpuImageView.updatePreviewFrame(data, width, height)
    drawBitmap(gpuImageView.capture())
}

7、然后将渲染后的数据导入MediaCodec创建的inputSurface

 

/**
 * Draws the rendered bitmap onto the encoder's input Surface so MediaCodec
 * can pick it up as the next frame.
 */
fun drawBitmap(bitmap:Bitmap){
    val canvas = inputSurface.lockCanvas(null)
    try {
        canvas.drawBitmap(bitmap, 0f, 0f, paint)
    } finally {
        // FIX: always unlock — if drawBitmap throws, the original left the
        // Surface locked, wedging every later frame.
        inputSurface.unlockCanvasAndPost(canvas)
    }
}

 

8、硬编码渲染的数据

 

// Fragment from the MediaCodec async callback (presumably
// onOutputBufferAvailable — the enclosing function is not shown).
if (index >= 0) {
    val outputBuffers = codec.getOutputBuffer(index)
    if (outputBuffers != null) {
        if (info.size != 0) {
            // Bound the buffer to the valid encoded region.
            // NOTE(review): the encoded bytes are never written anywhere
            // here (no MediaMuxer/stream) — presumably elided for the article.
            outputBuffers.position(info.offset);
            outputBuffers.limit(info.offset + info.size)
        }
    }
    // NOTE(review): render=true only matters for decoders with an output
    // surface; an encoder has none, so `false` would be clearer — confirm.
    codec.releaseOutputBuffer(index, true)
}

 

 

 

 

 

你可能感兴趣的:(Camera2,音视频开发)