打开相机并获取相机流，并将 YUV 数据转换成 NV21
1.导入依赖
在app下的build.gradle中加入
// CameraX core artifacts share one version; camera-view is still in alpha
// and is versioned separately below.
def camerax_version ="1.0.1"
implementation"androidx.camera:camera-core:${camerax_version}"
implementation"androidx.camera:camera-camera2:${camerax_version}"
implementation"androidx.camera:camera-lifecycle:${camerax_version}"
// PreviewView lives in camera-view (alpha release train).
implementation"androidx.camera:camera-view:1.0.0-alpha28"
2.相机布局文件
使用 AndroidX 的 ConstraintLayout 布局
<?xml version="1.0" encoding="utf-8"?>
<!-- Full-screen camera layout: a PreviewView (id "textureView") filling a
     ConstraintLayout root.
     NOTE(review): the opening tags and the android namespace were lost when
     this article was extracted; reconstructed from the surviving attributes
     (PreviewView is required by the Kotlin code below) - confirm against the
     original project. -->
<androidx.constraintlayout.widget.ConstraintLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".MainActivity">

    <androidx.camera.view.PreviewView
        android:id="@+id/textureView"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        tools:layout_editor_absoluteX="0dp"
        tools:layout_editor_absoluteY="0dp" />

</androidx.constraintlayout.widget.ConstraintLayout>
3.打开相机
// On-screen camera preview surface; resolved from the layout in onCreate.
private var textureView:PreviewView? =null
// Single-threaded executor that runs the ImageAnalysis analyzer callback.
private val executor =Executors.newSingleThreadExecutor()
// Random request code used to match the onRequestPermissionsResult callback.
private val permissionsRequestCode =Random.nextInt(0,10000)
// Which camera to open; defaults to the front-facing lens.
private var lensFacing:Int =CameraSelector.LENS_FACING_FRONT
// Runtime permissions required before the camera can be opened.
private val permissions =listOf(Manifest.permission.CAMERA,Manifest.permission.READ_EXTERNAL_STORAGE,Manifest.permission.WRITE_EXTERNAL_STORAGE)
/**
 * Inflates the activity layout and grabs the PreviewView that will display
 * the camera feed. The camera itself is bound later, in onResume, once
 * permissions are confirmed.
 */
override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_main)
    // Dropped the non-idiomatic trailing semicolon from the original.
    textureView = findViewById(R.id.textureView)
}
//先检测权限如果有权限就打开相机
/**
 * On each return to the foreground, either bind the camera use cases
 * (permissions already granted) or prompt the user for the missing
 * permissions; the result arrives in onRequestPermissionsResult.
 */
override fun onResume() {
    super.onResume()
    if (hasPermissions(this)) {
        // Every required permission is granted: open the camera.
        bindCameraUseCases()
    } else {
        // Request the missing permissions from the user.
        ActivityCompat.requestPermissions(this, permissions.toTypedArray(), permissionsRequestCode)
    }
}
/** 绑定预览和获取图片数据*/
/**
 * Binds the Preview and ImageAnalysis use cases to this activity's lifecycle.
 *
 * The analyzer converts every camera frame into a packed NV21 (or tightly
 * packed YUV_420_888) buffer via [YuvByteBuffer] and logs the measured frame
 * rate every 10 frames.
 */
@SuppressLint("UnsafeExperimentalUsageError", "UnsafeOptInUsageError")
private fun bindCameraUseCases() = textureView?.post {
    val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
    cameraProviderFuture.addListener(Runnable {
        val cameraProvider = cameraProviderFuture.get()

        // Preview use case, rotated to match the current display orientation.
        val preview = textureView?.display?.let {
            Preview.Builder()
                .setTargetAspectRatio(AspectRatio.RATIO_4_3)
                .setTargetRotation(it.rotation)
                .build()
        }

        // Analysis use case: keep only the latest frame so a slow analyzer
        // never causes frames to queue up.
        val imageAnalysis = ImageAnalysis.Builder()
            .setTargetAspectRatio(AspectRatio.RATIO_4_3)
            .setTargetRotation(textureView!!.display.rotation)
            .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
            .build()

        var frameCounter = 0
        var lastFpsTimestamp = System.currentTimeMillis()
        // Destination buffer for the NV21 conversion, reused across frames.
        var yuvBits: ByteBuffer? = null

        imageAnalysis.setAnalyzer(executor, ImageAnalysis.Analyzer { image ->
            // Convert the camera frame to NV21 / packed YUV_420_888.
            val yuvBuffer = image.image?.let { YuvByteBuffer(it, yuvBits) }
            // BUGFIX: the original never assigned yuvBits, so YuvByteBuffer
            // allocated a fresh direct buffer on every single frame. Keep the
            // buffer so the next frame reuses it.
            yuvBits = yuvBuffer?.buffer
            image.close()

            // Log the effective camera frame rate once every 10 frames.
            val frameCount = 10
            if (++frameCounter % frameCount == 0) {
                frameCounter = 0
                val now = System.currentTimeMillis()
                val delta = now - lastFpsTimestamp
                val fps = 1000 * frameCount.toFloat() / delta
                Log.d(TAG, "FPS: ${"%.02f".format(fps)}")
                lastFpsTimestamp = now
            }
        })

        // Create a new camera selector each time, enforcing lens facing.
        val cameraSelector = CameraSelector.Builder().requireLensFacing(lensFacing).build()

        // Rebind: drop any previously bound use cases, then attach ours.
        // (Removed the unused `val c: CameraInfo` local from the original.)
        cameraProvider.unbindAll()
        cameraProvider.bindToLifecycle(
            this as LifecycleOwner, cameraSelector, preview, imageAnalysis
        )

        // Link the preview use case with the on-screen PreviewView surface.
        preview?.setSurfaceProvider(textureView?.surfaceProvider)
    }, ContextCompat.getMainExecutor(this))
}
/** Convenience method used to check if all permissions required by this app are granted */
/** Returns true only when every permission in [permissions] has been granted. */
private fun hasPermissions(context: Context): Boolean {
    // Equivalent to all(== GRANTED): no permission may be in any other state.
    return permissions.none { permission ->
        ContextCompat.checkSelfPermission(context, permission) != PackageManager.PERMISSION_GRANTED
    }
}
/**
 * Handles the result of the runtime-permission dialog: binds the camera when
 * everything was granted, otherwise finishes the activity since it cannot
 * function without the camera permission.
 */
override fun onRequestPermissionsResult(
    requestCode: Int,
    // BUGFIX: the generic type was lost in the article's formatting; the
    // Android override signature requires Array<out String>.
    permissions: Array<out String>,
    grantResults: IntArray
) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults)
    if (requestCode == permissionsRequestCode && hasPermissions(this)) {
        bindCameraUseCases()
    } else {
        finish() // If we don't have the required permissions, we can't run
    }
}
4. YUV 转 NV21 工具
// Source-only marker annotation restricting an Int to the two output layouts
// YuvByteBuffer can produce: NV21 or tightly packed YUV_420_888.
@kotlin.annotation.Retention(AnnotationRetention.SOURCE)
@IntDef(ImageFormat.NV21,ImageFormat.YUV_420_888)
annotation class YuvType
/**
 * Repacks a camera [Image] in YUV_420_888 layout into one contiguous buffer,
 * stripping any row padding.
 *
 * If the chroma planes report pixelStride == 2 the source chroma is already
 * interleaved and the result is NV21; if pixelStride == 1 the result is a
 * tightly packed planar YUV_420_888. [type] reports which one was produced.
 *
 * @param image     source frame; must be a 4:2:0 YUV image (three planes).
 * @param dstBuffer optional buffer to reuse; a new direct buffer is allocated
 *                  when it is null, too small, read-only, or not direct.
 */
class YuvByteBuffer(image: Image, dstBuffer: ByteBuffer? = null) {

    @YuvType
    val type: Int
    val buffer: ByteBuffer

    init {
        val wrappedImage = ImageWrapper(image)

        // pixelStride == 1 -> planar chroma (packed YUV_420_888);
        // pixelStride == 2 -> interleaved chroma, i.e. NV21.
        type = if (wrappedImage.u.pixelStride == 1) {
            ImageFormat.YUV_420_888
        } else {
            ImageFormat.NV21
        }

        // 4:2:0 layout: full-resolution Y plane plus two quarter-size chroma planes.
        val size = image.width * image.height * 3 / 2

        // BUGFIX: the original text read "dstBuffer.capacity()" with no
        // comparison - the "< size" was lost in extraction, leaving the check
        // broken. Restored so an undersized buffer is replaced rather than
        // overflowed.
        buffer = if (
            dstBuffer == null || dstBuffer.capacity() < size ||
            dstBuffer.isReadOnly || !dstBuffer.isDirect
        ) {
            ByteBuffer.allocateDirect(size)
        } else {
            dstBuffer
        }
        buffer.rewind()
        removePadding(wrappedImage)
    }

    // Input buffers are always direct as described in
    // https://developer.android.com/reference/android/media/Image.Plane#getBuffer()
    /** Copies all three planes into [buffer], dropping per-row padding. */
    private fun removePadding(image: ImageWrapper) {
        val sizeLuma = image.y.width * image.y.height
        val sizeChroma = image.u.width * image.u.height

        // Luma: copy row by row only when the rows are padded.
        if (image.y.rowStride > image.y.width) {
            removePaddingCompact(image.y, buffer, 0)
        } else {
            buffer.position(0)
            buffer.put(image.y.buffer)
        }

        if (type == ImageFormat.YUV_420_888) {
            // Planar chroma: U then V, each compacted if padded.
            if (image.u.rowStride > image.u.width) {
                removePaddingCompact(image.u, buffer, sizeLuma)
                removePaddingCompact(image.v, buffer, sizeLuma + sizeChroma)
            } else {
                buffer.position(sizeLuma)
                buffer.put(image.u.buffer)
                buffer.position(sizeLuma + sizeChroma)
                buffer.put(image.v.buffer)
            }
        } else {
            // Interleaved chroma (NV21): the V plane already holds VUVU...,
            // but its buffer is one byte short of a full VU plane, so the very
            // last U byte is patched in from the U plane afterwards.
            if (image.u.rowStride > image.u.width * 2) {
                removePaddingNotCompact(image, buffer, sizeLuma)
            } else {
                buffer.position(sizeLuma)
                var uv = image.v.buffer
                val properUVSize = image.v.height * image.v.rowStride - 1
                if (uv.capacity() > properUVSize) {
                    uv = clipBuffer(image.v.buffer, 0, properUVSize)
                }
                buffer.put(uv)
                val lastOne = image.u.buffer[image.u.buffer.capacity() - 1]
                buffer.put(buffer.capacity() - 1, lastOne)
            }
        }
        buffer.rewind()
    }

    /**
     * Compacts one planar (pixelStride == 1) plane into [dst] at [offset],
     * copying plane.width bytes per row and skipping the row-stride padding.
     */
    private fun removePaddingCompact(
        plane: PlaneWrapper,
        dst: ByteBuffer,
        offset: Int
    ) {
        require(plane.pixelStride == 1) {
            "use removePaddingCompact with pixelStride == 1"
        }
        val src = plane.buffer
        val rowStride = plane.rowStride
        var row: ByteBuffer
        dst.position(offset)
        for (i in 0 until plane.height) {
            row = clipBuffer(src, i * rowStride, plane.width)
            dst.put(row)
        }
    }

    /**
     * Compacts interleaved (pixelStride == 2) chroma into [dst] at [offset].
     * All rows but the last are read from the V plane (VUVU... order for
     * NV21); the final row is read from the U plane starting one byte early,
     * which yields the same VU ordering while staying inside that buffer's
     * bounds (the V buffer lacks the trailing U byte).
     */
    private fun removePaddingNotCompact(
        image: ImageWrapper,
        dst: ByteBuffer,
        offset: Int
    ) {
        require(image.u.pixelStride == 2) {
            "use removePaddingNotCompact pixelStride == 2"
        }
        val width = image.u.width
        val height = image.u.height
        val rowStride = image.u.rowStride
        var row: ByteBuffer
        dst.position(offset)
        for (i in 0 until height - 1) {
            row = clipBuffer(image.v.buffer, i * rowStride, width * 2)
            dst.put(row)
        }
        row = clipBuffer(image.u.buffer, (height - 1) * rowStride - 1, width * 2)
        dst.put(row)
    }

    /**
     * Returns a zero-copy view of [buffer] covering [size] bytes starting at
     * [start]; the source buffer's own position/limit are left untouched.
     */
    private fun clipBuffer(buffer: ByteBuffer, start: Int, size: Int): ByteBuffer {
        val duplicate = buffer.duplicate()
        duplicate.position(start)
        duplicate.limit(start + size)
        return duplicate.slice()
    }

    /** Validates the image's plane layout and exposes the three planes. */
    private class ImageWrapper(image: Image) {
        val width = image.width
        val height = image.height
        val y = PlaneWrapper(width, height, image.planes[0])
        // Chroma planes are subsampled 2x in both dimensions (4:2:0).
        val u = PlaneWrapper(width / 2, height / 2, image.planes[1])
        val v = PlaneWrapper(width / 2, height / 2, image.planes[2])

        // Check this is a supported image format
        // https://developer.android.com/reference/android/graphics/ImageFormat#YUV_420_888
        init {
            require(y.pixelStride == 1) {
                "Pixel stride for Y plane must be 1 but got ${y.pixelStride}instead."
            }
            require(u.pixelStride == v.pixelStride && u.rowStride == v.rowStride) {
                "U and V planes must have the same pixel and row strides " +
                        "but got pixel=${u.pixelStride} row=${u.rowStride} for U " +
                        "and pixel=${v.pixelStride} and row=${v.rowStride}for V"
            }
            require(u.pixelStride == 1 || u.pixelStride == 2) {
                "Supported pixel strides for U and V planes are 1 and 2"
            }
        }
    }

    /** Pairs a plane's buffer/strides with its logical width and height. */
    private class PlaneWrapper(width: Int, height: Int, plane: Image.Plane) {
        val width = width
        val height = height
        val buffer: ByteBuffer = plane.buffer
        val rowStride = plane.rowStride
        val pixelStride = plane.pixelStride
    }
}
5.AndroidManifest.xml
下载地址:https://download.csdn.net/download/qq_28884137/31845235