Preview
Create a SurfaceView
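The later code reads the preview Surface from binding.previewSurface, so the layout is assumed to expose a SurfaceView through view binding. A minimal sketch of hooking up the SurfaceHolder callback so the camera is only opened once the surface exists (the binding field name and the openCamera() helper are assumptions, not shown in the original):

// A minimal sketch, assuming the layout contains a SurfaceView exposed through
// view binding as binding.previewSurface. openCamera() stands in for the
// "Open the camera" code in the next step.
binding.previewSurface.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // The preview Surface is only valid from this point on.
        openCamera();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }
});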
Open the camera
CameraManager cameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
try {
    Log.i(TAG, "camera size = " + cameraManager.getCameraIdList().length);
} catch (CameraAccessException e) {
    e.printStackTrace();
}
try {
    cameraManager.openCamera(cameraId, new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice camera) {
            mCameraDevice = camera;
            startCamera();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
        }

        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
        }
    }, mCameraHandler);
} catch (CameraAccessException e) {
    e.printStackTrace();
}
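The snippet above assumes cameraId and mCameraHandler already exist, and that the CAMERA runtime permission has been granted. A minimal sketch of preparing them (the HandlerThread name is arbitrary; the back-facing camera selection is one possible choice):

// Assumed setup for the fields used above: a background handler for camera
// callbacks and the id of a back-facing camera. Names are illustrative.
HandlerThread cameraThread = new HandlerThread("CameraBackground");
cameraThread.start();
mCameraHandler = new Handler(cameraThread.getLooper());

String cameraId = null;
try {
    for (String id : cameraManager.getCameraIdList()) {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
            cameraId = id;
            break;
        }
    }
} catch (CameraAccessException e) {
    e.printStackTrace();
}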
Start the preview
CameraToH264 cameraToH264 = new CameraToH264();
String path = MainActivity.VIDEO_PATH + "_" + System.currentTimeMillis();
cameraToH264.init(width, height, path + fileName + ".mp4");
cameraToH264.startEncoder();
ImageReader imageReader = ImageReader.newInstance(width, height, ImageFormat.YUV_420_888, 2);
try {
    mCaptureRequestBuild = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
    e.printStackTrace();
}
Surface surface = binding.previewSurface.getHolder().getSurface();
List<Surface> surfaces = new ArrayList<>();
surfaces.add(surface);
surfaces.add(imageReader.getSurface());
mCaptureRequestBuild.addTarget(surface);
mCaptureRequestBuild.addTarget(imageReader.getSurface());
imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireLatestImage();
        if (image == null) {
            return;
        }
        byte[] mYUVByteArray = YUV_420_888toNV21(image);
        if (cameraToH264 != null) {
            cameraToH264.putData(mYUVByteArray); // encode the frame with MediaCodec
        }
        image.close();
    }
}, mCameraHandler);
try {
    mCameraDevice.createCaptureSession(surfaces,
            new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    mCameraCaptureSession = session;
                    mCaptureRequestBuild.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
                    try {
                        mCameraCaptureSession.setRepeatingRequest(mCaptureRequestBuild.build(), null, mCameraHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                    Toast.makeText(mContext, "Session configuration failed", Toast.LENGTH_SHORT).show();
                }
            }, mCameraHandler);
} catch (CameraAccessException e) {
    e.printStackTrace();
}
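The ImageReader listener above calls a YUV_420_888toNV21() helper that the original does not show. A minimal sketch of such a conversion, assuming the U and V planes share the same row and pixel strides (true on most devices): it copies the Y plane row by row so row padding is skipped, then interleaves the chroma bytes as V followed by U, which is the NV21 layout the encoder code below expects.

private static byte[] YUV_420_888toNV21(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    int ySize = width * height;
    byte[] nv21 = new byte[ySize + ySize / 2];

    // Y plane: copy row by row so any row padding (rowStride > width) is skipped.
    ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
    int yRowStride = image.getPlanes()[0].getRowStride();
    int pos = 0;
    for (int row = 0; row < height; row++) {
        yBuffer.position(row * yRowStride);
        yBuffer.get(nv21, pos, width);
        pos += width;
    }

    // Chroma planes: NV21 stores interleaved V then U at quarter resolution.
    // Assumes both chroma planes use the same row and pixel strides.
    ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
    ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
    int uvRowStride = image.getPlanes()[1].getRowStride();
    int uvPixelStride = image.getPlanes()[1].getPixelStride();
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            int uvIndex = row * uvRowStride + col * uvPixelStride;
            nv21[pos++] = vBuffer.get(uvIndex);
            nv21[pos++] = uBuffer.get(uvIndex);
        }
    }
    return nv21;
}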
MediaCodec video encoding utility
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ArrayBlockingQueue;
public class CameraToH264 {
    public ArrayBlockingQueue<byte[]> yuv420Queue = new ArrayBlockingQueue<>(2);
    private boolean isRunning;
    private byte[] input;
    private int width;
    private int height;
    private MediaCodec mediaCodec;
    private MediaMuxer mediaMuxer;
    private int mVideoTrack = -1;
    private long nanoTime;

    public void init(int width, int height, String path) {
        nanoTime = System.nanoTime();
        this.width = width;
        this.height = height;
        MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);
        videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
            mediaCodec.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
            mediaMuxer = new MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    public void putData(byte[] buffer) {
        // Keep only the most recent frames: drop the oldest one when the queue is full.
        if (yuv420Queue.size() >= 2) {
            yuv420Queue.poll();
        }
        yuv420Queue.add(buffer);
    }
    public void startEncoder() {
        new Thread(new Runnable() {
            @Override
            public void run() {
                isRunning = true;
                while (isRunning) {
                    if (yuv420Queue.size() > 0) {
                        input = yuv420Queue.poll();
                        // byte[] yuv420sp = new byte[width * height * 3 / 2];
                        // // The format must be converted, otherwise the recording plays back as a green screen
                        // NV21ToNV12(input, yuv420sp, width, height);
                        // input = yuv420sp;
                    } else {
                        input = null;
                    }
                    if (input != null) {
                        // Feed one raw frame to the encoder.
                        int inputBufferIndex = mediaCodec.dequeueInputBuffer(0);
                        if (inputBufferIndex >= 0) {
                            ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufferIndex);
                            inputBuffer.clear();
                            inputBuffer.put(input);
                            mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, (System.nanoTime() - nanoTime) / 1000, 0);
                        }
                        // Drain whatever encoded output is currently available.
                        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                        int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
                        if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                            mVideoTrack = mediaMuxer.addTrack(mediaCodec.getOutputFormat());
                            // Log.d("mmm", "output format changed");
                            if (mVideoTrack >= 0) {
                                mediaMuxer.start();
                                // Log.d("mmm", "muxer started");
                            }
                        }
                        while (outputBufferIndex >= 0) {
                            ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferIndex);
                            if (mVideoTrack >= 0) {
                                mediaMuxer.writeSampleData(mVideoTrack, outputBuffer, bufferInfo);
                                // Log.d("mmm", "writing sample");
                            }
                            mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                            outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
                            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                                // Log.e("mmm", "video end");
                            }
                        }
                    }
                }
                // Log.d("mmm", "stop writing");
                try {
                    mediaMuxer.stop();
                    mediaMuxer.release();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                try {
                    mediaCodec.stop();
                    mediaCodec.release();
                } catch (Exception e) {
                    e.printStackTrace();
                    // Log.i("mmm", "error while stopping");
                }
            }
        }).start();
    }
    public void stop() {
        isRunning = false;
    }
    // Swap NV21 (Y plane + interleaved VU) to NV12 (Y plane + interleaved UV).
    private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
        if (nv21 == null || nv12 == null) return;
        int frameSize = width * height;
        // Copy the Y plane unchanged.
        System.arraycopy(nv21, 0, nv12, 0, frameSize);
        // Swap each VU pair in the chroma plane to UV.
        for (int j = 0; j < frameSize / 2; j += 2) {
            nv12[frameSize + j] = nv21[frameSize + j + 1];
            nv12[frameSize + j + 1] = nv21[frameSize + j];
        }
    }
}
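stop() above only clears the flag, so any frames still buffered inside the encoder when the loop exits are discarded. A possible refinement, not part of the original class and reusing the fields above, is to signal end-of-stream and drain the encoder before the muxer is stopped:

    // Sketch only: push an empty EOS buffer and drain remaining output so the
    // last frames reach the muxer. Simplified; it gives up if the encoder
    // stays silent for one timeout period.
    private void signalEndOfStream() {
        int inputBufferIndex = mediaCodec.dequeueInputBuffer(10_000);
        if (inputBufferIndex >= 0) {
            mediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
                    (System.nanoTime() - nanoTime) / 1000, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (true) {
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 10_000);
            if (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferIndex);
                if (mVideoTrack >= 0 && bufferInfo.size > 0
                        && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    mediaMuxer.writeSampleData(mVideoTrack, outputBuffer, bufferInfo);
                }
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    break;
                }
            } else if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                break; // nothing more to drain (simplification)
            }
        }
    }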
Stop the preview and recording
if (cameraToH264 != null) {
    cameraToH264.stop();
    cameraToH264 = null;
}
if (mCameraCaptureSession != null) {
    try {
        mCameraCaptureSession.stopRepeating();
    } catch (Exception e) {
        e.printStackTrace();
    }
    mCameraCaptureSession.close();
}
if (mCameraDevice != null) {
    mCameraDevice.close();
}
cameraManager = null;
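The teardown above does not release the ImageReader or the background handler thread. A minimal sketch of that extra cleanup, assuming imageReader and the HandlerThread from the earlier sketch (here called cameraThread) were kept as fields:

// Additional cleanup (an assumption, not shown in the original): release the
// ImageReader and stop the background thread that delivered camera callbacks.
if (imageReader != null) {
    imageReader.close();
    imageReader = null;
}
if (cameraThread != null) {
    cameraThread.quitSafely();
    cameraThread = null;
    mCameraHandler = null;
}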