上篇文章《AAC 音频编码保存和解码播放》讲述了通过 AudioRecord 录制音频数据,并通过 AAC 编码保存为 AAC 文件。这里的 aac 既是一种编码方式,也是一种容器,因此可以直接播放。本篇文章将讲述如何使用 Camera 采集视频数据,并通过 H264 编码保存为 .h264 文件。因为 .h264 不是标准的容器,所以不能直接播放,但是可以通过 ffmpeg 播放。
Camera 的预览需要使用 SurfaceView (TextureView 也可以),SurfaceView 和 Surface , SurfaceHolder 搭配使用,它们的关系如下:
Camera 的创建需要设定一些参数。
// cameraFacing selects the front or the back camera
Camera.open(cameraFacing);
// Set the preview output format; NV21 is supported by every camera and is a YUV420 variant
mParameters = camera.getParameters();
mParameters.setPreviewFormat(ImageFormat.NV21);
// Set the preview size (resolution); only the built-in sizes from getSupportedPreviewSizes are allowed
Camera.Size previewSize = getBestSize(DEFAULT_WIDTH, DEFAULT_HEIGHT, mParameters.getSupportedPreviewSizes());
mParameters.setPreviewSize(previewSize.width, previewSize.height);
// If the still-capture API is used, the picture size (resolution) must be set as well
Camera.Size pictureSize = getBestSize(DEFAULT_WIDTH, DEFAULT_HEIGHT, mParameters.getSupportedPictureSizes());
mParameters.setPictureSize(pictureSize.width, pictureSize.height);
// Enable continuous-picture autofocus only when the device reports support for it
if (supportFocus(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
/**
 * Returns whether the camera supports the given focus mode.
 *
 * @param focus one of the Camera.Parameters.FOCUS_MODE_* constants
 * @return true if the mode appears in the camera's supported focus modes
 */
private boolean supportFocus(String focus) {
    // getSupportedFocusModes() returns List<String>; use the parameterized type
    // instead of a raw List, and return the containment check directly
    // instead of the redundant if/else-on-boolean.
    List<String> focusModes = mCamera.getParameters().getSupportedFocusModes();
    return focusModes.contains(focus);
}
// Set the preview frame rate: pick the highest supported fps range.
// NOTE(review): the original article text is garbled/truncated here (the
// loop body and the setPreviewFpsRange call were lost); this is a faithful
// reconstruction of the usual "choose the largest supported range" pattern — verify.
int defMinFps = 0;
int defMaxFps = 0;
List<int[]> supportedPreviewFpsRange = mParameters.getSupportedPreviewFpsRange();
for (int[] fps : supportedPreviewFpsRange) {
    if (defMinFps <= fps[PREVIEW_FPS_MIN_INDEX] && defMaxFps <= fps[PREVIEW_FPS_MAX_INDEX]) {
        defMinFps = fps[PREVIEW_FPS_MIN_INDEX];
        defMaxFps = fps[PREVIEW_FPS_MAX_INDEX];
    }
}
mParameters.setPreviewFpsRange(defMinFps, defMaxFps);
// Register this class to receive preview frames
mCamera.setPreviewCallback(this);
//回调的接口
// Preview callback: 'bytes' holds one raw frame in the format configured via
// setPreviewFormat (NV21 here), sized to the configured preview resolution.
@Override
public void onPreviewFrame(byte[] bytes, Camera camera) {
//...
}
由于 MediaCodec 硬编码的兼容性问题,需要判断是否有支持 “video/avc” 的编码器(avc 就是 H264 )
/**
 * Scans the installed codecs for an encoder that handles our MIME type
 * ("video/avc", i.e. H.264).
 *
 * @return the first matching encoder's info, or null when the device has none
 */
private MediaCodecInfo selectCodecInfo() {
    int codecCount = MediaCodecList.getCodecCount();
    for (int index = 0; index < codecCount; index++) {
        MediaCodecInfo info = MediaCodecList.getCodecInfoAt(index);
        // Skip decoders — we only want encoders.
        if (!info.isEncoder()) {
            continue;
        }
        for (String type : info.getSupportedTypes()) {
            if (type.equalsIgnoreCase(H264Encoder.VIDEO_MIME_TYPE)) {
                return info;
            }
        }
    }
    return null;
}
创建媒体格式用于编码器的参数配置
//查询编码器支持的输入像素格式
/**
 * Queries the encoder's supported input pixel formats and returns the first
 * one this pipeline knows how to produce.
 *
 * @param codecInfo the encoder to inspect (may be null)
 * @return a COLOR_Format* constant, or -1 when codecInfo is null or nothing matches
 */
private int selectColorFormat(MediaCodecInfo codecInfo) {
    if (codecInfo == null) {
        return -1;
    }
    MediaCodecInfo.CodecCapabilities caps = codecInfo.getCapabilitiesForType(H264Encoder.VIDEO_MIME_TYPE);
    for (int format : caps.colorFormats) {
        if (isRecognizedFormat(format)) {
            return format;
        }
    }
    return -1;
}
private boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle for this test
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar://对应Camera预览格式I420(YV21/YUV420P)
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: //对应Camera预览格式NV12
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar://对应Camera预览格式NV21
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: {对应Camera预览格式YV12
return true;
}
default:
return false;
}
}
// Configure the MediaFormat for the H.264 encoder.
// Width/height are deliberately swapped (mHeight, mWidth) because each frame
// is rotated 90 degrees before being queued into the codec.
mBitRate = (mWidth * mHeight * 3 / 2) * 8 * fps;
mMediaFormat = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, mHeight, mWidth);
// TODO: without KEY_BIT_RATE, configure fails with "configureCodec returning error -38"
mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
// Fixed: the article dropped the ", " between the key and mColorFormat
mMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat);
mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
Log.e("eee", mMediaCodecInfo.getName());
// Create the encoder by name.
// Fixed: the article duplicated this try/catch (the first copy had the typo
// "mMediaCodecInfgetName()"); one creation is enough.
try {
    mMediaCodec = MediaCodec.createByCodecName(mMediaCodecInfo.getName());
} catch (IOException e) {
    e.printStackTrace();
}
mMediaCodec.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
//在 Camera 的预览回调接口中将数据放入队列
mCameraHelper.setPreviewCallback(new CameraHelper.PreviewCallback() {
@Override
public void onFrame(byte[] data) {
mH264Encoder.putFrameData(data);
}
});
/**
 * Queues one raw preview frame for encoding.
 * Null frames are ignored; frames arriving while encoding is stopped are dropped.
 * Blocks when the queue is full.
 *
 * @param data one NV21 preview frame from the camera
 */
public void putFrameData(byte[] data) {
    if (data == null || !mIsEncoding) {
        return;
    }
    try {
        mQueue.put(data);
    } catch (InterruptedException e) {
        // Restore the interrupt flag so the caller can observe the interruption
        // instead of it being silently swallowed.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }
}
//循环从 队列中取出数据进行编解码,
// Encoding loop: drain the frame queue and feed frames into the codec until stopped.
// NOTE(review): the closing "});" of this execute(...) call is not shown in the article snippet.
mExecutorService.execute(new Runnable() {
@Override
public void run() {
mIsEncoding = true;
// Base timestamp in microseconds; per-frame PTS is computed relative to this
mPresentationTimeUs = System.currentTimeMillis() * 1000;
mMediaCodec.start();
while (mIsEncoding) {
byte[] data = getFrameData();
if (data == null) {
continue;
}
encodeVideoData(data);
}
// Stopped: release the codec and close the output streams
mMediaCodec.stop();
mMediaCodec.release();
IOUtil.close(mFileOutputStream);
IOUtil.close(mBufferedOutputStream);
}
由于 Camera 预览的数据默认是横屏的,还需要将数据旋转 90 度 (这和 setDisplayOrientation 无关,即使设置了 setDisplayOrientation,预览的数据还是横屏的数据),除此之外,由于 Camera 设置的输出格式 和 MediaCodec 支持的输入格式可能不同,还需要进行进一步转换。
private byte[] transferFrameData(byte[] data, byte[] yuvBuffer, byte[] rotatedYuvBuffer) {
//Camera 传入的是 NV21
//转换成 MediaCodec 支持的格式
switch (mColorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar://对应Camera预览格式I420(YV21/YUV420P)
YUVEngine.Nv21ToI420(data, yuvBuffer, mWidth, mHeight);
YUVEngine.I420ClockWiseRotate90(yuvBuffer, mWidth, mHeight, rotatedYuvBuffer, mOutWidth, mOutHeight);
Log.i("transferFrameData", "COLOR_FormatYUV420Planar");
break;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: //对应Camera预览格式NV12
YUVEngine.Nv21ToNv12(data, yuvBuffer, mWidth, mHeight);
YUVEngine.Nv12ClockWiseRotate90(yuvBuffer, mWidth, mHeight, rotatedYuvBuffer, mOutWidth, mOutHeight);
Log.i("transferFrameData", "COLOR_FormatYUV420SemiPlanar");
break;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar://对应Camera预览格式NV21
System.arraycopy(data, 0, yuvBuffer, 0, mWidth * mHeight * 3 / 2);
YUVEngine.Nv21ClockWiseRotate90(yuvBuffer, mWidth, mHeight, rotatedYuvBuffer, mOutWidth, mOutHeight);
Log.i("transferFrameData", "COLOR_FormatYUV420PackedSemiPlanar");
break;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: 对应Camera预览格式YV12
YUVEngine.Nv21ToYV12(data, yuvBuffer, mWidth, mHeight);
YUVEngine.Yv12ClockWiseRotate90(yuvBuffer, mWidth, mHeight, rotatedYuvBuffer, mOutWidth, mOutHeight);
Log.i("transferFrameData", "COLOR_FormatYUV420PackedPlanar");
break;
}
return rotatedYuvBuffer;
}
YUVEngine 是一个对 YUV 数据操作的封装类。
//编码的流程可以简单概括如下:
dequeueInputBuffer// 获取可用的输入缓存区 buffer 的下标 inputIndex
getInputBuffers// 根据 inputIndex 获取可用的输入缓冲区 bytebuffer
bytebuffer.put // 放入数据
queueInputBuffer // 将数据放入输入缓冲区
dequeueOutputBuffer // 获取可用的输出缓存区 buffer 的下标 outputIndex
getOutputBuffers // 根据 outputIndex 获取可用的输出缓冲区 bytebuffer
outputBuffer.get() // 获取数据
releaseOutputBuffer // 处理完成,释放 buffer
//其中还有一个参数 pts,Presentation Time Stamp , 用于表示一帧的显示时间,我们知道 PTS 是告诉播放器播放一帧的时间,而 DTS 是解码时间,因此在进行编码的时候就应该传入 PTS 用于解码后的播放。除此之外,一些设备如果没有设置合理的值,那么在编码的时候就会采取丢弃帧和低质量编码的方式。
// Feed one rotated YUV frame into the next free input buffer.
ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
int inputIndex = mMediaCodec.dequeueInputBuffer(10_000);
if (inputIndex >= 0) {
ByteBuffer byteBuffer = inputBuffers[inputIndex];
byteBuffer.clear();
byteBuffer.put(mRotatedYUVBuffer);
// PTS in microseconds, relative to the base timestamp captured at start
long pts = System.currentTimeMillis() * 1000 - mPresentationTimeUs;
mMediaCodec.queueInputBuffer(inputIndex, 0, mRotatedYUVBuffer.length, pts, 0);
}
// Drain every pending output buffer and append the encoded bytes to the file.
ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
int outputIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10_000);
if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// The codec re-allocated its buffers; re-fetch the array before indexing it
outputBuffers = mMediaCodec.getOutputBuffers();
}
while (outputIndex >= 0) {
ByteBuffer byteBuffer = outputBuffers[outputIndex];
byte[] buffer = new byte[mBufferInfo.size];
byteBuffer.get(buffer);
// Write the encoded chunk to the .h264 file
try {
mBufferedOutputStream.write(buffer);
} catch (IOException e) {
e.printStackTrace();
}
mMediaCodec.releaseOutputBuffer(outputIndex, false);
outputIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10_000);
}
最后生成的 .h264 文件不能直接在播放器播放,但是可以通过 ffplay 播放
ffplay media_codec_video.h264
github demo