MediaCodec provides hardware-accelerated encoding (much more efficient than software encoding with FFmpeg).
There are plenty of reference blogs on the topic; this is simply a record of what I rewrote after reading them, since I would otherwise forget it.
(1) Synchronous encoding based on buffers (ByteBuffer)
(2) Asynchronous encoding based on buffers (ByteBuffer)
(3) Synchronous encoding based on buffer arrays (deprecated; probably less efficient than the first two)
A fairly typical routine for the synchronous mode:
public class AvcEncoderOnSynchronous {
private MediaCodec mediaCodec;
private int m_width;
private int m_height;
private byte[] configByte = null;
private BufferedOutputStream bos;
private long generateIndex = 0;
@SuppressLint("NewApi")
public AvcEncoderOnSynchronous(int width, int height, int frameRate, int bitRate, String outPath) throws IOException {
m_width = width;
m_height = height;
this.bos = new BufferedOutputStream(new FileOutputStream(new File(outPath), false));
mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
// Key frame interval in seconds; setting it to 0 makes every frame a key frame
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mediaFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileHigh);
mediaFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel51);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
}
@SuppressLint("NewApi")
public void close() {
try {
mediaCodec.stop();
mediaCodec.release();
bos.flush();
bos.close();
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* @param input frame data in YUV420P (planar) format
*/
@SuppressLint("NewApi")
public void offerEncoder(byte[] input) {
try {
int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufferIndex);
if (inputBuffer != null) {
inputBuffer.clear();
// Convert the planar (420p) input to semi-planar (420sp) before queueing
byte[] data_420sp = new byte[input.length];
yuv420pTo420sp(input, data_420sp, m_width, m_height);
input = data_420sp;
inputBuffer.put(input);
System.out.println("Queued input frame " + generateIndex);
}
mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, computePresentationTime(generateIndex++), 0);
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 12000);
while (outputBufferIndex >= 0) {
ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferIndex);
byte[] outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// SPS/PPS header; delivered once, saved for prepending to every key frame
configByte = outData;
} else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
byte[] keyframe = new byte[bufferInfo.size + configByte.length];
System.arraycopy(configByte, 0, keyframe, 0, configByte.length);
System.arraycopy(outData, 0, keyframe, configByte.length, outData.length);
bos.write(keyframe, 0, keyframe.length);
} else {
bos.write(outData, 0, outData.length);
}
mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 12000);
}
} catch (Throwable t) {
t.printStackTrace();
}
}
private void yuv420pTo420sp(byte[] yuv420p, byte[] yuv420sp, int width, int height) {
if (yuv420p == null || yuv420sp == null) return;
int frameSize = width * height;
int j;
// Y plane is identical in both layouts
System.arraycopy(yuv420p, 0, yuv420sp, 0, frameSize);
for (j = 0; j < frameSize / 4; j++) {
// U: planar U plane starts right after Y, at offset frameSize
yuv420sp[frameSize + 2 * j] = yuv420p[frameSize + j];
// V: planar V plane starts at offset frameSize * 5 / 4
yuv420sp[frameSize + 2 * j + 1] = yuv420p[frameSize + frameSize / 4 + j];
}
}
/**
* Generates the presentation time for frame N, in microseconds.
* At 30 fps each frame is 1000000/30 us apart; the 132 us offset is
* arbitrary, only monotonic increase matters to the encoder.
*/
private long computePresentationTime(long frameIndex) {
return 132 + frameIndex * 1000000 / 30;
}
}
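One caveat: the example above never signals end-of-stream, so frames still buffered inside the codec are lost when close() is called. Below is a minimal sketch of a drainAndClose() method that could be added to the class (my addition, not part of the original code): it queues an empty buffer flagged EOS, then keeps draining output until the encoder echoes the flag back.
public void drainAndClose() throws IOException {
    // An empty buffer with the EOS flag tells the encoder no more input is coming.
    int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
    if (inputBufferIndex >= 0) {
        mediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
                computePresentationTime(generateIndex++),
                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    }
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    boolean eos = false;
    while (!eos) {
        int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 12000);
        if (outputBufferIndex >= 0) {
            ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferIndex);
            byte[] outData = new byte[bufferInfo.size];
            outputBuffer.get(outData);
            bos.write(outData, 0, outData.length);
            mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
            // The final buffer carries BUFFER_FLAG_END_OF_STREAM back to us.
            eos = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
        }
    }
    close();
}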
Notes:
(1) The KEY_COLOR_FORMAT values supported differ from device to device, but every device since Android 5.0 supports COLOR_FormatYUV420Flexible. In the layout I observed, Y occupies width x height bytes, and U and V follow interleaved with a stride of 2 (one value, skip one), each width x height / 4 bytes long. The formats a concrete device actually reports can be listed with MediaCodecList, as the sketch after these notes shows.
(2) Both of my test devices (Samsung Tab S3, Huawei M5) require 420sp (semi-planar) input. My guess is that the Camera1 API delivers sp data, and perhaps Android's native formats are sp throughout, so the hardware encoder expects sp too. (Still to be verified; without the conversion the colors come out wrong.)
(3) The fourth parameter of queueInputBuffer only has to be larger than the previous value; the computePresentationTime method is borrowed from another blog.
(4) The parameter of dequeueInputBuffer is a timeout in microseconds; a negative value blocks until a buffer becomes available.
(5) The encoded output contains one buffer flagged BUFFER_FLAG_CODEC_CONFIG. It is delivered only once and must be saved; every key frame (BUFFER_FLAG_KEY_FRAME) needs this header prepended so decoding can start from it. (An alternative way to obtain the header is sketched after the asynchronous class below.)
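For note (1), here is a minimal sketch (standard MediaCodecList API, available since API 21) that lists which input color formats each AVC encoder on the device accepts:
// List the input color formats supported by every AVC encoder on the device.
MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
for (MediaCodecInfo info : codecList.getCodecInfos()) {
    if (!info.isEncoder()) continue;
    for (String type : info.getSupportedTypes()) {
        if (!type.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) continue;
        MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
        for (int colorFormat : caps.colorFormats) {
            // COLOR_FormatYUV420Flexible prints as 2135033992 (0x7F420888)
            System.out.println(info.getName() + " supports color format " + colorFormat);
        }
    }
}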
The pattern is the same as the synchronous version, only the work moves into callback functions, which may be more efficient.
public class AvcEncoderOnAsynchronous {
private MediaCodec mediaCodec;
private int m_width;
private int m_height;
private int frameSize;
private byte[] configByte = null;
private byte[] yuv420;
private BufferedOutputStream bos;
private AtomicInteger index = new AtomicInteger(0);
private RandomAccessFile randomAccessFile;
private int totalFrameNum;
@SuppressLint("NewApi")
public AvcEncoderOnAsynchronous(String inPath, final int width, final int height, int frameRate, int bitRate, String outPath) throws IOException {
m_width = width;
m_height = height;
frameSize = width * height * 3 / 2;
yuv420 = new byte[frameSize];
this.bos = new BufferedOutputStream(new FileOutputStream(new File(outPath), false));
mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // key frame interval in seconds
mediaFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileHigh);
mediaFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel51);
File file = new File(inPath);
randomAccessFile = new RandomAccessFile(file, "r");
totalFrameNum = (int) (randomAccessFile.length() / frameSize);
mediaCodec.setCallback(new MediaCodec.Callback() {
@Override
public void onInputBufferAvailable(@NonNull MediaCodec codec, int inputBufferId) {
if (index.get() == totalFrameNum) {
// All frames consumed; stop feeding (no EOS buffer is queued, start() just polls)
return;
}
ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
try {
randomAccessFile.seek((long) index.get() * frameSize);
randomAccessFile.read(yuv420, 0, yuv420.length);
} catch (IOException e) {
e.printStackTrace();
}
// Convert planar 420p to semi-planar 420sp before queueing
byte[] data_420sp = new byte[yuv420.length];
yuv420pTo420sp(yuv420, data_420sp, m_width, m_height);
yuv420 = data_420sp;
inputBuffer.put(yuv420);
System.out.println(Thread.currentThread().getId() + ": queued input frame " + index.get());
codec.queueInputBuffer(inputBufferId, 0, yuv420.length, computePresentationTime(index.getAndIncrement()), 0);
}
@Override
public void onOutputBufferAvailable(@NonNull MediaCodec codec, int outputBufferId, @NonNull MediaCodec.BufferInfo bufferInfo) {
ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
byte[] outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// SPS/PPS header; delivered once, saved for prepending to every key frame
configByte = outData;
} else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
byte[] keyframe = new byte[bufferInfo.size + configByte.length];
System.arraycopy(configByte, 0, keyframe, 0, configByte.length);
System.arraycopy(outData, 0, keyframe, configByte.length, outData.length);
try {
bos.write(keyframe, 0, keyframe.length);
} catch (IOException e) {
e.printStackTrace();
}
} else {
try {
bos.write(outData, 0, outData.length);
} catch (IOException e) {
e.printStackTrace();
}
}
codec.releaseOutputBuffer(outputBufferId, false);
}
@Override
public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
System.out.println(e.toString());
}
@Override
public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
System.out.println(format);
}
});
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
}
@SuppressLint("NewApi")
public void close() {
try {
mediaCodec.stop();
mediaCodec.release();
bos.flush();
bos.close();
} catch (Exception e) {
e.printStackTrace();
}
}
public void start() {
mediaCodec.start();
// Crude wait: poll every 2 s until the input callback has consumed all frames
while (index.get() != totalFrameNum) {
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
close();
}
private void yuv420pTo420sp(byte[] yuv420p, byte[] yuv420sp, int width, int height) {
if (yuv420p == null || yuv420sp == null) return;
int frameSize = width * height;
int j;
// Y plane is identical in both layouts
System.arraycopy(yuv420p, 0, yuv420sp, 0, frameSize);
for (j = 0; j < frameSize / 4; j++) {
// U: planar U plane starts right after Y, at offset frameSize
yuv420sp[frameSize + 2 * j] = yuv420p[frameSize + j];
// V: planar V plane starts at offset frameSize * 5 / 4
yuv420sp[frameSize + 2 * j + 1] = yuv420p[frameSize + frameSize / 4 + j];
}
}
/**
* Generates the presentation time for frame N, in microseconds.
* At 30 fps each frame is 1000000/30 us apart; only monotonic increase matters.
*/
private long computePresentationTime(long frameIndex) {
return 132 + frameIndex * 1000000 / 30;
}
}
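As an alternative to caching the BUFFER_FLAG_CODEC_CONFIG buffer (note (5) above), the header can also be read from the MediaFormat handed to onOutputFormatChanged. For AVC, "csd-0" carries the SPS and "csd-1" the PPS, each already prefixed with a 00 00 00 01 start code. A sketch of what that callback could do instead of just printing the format:
@Override
public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
    // For AVC, "csd-0" = SPS and "csd-1" = PPS, both with Annex-B start codes.
    ByteBuffer sps = format.getByteBuffer("csd-0");
    ByteBuffer pps = format.getByteBuffer("csd-1");
    if (sps != null && pps != null) {
        byte[] header = new byte[sps.remaining() + pps.remaining()];
        int spsLen = sps.remaining();
        sps.get(header, 0, spsLen);
        pps.get(header, spsLen, header.length - spsLen);
        configByte = header; // same field the key-frame branch prepends
    }
}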
@Test
public void Test1() {
try {
long index = 0;
int width = 2160;
int height = 2880;
int frameSize = (int) (width * height * 1.5);
byte[] inData = new byte[frameSize];
File file = new File(Environment.getExternalStorageDirectory().getAbsoluteFile() + "/shen.data/test.yuv");
RandomAccessFile randomAccessFile = new RandomAccessFile(file, "r");
int total = (int) (randomAccessFile.length() / frameSize);
AvcEncoderOnSynchronous avcEncoderOnSynchronous = new AvcEncoderOnSynchronous(2160, 2880, 30, 2160 * 2880 * 5, Environment.getExternalStorageDirectory().getAbsolutePath() + "/shen.data/test.h264");
while (index < total) {
randomAccessFile.seek(index++ * frameSize);
randomAccessFile.read(inData, 0, inData.length);
avcEncoderOnSynchronous.offerEncoder(inData);
}
avcEncoderOnSynchronous.close(); // flush the buffered output stream and release the codec
} catch (IOException e) {
e.printStackTrace();
}
}
@Test
public void Test2() {
try {
AvcEncoderOnAsynchronous avcEncoderOnAsynchronous = new AvcEncoderOnAsynchronous(Environment.getExternalStorageDirectory().getAbsoluteFile() + "/shen.data/test.yuv",
1952, 2592, 30, 2592 * 1952 * 10, Environment.getExternalStorageDirectory().getAbsolutePath() + "/shen.data/test.h264");
avcEncoderOnAsynchronous.start();
} catch (IOException e) {
e.printStackTrace();
}
}
The processing turns into callbacks: when an input buffer becomes available the input callback runs, and when encoded output is ready the output callback runs. This saves some of the time the synchronous version spends blocked waiting.
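One more detail: with the one-argument setCallback, the callbacks are delivered on the Looper of the thread that sets up the codec (as far as I can tell), so heavy file I/O in them can stall that thread. From API 23 there is a two-argument setCallback(callback, handler) overload that pins them to a dedicated thread. A minimal sketch, where codecCallback is a placeholder name for the MediaCodec.Callback built in the constructor above:
// Deliver MediaCodec callbacks on a dedicated background thread (API 23+).
// Uses android.os.HandlerThread and android.os.Handler.
HandlerThread callbackThread = new HandlerThread("avc-encoder-callbacks");
callbackThread.start();
Handler callbackHandler = new Handler(callbackThread.getLooper());
mediaCodec.setCallback(codecCallback, callbackHandler);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();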
Source code: https://github.com/shen511460468/MediaCodecDemo