This post documents how to use MediaCodec on Android to encode the YUV data from the camera preview callback into H.264.
At work, software encoding (x264) had hit the performance ceiling of our phones and could no longer keep up, so we decided to switch to hardware encoding. I had actually tried hardware encoding early on with MediaRecorder, but it can't produce H.264 directly: you have to record an MP4 first and then extract the H.264 NALUs from it, which felt far too roundabout, so at the time I dropped MediaRecorder in favor of x264. Now, though, it's back to the hardware-encoding road, this time with MediaCodec.
This article walks through calling MediaCodec with a small demo:
First, get the YUV data from the camera's preview callback.
Next, encode that data to H.264 with MediaCodec.
Finally, write the H.264 to a file; after the program exits, you can check the result in VLC or any other player that handles raw H.264.
Getting the YUV data comes first. It's straightforward, so straight to the code:
package com.example.mediacodecencode;
import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MainActivity extends Activity implements SurfaceHolder.Callback,PreviewCallback{
private SurfaceView surfaceview;
private SurfaceHolder surfaceHolder;
private Camera camera;
private Parameters parameters;
int width = 1280;
int height = 720;
int framerate = 30;
int bitrate = 8500 * 1000;
private static final int yuvqueuesize = 10;
public static ArrayBlockingQueue<byte[]> YUVQueue = new ArrayBlockingQueue<byte[]>(yuvqueuesize);
private AvcEncoder avcCodec;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
surfaceview = (SurfaceView)findViewById(R.id.surfaceview);
surfaceHolder = surfaceview.getHolder();
surfaceHolder.addCallback(this);
SupportAvcCodec();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
camera = getBackCamera();
startcamera(camera);
avcCodec = new AvcEncoder(width, height, framerate, bitrate);
avcCodec.StartEncoderThread();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (null != camera) {
camera.setPreviewCallback(null);
camera.stopPreview();
camera.release();
camera = null;
avcCodec.StopThread();
}
}
@Override
public void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
// Preview frames arrive here as NV21; hand them to the encoder thread via the queue
putYUVData(data,data.length);
}
public void putYUVData(byte[] buffer, int length) {
    // Drop the oldest frame when the queue is full so the encoder never falls too far behind
    if (YUVQueue.size() >= yuvqueuesize) {
YUVQueue.poll();
}
YUVQueue.add(buffer);
}
@SuppressLint("NewApi")
private boolean SupportAvcCodec(){
if(Build.VERSION.SDK_INT>=18){
for (int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--) {
    MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
    // Skip decoders; we specifically need an AVC encoder
    if (!codecInfo.isEncoder()) {
        continue;
    }
    String[] types = codecInfo.getSupportedTypes();
for (int i = 0; i < types.length; i++) {
if (types[i].equalsIgnoreCase("video/avc")) {
return true;
}
}
}
}
return false;
}
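// Not part of the original article: some devices advertise an AVC encoder but
// don't accept every input color format, so it can help to log which color
// formats the encoder actually supports and fall back (e.g. to
// COLOR_FormatYUV420Planar) if SemiPlanar is missing. A sketch using the
// standard MediaCodecInfo APIs:
@SuppressLint("NewApi")
private void logAvcEncoderColorFormats() {
    for (int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
        if (!codecInfo.isEncoder()) {
            continue;
        }
        for (String type : codecInfo.getSupportedTypes()) {
            if (type.equalsIgnoreCase("video/avc")) {
                MediaCodecInfo.CodecCapabilities caps = codecInfo.getCapabilitiesForType(type);
                for (int colorFormat : caps.colorFormats) {
                    Log.i("MainActivity", codecInfo.getName() + " supports color format " + colorFormat);
                }
            }
        }
    }
}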
private void startcamera(Camera mCamera){
if(mCamera != null){
try {
mCamera.setPreviewCallback(this);
// Note: setDisplayOrientation() only rotates the on-screen preview; the byte[]
// delivered to onPreviewFrame (and therefore the encoded video) is not rotated
mCamera.setDisplayOrientation(90);
parameters = mCamera.getParameters();
parameters.setPreviewFormat(ImageFormat.NV21);
parameters.setPreviewSize(width, height);
mCamera.setParameters(parameters);
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
}
@TargetApi(9)
private Camera getBackCamera() {
Camera c = null;
try {
c = Camera.open(0); // attempt to get a Camera instance
} catch (Exception e) {
e.printStackTrace();
}
return c; // returns null if camera is unavailable
}
}
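That's the whole Activity. The encoder lives in a separate AvcEncoder class; the article only shows its methods, so here, for orientation, is a reconstruction of the imports and fields those methods rely on (the field names come from the code below, but the exact TIMEOUT_USEC value and modifiers are my assumptions):

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;

public class AvcEncoder {
    private static final int TIMEOUT_USEC = 12000; // dequeueOutputBuffer timeout in microseconds (value assumed)
    private MediaCodec mediaCodec;
    int m_width;
    int m_height;
    int m_framerate;
    public byte[] configbyte;                  // SPS/PPS from the codec-config buffer
    public boolean isRuning = false;           // encoder-loop flag, cleared by StopThread()
    private BufferedOutputStream outputStream; // raw .h264 output, opened by createfile()
    // constructor and methods as shown below
}

Its constructor sets up the output format and starts the codec: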
@SuppressLint("NewApi")
public AvcEncoder(int width, int height, int framerate, int bitrate) {
m_width = width;
m_height = height;
m_framerate = framerate;
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);     // use the caller-supplied bitrate
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate); // and the caller-supplied frame rate
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
try {
    mediaCodec = MediaCodec.createEncoderByType("video/avc");
    mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodec.start();
} catch (IOException e) {
    e.printStackTrace();
}
createfile();
}
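createfile() isn't shown in the article; a plausible version just opens a stream for the raw H.264 output (the test1.h264 filename on external storage is my assumption, and writing there needs the WRITE_EXTERNAL_STORAGE permission):

private void createfile() {
    // Write the raw H.264 stream to external storage (hypothetical path)
    File file = new File(Environment.getExternalStorageDirectory(), "test1.h264");
    if (file.exists()) {
        file.delete();
    }
    try {
        outputStream = new BufferedOutputStream(new FileOutputStream(file));
    } catch (Exception e) {
        e.printStackTrace();
    }
}

One wrinkle before the encoder loop: the camera delivers NV21, whose interleaved chroma plane stores each pair as V,U, while COLOR_FormatYUV420SemiPlanar means NV12, which stores U,V. So the two bytes of every chroma pair have to be swapped: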
private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
    if (nv21 == null || nv12 == null) return;
    int framesize = width * height;
    // The Y plane is laid out identically in NV21 and NV12
    System.arraycopy(nv21, 0, nv12, 0, framesize);
    // The chroma planes differ: NV21 stores V,U pairs, NV12 stores U,V pairs,
    // so swap the two bytes of each pair
    for (int j = 0; j < framesize / 2; j += 2) {
        nv12[framesize + j] = nv21[framesize + j + 1];     // U sample
        nv12[framesize + j + 1] = nv21[framesize + j];     // V sample
    }
}
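With the converter in place, the encoder thread pulls frames off the queue, converts and submits them to MediaCodec, and writes out whatever the codec produces: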
public void StartEncoderThread(){
Thread EncoderThread = new Thread(new Runnable() {
@SuppressLint("NewApi")
@Override
public void run() {
isRuning = true;
long pts = 0;
long generateIndex = 0;
while (isRuning) {
    // Re-initialized every pass so an empty queue doesn't re-encode the previous frame
    byte[] input = null;
    if (MainActivity.YUVQueue.size() > 0) {
        byte[] nv21 = MainActivity.YUVQueue.poll();
        input = new byte[m_width * m_height * 3 / 2];
        // Convert the camera's NV21 frame to the NV12 layout the encoder was configured for
        NV21ToNV12(nv21, input, m_width, m_height);
    }
if (input != null) {
try {
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1); // -1: block until an input buffer is free
if (inputBufferIndex >= 0) {
pts = computePresentationTime(generateIndex);
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(input);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
generateIndex += 1;
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
while (outputBufferIndex >= 0) {
//Log.i("AvcEncoder", "Get H264 Buffer Success! flag = "+bufferInfo.flags+",pts = "+bufferInfo.presentationTimeUs+"");
ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
byte[] outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
if(bufferInfo.flags == 2){
configbyte = new byte[bufferInfo.size];
configbyte = outData;
}else if(bufferInfo.flags == 1){
byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
outputStream.write(keyframe, 0, keyframe.length);
}else{
outputStream.write(outData, 0, outData.length);
}
mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
}
} catch (Throwable t) {
t.printStackTrace();
}
} else {
try {
// No frame queued yet; wait only briefly before polling again
// (a long sleep would overflow the 10-frame queue at 30 fps and drop frames)
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
});
EncoderThread.start();
}
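StopThread() is called from surfaceDestroyed() but never shown in the article. A minimal sketch that stops the loop and releases everything (flushing and closing the output stream here is my addition) could look like this:

public void StopThread() {
    isRuning = false; // lets the encoder loop exit
    try {
        mediaCodec.stop();
        mediaCodec.release();
        if (outputStream != null) {
            outputStream.flush();
            outputStream.close();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}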
/**
* Generates the presentation time for frame N, in microseconds.
*/
private long computePresentationTime(long frameIndex) {
return 132 + frameIndex * 1000000 / m_framerate;
}
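The timestamps are in microseconds: each frame advances by 1000000/m_framerate, and the small 132 µs offset appears to be carried over from the computePresentationTime() in Android's CTS EncodeDecodeTest sample.
That's the whole demo. Run it, pull the .h264 file off the device, and open it in VLC (or any player that handles raw H.264) to check the result.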