I have recently been looking into how to implement screen recording and screen casting, which relies on MediaCodec for encoding and decoding; earlier I also did some work on real-time Android camera capture and transmission. The end goal is real-time streaming with MediaCodec, so as a first step I built a demo that captures video from the camera and encodes/decodes it with MediaCodec in real time. A follow-up demo that adds real-time transmission of the encoded camera stream will come later when I have time.
Below is the core code for real-time encoding/decoding with MediaCodec.
Encoding:
package com.qiandu.cameracodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import android.util.Log;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Queue;
public class SvcEncoder {
private final int SPACIAL_LAYER_CAPACITY = 4;
private int mMaxSpacialLayerIndex = 0;
private AvcEncoder[] mAvcEncoders = null;
private SvcEncodeSpacialParam[] mSvcEncodeParams = null;
private final String MIME_TYPE = "video/avc";
private int mPrimeColorFormat = 0; //first supported "primary" color format, chosen in Init()
public static final String KEY_COLORFORMAT = "key_colorformat";
public static final String KEY_WIDTH = "key_width";
public static final String KEY_HEIGHT = "key_height";
Queue<VideoBufferInfo>[] mRawDataQueue = null;
private int mPeriodIDR = 60; /*seconds*/
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
Log.d("AvcEncoder", "selectCodec OK, get "+mimeType);
return codecInfo;
}
}
}
return null;
}
public int Init()
{
Log.i("SvcEnc", "Init ++");
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(MIME_TYPE);
for (int i = 0; i < capabilities.colorFormats.length && mPrimeColorFormat == 0; i++) {
int format = capabilities.colorFormats[i];
switch (format) {
//primary color formats
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: /*I420 --- YUV4:2:0 --- Nvidia Tegra 3, Samsu */
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: /*yv12 --- YVU4:2:0 --- ?*/
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: /*NV12 --- Qualcomm Adreno330/320*/
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: /*NV21 --- TI OMAP */
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
mPrimeColorFormat = format;
Log.d("AvcEncoder", "get supportted color format " + format);
break;
default:
Log.d("AvcEncoder", "Skipping unsupported color format " + format);
break;
}
}
mAvcEncoders = new AvcEncoder[SPACIAL_LAYER_CAPACITY];
mSvcEncodeParams = new SvcEncodeSpacialParam[SPACIAL_LAYER_CAPACITY];
mRawDataQueue = (LinkedList<VideoBufferInfo>[]) new LinkedList[SPACIAL_LAYER_CAPACITY];
for (int i=0;i<SPACIAL_LAYER_CAPACITY;i++)
{
mRawDataQueue[i] = new LinkedList<VideoBufferInfo>();
}
Log.i("SvcEnc", "Init --");
return 0;
}
public int Uninit()
{
Log.i("SvcEnc", "Uninit ++");
if (mAvcEncoders == null)
{
return 1;
}
UninitAvcEncoders();
mSvcEncodeParams = null;
mRawDataQueue = null;
//mMaxSpacialLayerIndex = 0;
Log.i("SvcEnc", "Uninit --");
return 0;
}
private void UninitAvcEncoders()
{
for(int i=0;i<SPACIAL_LAYER_CAPACITY;i++)
{
if (mAvcEncoders[i] != null)
{
mAvcEncoders[i].Uninit();
mAvcEncoders[i] = null;
}
}
}
/* NOTE: the original Configure/Start/Stop/GetSpacialLayerCapacity/queryInt bodies did not
 * survive the paste; the versions below are reconstructed sketches based on how they are
 * called from RequestKeyFrameSoon and from MainActivity. */
public int Configure(SvcEncodeSpacialParam[] params, int period_idr)
{
mPeriodIDR = period_idr;
if (params == null || params.length > SPACIAL_LAYER_CAPACITY)
{
return 1;
}
mMaxSpacialLayerIndex = 0;
for (int i=0;i<params.length;i++)
{
if (params[i] != null)
{
mSvcEncodeParams[i] = params[i];
mAvcEncoders[i] = new AvcEncoder(); //then set it up with mPrimeColorFormat, params[i] and mPeriodIDR (AvcEncoder's own source is not included in this post)
mMaxSpacialLayerIndex = i;
}
}
return 0;
}
public int Start()
{
for (int i=0;i<=mMaxSpacialLayerIndex;i++)
{
if (mAvcEncoders[i] != null)
{
mAvcEncoders[i].Start();
}
}
return 0;
}
public int Stop()
{
for (int i=0;i<=mMaxSpacialLayerIndex;i++)
{
if (mAvcEncoders[i] != null)
{
mAvcEncoders[i].Stop();
}
}
return 0;
}
public int GetSpacialLayerCapacity()
{
return SPACIAL_LAYER_CAPACITY;
}
public int queryInt(String key, /*out*/int[] value)
{
//forward the query (e.g. KEY_COLORFORMAT from MainActivity) to the top layer's AvcEncoder
if (mAvcEncoders != null && mAvcEncoders[mMaxSpacialLayerIndex] != null)
{
return mAvcEncoders[mMaxSpacialLayerIndex].queryInt(key, value);
}
return 1;
}
public int SetBitrateOnFly(int spacial_idx, int bps)
{
if (mSvcEncodeParams == null || mAvcEncoders == null)
{
return 1;
}
if (spacial_idx >= SPACIAL_LAYER_CAPACITY)
{
return 1;
}
if (mSvcEncodeParams[spacial_idx] != null)
{
if (mAvcEncoders[spacial_idx] != null)
{
mAvcEncoders[spacial_idx].SetBitrateOnFly(bps);
}
}
return 0;
}
public int RequestKeyFrameSoon()
{
Log.i("SvcEnc", "RequestKeyFrameSoon");
if (mAvcEncoders == null)
{
return 1;
}
if (Build.VERSION.SDK_INT < 19)
{
Stop();
//hard-coded workaround for compatibility
if (Build.VERSION.SDK_INT <= 17) //for "CP-DX80"
{
UninitAvcEncoders();
Configure(mSvcEncodeParams, mPeriodIDR);
}
Start();
}
else {
//reconstructed: on API 19 and above each layer's AvcEncoder can request a sync frame on the fly
for(int i=0;i<SPACIAL_LAYER_CAPACITY;i++)
{
if (mAvcEncoders[i] != null)
{
mAvcEncoders[i].RequestKeyFrameSoon();
}
}
}
return 0;
}
public int InputRawBuffer(/*in*/byte[] bytes, /*in*/int len, /*in*/long timestamp, /*in*/int flag)
{
int res = AvcUtils.R_BUFFER_OK;
if (mAvcEncoders == null || mSvcEncodeParams == null || mSvcEncodeParams[mMaxSpacialLayerIndex] == null)
{
return AvcUtils.R_UNKNOWN;
}
//reconstructed: the raw frame is expected to match the top spatial layer's size
int[] dst_width = new int[1];
int[] dst_height = new int[1];
dst_width[0] = mSvcEncodeParams[mMaxSpacialLayerIndex].mWidth;
dst_height[0] = mSvcEncodeParams[mMaxSpacialLayerIndex].mHeight;
if ((int)(RawUtils.BytesPerPixel(mPrimeColorFormat) * dst_width[0] * dst_height[0]) > len)
{
res = AvcUtils.R_INVALIDATE_BUFFER_SIZE;
return res;
}
int src_width = 0;
int src_height = 0;
byte[] src_yuv = bytes;
byte[] dst_yuv = bytes;
for (int i=mMaxSpacialLayerIndex;i>=0;i--)
{
if (mSvcEncodeParams[i] != null)
{
if (mAvcEncoders[i] != null)
{
src_yuv = dst_yuv;
dst_yuv = null;
src_width = dst_width[0];
src_height = dst_height[0];
mAvcEncoders[i].queryInt(AvcEncoder.KEY_WIDTH, dst_width);
mAvcEncoders[i].queryInt(AvcEncoder.KEY_HEIGHT, dst_height);
dst_yuv = RawUtils.YuvDownsample(mPrimeColorFormat, src_yuv, src_width, src_height, dst_width[0], dst_height[0]);
if (dst_yuv != null)
{
int blen = (int) (RawUtils.BytesPerPixel(mPrimeColorFormat) * dst_width[0] * dst_height[0]);
if (mRawDataQueue[i] != null)
{
//For pro
VideoBufferInfo info = new VideoBufferInfo();
info.buffer = dst_yuv;
info.size = blen;
info.timestamp = timestamp;
info.flag = flag;
mRawDataQueue[i].add(info);
Iterator<VideoBufferInfo> ite = mRawDataQueue[i].iterator();
while (ite.hasNext())
{
VideoBufferInfo infoo = ite.next();
res = mAvcEncoders[i].InputRawBuffer(dst_yuv, blen, timestamp, flag);
if (res != AvcUtils.R_BUFFER_OK)
{
break;
}
infoo.buffer = null;
infoo = null;
ite.remove();
}
}
else
{
res = mAvcEncoders[i].InputRawBuffer(dst_yuv, blen, timestamp, flag);
}
} //if (dst_yuv != null)
} //if (mAvcEncoders[i] != null)
} //if (mSvcEncodeParams[i] != null)
} //for()
dst_yuv = null;
return res;
}
public int OutputAvcBuffer(/*in*/byte[] bytes, /*in, out*/int[] len, /*out*/SvcEncodeOutputParam output)
{
int res = AvcUtils.R_UNKNOWN;
//rookie stage now
if (mAvcEncoders == null)
{
return AvcUtils.R_UNKNOWN;
}
int layeridx = mMaxSpacialLayerIndex;
for (;layeridx>=0;layeridx--)
{
if (mSvcEncodeParams[layeridx] != null)
{
if (mAvcEncoders[layeridx] != null)
{
long[] ts = new long[1];
int[] flag = new int[1];
res = mAvcEncoders[layeridx].OutputAvcBuffer(bytes, len, ts, flag);
if (AvcUtils.R_BUFFER_OK == res)
{
int[] width = new int[1];
int[] height = new int[1];
mAvcEncoders[layeridx].queryInt(AvcEncoder.KEY_WIDTH, width);
mAvcEncoders[layeridx].queryInt(AvcEncoder.KEY_HEIGHT, height);
output.timestamp = ts[0];
output.layernumber = mMaxSpacialLayerIndex + 1;
output.layerindex = layeridx;
output.layerwidth = width[0];
output.layerheight = height[0];
/*
* .....
*
*
* */
break;
}
}
}
}
return res;
}
}
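For reference, a minimal sketch of how this SvcEncoder is meant to be driven (the same call sequence MainActivity uses further down). The 320x240 / 25 fps / 600 kbps numbers mirror MainActivity's defaults, and yuvFrame here just stands in for one camera preview frame already converted to the encoder's color format:
SvcEncoder enc = new SvcEncoder();
enc.Init();
SvcEncodeSpacialParam[] layers = new SvcEncodeSpacialParam[enc.GetSpacialLayerCapacity()];
layers[0] = new SvcEncodeSpacialParam();
layers[0].mWidth = 320; //MainActivity's default preview size
layers[0].mHeight = 240;
layers[0].mFrameRate = 25;
layers[0].mBitrate = 600 * 1000;
enc.Configure(layers, 60); //60 s IDR period, as in the demo
enc.Start();
//per preview frame: feed the raw YUV, then drain whatever the encoder has produced
byte[] avcBuf = new byte[AvcEncoder.DEFAULT_AVC_BUF_SIZE];
int res = enc.InputRawBuffer(yuvFrame, yuvFrame.length, System.currentTimeMillis(), 0);
//a non-OK result means the frame was not consumed; MainActivity keeps it queued and retries
int[] len = new int[]{ avcBuf.length };
SvcEncodeOutputParam out = new SvcEncodeOutputParam();
while (enc.OutputAvcBuffer(avcBuf, len, out) == AvcUtils.R_BUFFER_OK) {
//avcBuf[0..len[0]) now holds one H.264 access unit stamped with out.timestamp
len[0] = avcBuf.length;
}
//when done
enc.Stop();
enc.Uninit();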
Decoding:
package com.qiandu.cameracodec;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
public class AvcDecoder
{
public static final int STATUS_INVALID = 0;
public static final int STATUS_LOADED = 1;
public static final int STATUS_IDLE = 2;
public static final int STATUS_EXEC = 3;
public static final int STATUS_WAIT = 5;
private MediaCodec mMC = null;
private String MIME_TYPE = "video/avc";
private MediaFormat mMF = null;
private ByteBuffer[] mInputBuffers = null;
private ByteBuffer[] mOutputBuffers = null;
private BufferInfo mBI = null;
private int mStatus = STATUS_INVALID;
private final int BUFFER_TIMEOUT = 0; //microseconds
private final int RENDER_DELAY_VALUE = 1;
private int mDecodeRenderDelayCount = 0;
public int Init()
{
Log.i("AvcDecoder", "Init");
try {
mMC = MediaCodec.createDecoderByType(MIME_TYPE);
mStatus = STATUS_LOADED;
mBI = new BufferInfo();
Log.i("AvcDecoder", "Init, createDecoderByType");
} catch (IOException e) {
e.printStackTrace();
}
return 0;
}
public int tryConfig(Surface surface, byte[] sps, byte[] pps)
{
int[] width = new int[1];
int[] height = new int[1];
AvcUtils.parseSPS(sps, width, height);
mMF = MediaFormat.createVideoFormat(MIME_TYPE, width[0], height[0]);
mMF.setByteBuffer("csd-0", ByteBuffer.wrap(sps));
mMF.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
mMF.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width[0] * height[0]);
mMC.configure(mMF, surface, null, 0);
Log.i("AvcDecoder", "Init, configure");
mStatus = STATUS_IDLE;
return 0;
}
public void Uninit()
{
Log.i("AvcDecoder", "Uninit");
stop();
mMC.release();
mMC = null;
mBI = null;
}
public void start()
{
Log.i("AvcDecoder", "start");
if (mStatus == STATUS_EXEC)
{
Log.d("AvcDecoder", "wrong status:"+mStatus);
return;
}
if (mMC != null)
{
mDecodeRenderDelayCount = RENDER_DELAY_VALUE;
mMC.start();
mInputBuffers = mMC.getInputBuffers();
mOutputBuffers = mMC.getOutputBuffers();
mStatus = STATUS_EXEC;
}
}
public void stop()
{
Log.i("AvcDecoder", "stop");
if (mStatus != STATUS_EXEC)
{
Log.d("AvcDecoder", "wrong status:"+mStatus);
return;
}
if (mMC != null)
{
//mMC.flush();
mMC.stop();
mStatus = STATUS_IDLE;
}
}
public void flush()
{
Log.i("AvcDecoder", "flush");
if (mStatus != STATUS_EXEC)
{
Log.d("AvcDecoder", "wrong status:"+mStatus);
return;
}
if (mMC != null)
{
mMC.flush();
}
}
public int InputAvcBuffer(/*in*/byte[] bytes, /*in*/int len, /*in*/long timestamp, /*in*/int flag)
{
//Log.i("AvcDecoder", "InputAvcBuffer ++");
if (mStatus != STATUS_EXEC)
{
//Log.d("AvcDecoder", "wrong status:"+mStatus);
return AvcUtils.R_TRY_AGAIN_LATER;
}
int inputbufferindex = 0;
try {
inputbufferindex = mMC.dequeueInputBuffer(BUFFER_TIMEOUT);
}
catch (IllegalStateException ex)
{
Log.e("AvcDecoder", "dequeueInputBuffer throw IllegalStateException");
return AvcUtils.R_INVALID_STATE;
}
if (inputbufferindex >= 0)
{
ByteBuffer inputBuffer = mInputBuffers[inputbufferindex];
inputBuffer.clear();
int capacity = inputBuffer.capacity();
if (capacity < len)
{
mMC.queueInputBuffer(inputbufferindex, 0, 0, timestamp, flag);
Log.e("AvcDecoder", "capacity="+capacity+",len="+len);
return AvcUtils.R_INVALIDATE_BUFFER_SIZE;
}
inputBuffer.put(bytes, 0, len);
mMC.queueInputBuffer(inputbufferindex, 0, len, timestamp, flag);
//Log.i("AvcDecoder", "InputAvcBuffer -- OK, capacity="+capacity);
}
else if (inputbufferindex == MediaCodec.INFO_TRY_AGAIN_LATER)
{
//Log.i("AvcDecoder", "InputAvcBuffer -- INFO_TRY_AGAIN_LATER");
// try {
// Thread.sleep(1);
// } catch (InterruptedException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
return AvcUtils.R_TRY_AGAIN_LATER;
}
else
{
return AvcUtils.R_UNKNOWN;
}
return AvcUtils.R_BUFFER_OK;
}
//usage: int[] len = new int[1];
//long[] ts = new long[1];
public int OutputRawBuffer(/*out*/byte[] bytes, /*in, out*/int[] len, /*out*/long[] timestamp)
{
//Log.i("AvcDecoder", "OutputRawBuffer ++");
if (mStatus != STATUS_EXEC)
{
//Log.d("AvcDecoder", "wrong status:"+mStatus);
return AvcUtils.R_TRY_AGAIN_LATER;
}
int outputbufferindex = 0;
try {
outputbufferindex = mMC.dequeueOutputBuffer(mBI, BUFFER_TIMEOUT);
}
catch (IllegalStateException ex)
{
Log.e("AvcDecoder", "dequeueOutputBuffer throw IllegalStateException");
return AvcUtils.R_INVALID_STATE;
}
if (outputbufferindex >= 0)
{
timestamp[0] = mBI.presentationTimeUs;
if (mDecodeRenderDelayCount == 0)
{
mMC.releaseOutputBuffer(outputbufferindex, true);
}
else
{
mDecodeRenderDelayCount --;
mMC.releaseOutputBuffer(outputbufferindex, false);
}
// if (mOutputBuffers[outputbufferindex] != null)
// {
// mOutputBuffers[outputbufferindex].position(mBI.offset);
// mOutputBuffers[outputbufferindex].limit(mBI.offset + mBI.size);
//
// if (bytes != null)
// mOutputBuffers[outputbufferindex].get(bytes, 0, mBI.size);
// len[0] = mBI.size;
// timestamp[0] = mBI.presentationTimeUs;
//
// if (mDecodeRenderDelayCount == 0)
// {
// mMC.releaseOutputBuffer(outputbufferindex, true);
// }
// else
// {
// mDecodeRenderDelayCount --;
// mMC.releaseOutputBuffer(outputbufferindex, false);
// }
// }
// else
// {
// mMC.releaseOutputBuffer(outputbufferindex, false);
// }
//Log.i("AvcDecoder", "OutputRawBuffer -- OK at "+ outputbufferindex+", size="+len[0]);
}
else if (outputbufferindex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
{
mOutputBuffers = mMC.getOutputBuffers();
Log.i("AvcDecoder", "OutputRawBuffer -- INFO_OUTPUT_BUFFERS_CHANGED");
return AvcUtils.R_OUTPUT_UPDATE;
}
else if (outputbufferindex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
{
mMF = mMC.getOutputFormat();
//int new_width = mMF.getInteger(MediaFormat.KEY_WIDTH);
//int new_height = mMF.getInteger(MediaFormat.KEY_HEIGHT);
//int new_bps = mMF.getInteger(MediaFormat.KEY_BIT_RATE);
//int new_cf = mMF.getInteger(MediaFormat.KEY_COLOR_FORMAT);
//int new_fps = mMF.getInteger(MediaFormat.KEY_FRAME_RATE);
Log.i("AvcDecoder", "OutputRawBuffer -- INFO_OUTPUT_FORMAT_CHANGED");
//Log.i("AvcDecoder", "OutputRawBuffer -- INFO_OUTPUT_FORMAT_CHANGED: "+new_width+"x"+new_height+"@"+new_fps+/*",in "+new_bps+"bps"+*/", cf="+new_cf);
return AvcUtils.R_OUTPUT_UPDATE;
}
else if (outputbufferindex == MediaCodec.INFO_TRY_AGAIN_LATER)
{
//Log.i("AvcDecoder", "OutputRawBuffer -- INFO_TRY_AGAIN_LATER");
// try {
// Thread.sleep(10);
// } catch (InterruptedException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
return AvcUtils.R_TRY_AGAIN_LATER;
}
else
{
return AvcUtils.R_UNKNOWN;
}
return AvcUtils.R_BUFFER_OK;
}
}
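The decoder side follows the same pattern. A minimal sketch, assuming decodeSurface is the decode SurfaceView's Surface and spsNal/ppsNal have already been cut out of the first IDR access unit (MainActivity below does exactly that with AvcUtils.goToPrefix/getNalType):
AvcDecoder dec = new AvcDecoder();
dec.Init();
dec.tryConfig(decodeSurface, spsNal, ppsNal);
dec.start();
//per received access unit
int res = dec.InputAvcBuffer(accessUnit, accessUnitLen, timestampMs, 0);
if (res == AvcUtils.R_INVALID_STATE) {
//the codec died: stop it and (in this demo) ask the encoder for a fresh IDR
}
//drain: frames are rendered straight to the Surface, so no output copy is needed
int[] outLen = new int[1];
long[] outTs = new long[1];
while (dec.OutputRawBuffer(null, outLen, outTs) == AvcUtils.R_BUFFER_OK) {
//outTs[0] is the presentation time of the frame just released to the Surface
}
//when done
dec.stop();
dec.Uninit();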
MainActivity:
package com.qiandu.cameracodec;
import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback,Camera.PreviewCallback, Camera.ErrorCallback{
public static final String TAG = MainActivity.class.getSimpleName();
Camera mCam;
private SurfaceView mSurfacePreview;
private SurfaceView mSurfaceDecoder;
int mSelectedFacing = 0;
int mSelectColorFormat = 17;
int mSelectWidth = 320;
int mSelectHeight = 240;
int mSelectFPS = 25;
private Handler m_CodecMsgHandler;
private final int PREVIEW_POOL_CAPACITY = 5;
final int EVENT_SCREEN_ROTATE_CAMERA_REFRESH = 1;
final int EVENT_CALC_ENCODE_FPS = 2;
final int EVENT_READ_RAW_FILE = 3;
private final int DECODE_UNI_SIZE = 512*1024; //512KB is enough for 1080p
long mPreviewBufferSize = 0;
long mDelay_lastPreviewTick = 0;
long mDelay_lastEncodeTick = 0;
long mDelay_lastDecodeTick = 0;
int mCurAvcDecoderWidth = 0;
int mCurAvcDecoderHeight = 0;
int mFrameCountIntoEncoder = 0;
int mFrameCountOutofEncoder = 0;
int mFrameCountIntoDecoder = 0;
int mFrameCountOutofDecoder = 0;
private final int EVENT_GET_ENCODE_OUTPUT = 1;
private final int EVENT_REQUEST_KEY_FRAME = 2;
private final int EVENT_GO_DECODE = 3;
Queue<VideoBufferInfo> mPreviewBuffers_clean = null;
Queue<VideoBufferInfo> mPreviewBuffers_dirty = null;
Queue<VideoBufferInfo> mDecodeBuffers_clean = null;
Queue<VideoBufferInfo> mDecodeBuffers_dirty = null;
List<Integer> mListPreviewFormats_front;
List<Integer> mListPreviewFps_front;
List<int[]> mListPreviewSizes_front; //0-w, 1-h
List<Integer> mListPreviewFormats_back;
List<Integer> mListPreviewFps_back;
List<int[]> mListPreviewSizes_back; //0-w, 1-h
List<String> mList_string_facing; //temp
int mPeriodKeyFrame = -1; //ms
private int mRawHeight;
private int mRawWidth;
private final static Object mAvcEncLock = new Object();
private final static Object mAvcDecLock = new Object();
SvcEncoder mSvcEnc = null;
AvcDecoder mAvcDec = null;
private byte[] mAvcBuf = null;
CodecThread m_codec_thread;
int mETFps = 30;
int mEncode_bps = 600 * 1000;
long mAvcEncodeFirstOutputGap_starttick = 0;
long mPreviewGap_starttick = 0;
byte[] mRawData = null;
private Spinner mSpnColorFormat;
private Spinner mSpnSize;
private Spinner mSpnFacing;
private Spinner mSpnFPS;
private boolean mTestingFPS;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main2);
initView();
initData();
initparam();
}
private void initView(){
mSurfacePreview = (SurfaceView)findViewById(R.id.surfaceView_preview);
mSpnColorFormat = (Spinner)findViewById(R.id.Spinner_cf);
mSpnSize = (Spinner)findViewById(R.id.Spinner_size);
mSpnFacing = (Spinner)findViewById(R.id.Spinner_facing);
mSpnFPS = (Spinner)findViewById(R.id.Spinner_fps);
SurfaceHolder sh = mSurfacePreview.getHolder();
sh.addCallback(this);
mSurfaceDecoder = (SurfaceView)findViewById(R.id.surfaceView_decode);
mSurfaceDecoder.getHolder().addCallback(this);
int rot = getWindowManager().getDefaultDisplay().getRotation();
DisplayMetrics dm = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(dm);
if (rot == Surface.ROTATION_0 || rot == Surface.ROTATION_180)
{
mSpnColorFormat.setMinimumWidth(dm.widthPixels/4);
mSpnSize.setMinimumWidth(dm.widthPixels/4);
mSpnFacing.setMinimumWidth(dm.widthPixels/4);
mSpnFPS.setMinimumWidth(dm.widthPixels/4);
}
else
{
mSpnColorFormat.setMinimumWidth(dm.heightPixels/4);
mSpnSize.setMinimumWidth(dm.heightPixels/4);
mSpnFacing.setMinimumWidth(dm.heightPixels/4);
mSpnFPS.setMinimumWidth(dm.heightPixels/4);
}
}
private void initData(){
mPreviewBuffers_clean = new LinkedList<VideoBufferInfo>();
mPreviewBuffers_dirty = new LinkedList<VideoBufferInfo>();
mDecodeBuffers_clean = new LinkedList<VideoBufferInfo>();
mDecodeBuffers_dirty = new LinkedList<VideoBufferInfo>();
mListPreviewFormats_front = new ArrayList<Integer>();
mListPreviewFps_front = new ArrayList<Integer>();
mListPreviewSizes_front = new ArrayList<int[]>();
mListPreviewFormats_back = new ArrayList<Integer>();
mListPreviewFps_back = new ArrayList<Integer>();
mListPreviewSizes_back = new ArrayList<int[]>();
mSvcEnc = new SvcEncoder();
mSvcEnc.Init();
mAvcBuf = new byte[AvcEncoder.DEFAULT_AVC_BUF_SIZE];
m_codec_thread = new CodecThread();
m_codec_thread.start();
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
startEncoder();
if (mEventHandler != null)
mEventHandler.sendEmptyMessage(EVENT_CALC_ENCODE_FPS);
mSpnColorFormat.setOnItemSelectedListener(new SpinnerSelectedListener());
mSpnSize.setOnItemSelectedListener(new SpinnerSelectedListener());
mSpnFacing.setOnItemSelectedListener(new SpinnerSelectedListener());
mSpnFPS.setOnItemSelectedListener(new SpinnerSelectedListener());
}
public void startEncoder(){
if (mDecodeBuffers_clean == null)
mDecodeBuffers_clean = new LinkedList<VideoBufferInfo>();
if (mDecodeBuffers_dirty == null)
mDecodeBuffers_dirty = new LinkedList<VideoBufferInfo>();
//reconstructed: the rest of startEncoder() and the start of initparam() were mangled when
//this listing was pasted; this sketch pre-allocates the decode buffer pool, creates the
//AvcDecoder and kicks off the decode loop on the codec thread.
for(int i=0;i<PREVIEW_POOL_CAPACITY;i++)
{
VideoBufferInfo info = new VideoBufferInfo();
info.buffer = new byte[DECODE_UNI_SIZE];
mDecodeBuffers_clean.add(info);
}
mAvcDec = new AvcDecoder();
mAvcDec.Init();
if (m_CodecMsgHandler != null)
{
m_CodecMsgHandler.sendEmptyMessage(EVENT_GO_DECODE);
}
}
private void initparam(){
int numOfCameras = 1;
if (Build.VERSION.SDK_INT >= 9)
{
numOfCameras = Camera.getNumberOfCameras();
}
else
{
numOfCameras = 1;
}
mList_string_facing = new ArrayList<String>();
for(int i=0;i<numOfCameras;i++)
{
mList_string_facing.add(Integer.toString(i)); //spinner entries "0", "1", ... per camera (reconstructed)
}
FillSpinner_Facing(mList_string_facing);
}
//reconstructed signature: the original method name was cut from the paste
private void FillSpinner_Facing(List<String> list_string_facing)
{
ArrayAdapter<String> adapter_facing = new ArrayAdapter<String>(this,android.R.layout.simple_spinner_item,list_string_facing);
adapter_facing.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mSpnFacing.setAdapter(adapter_facing);
}
private void GetherCameraInfoAndSetparam(CameraInfoCollector collector, List<Integer> ListPreviewFormats_dst, List<Integer> ListPreviewFps_dst, List<int[]> ListPreviewSizes_dst)
{
List<Integer> ListPreviewFormats;
List<int[]> ListPreviewFpsRanges;
List<Camera.Size> ListPreviewSizes;
{
Log.i(TAG, "Group.................................................");
collector.getFacing(true); //for log
collector.getOrientation(true); //for log
ListPreviewFormats = collector.getSupportedPreviewFormats(true);
ListPreviewFpsRanges = collector.getSupportedPreviewFpsRanges(true);
Camera cam = collector.getCamera();
Camera.Parameters para = cam.getParameters();
ListPreviewSizes = collector.getSupportedPreviewSizes(true);
Iterator ite = ListPreviewSizes.iterator();
while(ite.hasNext())
{
Camera.Size siz = (Camera.Size) ite.next();
if (Math.ceil(siz.width/32.0) * 32 > siz.width)
{
ite.remove();
}
}
//copy the List
ListPreviewFormats_dst.clear();
for(Integer inte:ListPreviewFormats)
{
Integer intee = new Integer(inte.intValue());
if(!ListPreviewFormats_dst.contains(intee))
{
ListPreviewFormats_dst.add(intee);
}
}
ListPreviewFps_dst.clear();
for (int[] ints:ListPreviewFpsRanges)
{
for (int i=ints[0]/1000;i<=ints[1]/1000;i++)
{
if (!ListPreviewFps_dst.contains(i))
{
ListPreviewFps_dst.add(i);
}
}
}
ListPreviewSizes_dst.clear();
for (Camera.Size siz1:ListPreviewSizes)
{
int[] siz_wh = new int[2];
siz_wh[0] = siz1.width;
siz_wh[1] = siz1.height;
if (!ListPreviewSizes_dst.contains(siz_wh))
{
ListPreviewSizes_dst.add(siz_wh);
}
}
}
}
public void TestFPS(View view){
mCam = Camera.open(0);
setCameraDisplayOrientation(this, 0, mCam);
try {
startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation)
{
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
{
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else {
// back-facing
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
@Override
public void onError(int error, Camera camera) {
Log.e(TAG, "onError, arg0="+error+",arg1="+camera.toString());
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
//Size siz = camera.new Size(mSelectWidth, mSelectHeight);
if (data == null)
{
return;
}
Camera.Parameters param = camera.getParameters();
Camera.Size siz = param.getPreviewSize();
int format = param.getPreviewFormat();
if (siz.height == 0 || siz.width == 0){
return;
}
if (mRawHeight != siz.height || mRawWidth != siz.width)
{
//mAvcEncOutputSuspend = true;
mEventHandler.removeMessages(EVENT_CALC_ENCODE_FPS);
mEventHandler.sendEmptyMessage(EVENT_CALC_ENCODE_FPS);
if (m_CodecMsgHandler != null)
{
m_CodecMsgHandler.removeMessages(EVENT_GET_ENCODE_OUTPUT);
if (mPeriodKeyFrame > 0)
{
m_CodecMsgHandler.removeMessages(EVENT_REQUEST_KEY_FRAME);
}
}
synchronized(mAvcEncLock) {
mRawHeight = siz.height;
mRawWidth = siz.width;
mFrameCountIntoEncoder = 0;
mFrameCountOutofEncoder = 0;
if (mSvcEnc != null)
{
mSvcEnc.Stop();
if (Build.VERSION.SDK_INT <= 17) //for "CP-DX80"
{
mSvcEnc.Uninit();
mSvcEnc.Init();
}
int capa = mSvcEnc.GetSpacialLayerCapacity();
SvcEncodeSpacialParam[] params = new SvcEncodeSpacialParam[capa];
params[0] = new SvcEncodeSpacialParam();
params[0].mWidth = mRawWidth;
params[0].mHeight = mRawHeight;
params[0].mFrameRate = mETFps;//mEncode_fps;
params[0].mBitrate = mEncode_bps;//mEncode_bps;
mSvcEnc.Configure(params, 60);
mSvcEnc.Start();
}
}
if (m_CodecMsgHandler != null)
{
m_CodecMsgHandler.sendEmptyMessage(EVENT_GET_ENCODE_OUTPUT);
if (mPeriodKeyFrame > 0)
{
m_CodecMsgHandler.sendEmptyMessageDelayed(EVENT_REQUEST_KEY_FRAME, mPeriodKeyFrame);
}
}
mAvcEncodeFirstOutputGap_starttick = System.currentTimeMillis();
}
//check size
int expected_size = getPreviewBufferSize(mRawWidth, mRawHeight, mSelectColorFormat);
if (data.length >= expected_size)
{
synchronized(mAvcEncLock) {
if (mPreviewBuffers_clean.isEmpty() == false)
{
VideoBufferInfo info = mPreviewBuffers_clean.poll();
info.buffer = data;
info.size = getPreviewBufferSize(mRawWidth, mRawHeight, mSelectColorFormat);
info.timestamp = System.currentTimeMillis();
mPreviewBuffers_dirty.add(info);
}
}
}
else
{
camera.addCallbackBuffer(data);
}
//debug for delay
mDelay_lastPreviewTick = System.currentTimeMillis();
}
class SpinnerSelectedListener implements AdapterView.OnItemSelectedListener {
public void onItemSelected(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
boolean try_runtume = false;
switch(arg0.getId())
{
case R.id.Spinner_facing:
String str = mList_string_facing.get(arg2);
if (str.equals("0"))
{
//fill spinner
int siz = mListPreviewFormats_back.size();
Log.i("xxx","size="+siz);
FillSpinner_CF_Size_FPS(mListPreviewFormats_back, mListPreviewSizes_back, mListPreviewFps_back);
mSelectedFacing = 0;
}
else
{
FillSpinner_CF_Size_FPS(mListPreviewFormats_front, mListPreviewSizes_front, mListPreviewFps_front);
mSelectedFacing = 1;
}
mSpnColorFormat.setVisibility(View.VISIBLE);
mSpnSize.setVisibility(View.VISIBLE);
mSpnFPS.setVisibility(View.VISIBLE);
break;
case R.id.Spinner_cf:
if (mSelectedFacing == 0)
{
mSelectColorFormat = mListPreviewFormats_back.get(arg2).intValue();
}
else
{
mSelectColorFormat = mListPreviewFormats_front.get(arg2).intValue();
}
try_runtume = true;
break;
case R.id.Spinner_size:
if (mSelectedFacing == 0)
{
int[] siz_wh = mListPreviewSizes_back.get(arg2);
mSelectWidth = siz_wh[0];
mSelectHeight = siz_wh[1];
}
else
{
int[] siz_wh = mListPreviewSizes_front.get(arg2);
mSelectWidth = siz_wh[0];
mSelectHeight = siz_wh[1];
}
try_runtume = true;
break;
case R.id.Spinner_fps:
if (mSelectedFacing == 0)
{
mSelectFPS = mListPreviewFps_back.get(arg2).intValue();
}
else {
mSelectFPS = mListPreviewFps_front.get(arg2).intValue();
}
try_runtume = true;
break;
}
if (mTestingFPS == true && try_runtume == true)
{
mPreviewGap_starttick = System.currentTimeMillis();
stopPreview();
try {
startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void onNothingSelected(AdapterView<?> arg0) {
switch(arg0.getId())
{
case R.id.Spinner_facing:
Log.i("xxx","Spinner_facing ");
break;
case R.id.Spinner_cf:
Log.i("xxx","Spinner_cf");
break;
case R.id.Spinner_size:
Log.i("xxx","Spinner_size");
break;
}
}
}
private void stopPreview()
{
if (mCam == null)
return;
Log.i(TAG,"stopPreview1");
mCam.stopPreview();
Log.i(TAG,"stopPreview2");
/*
if (mUsePreviewBuffer == false)
mCam.setPreviewCallback(null);*/
}
private void FillSpinner_CF_Size_FPS(List<Integer> ListPreviewFormats, List<int[]> ListPreviewSizes, List<Integer> ListPreviewFPS)
{
List<String> list_string_cf = new ArrayList<String>();
List<String> list_string_size = new ArrayList<String>();
List<String> list_string_FPS = new ArrayList<String>();
for(Integer inte:ListPreviewFormats)
{
switch(inte.intValue())
{
case 17:
{
String str = new String("NV21");
list_string_cf.add(str);
}
break;
case 842094169:
{
String str = new String("YV12");
list_string_cf.add(str);
}
break;
case 20:
{
String str = new String("YUY2");
list_string_cf.add(str);
}
break;
}
}
ArrayAdapter<String> adapter_cf = new ArrayAdapter<String>(this,android.R.layout.simple_spinner_item,list_string_cf);
adapter_cf.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mSpnColorFormat.setAdapter(adapter_cf);
//mSpnColorFormat.invalidate();
for(int[] siz_wh:ListPreviewSizes)
{
String str = Integer.toString(siz_wh[0]) + "x" + Integer.toString(siz_wh[1]);
list_string_size.add(str);
}
ArrayAdapter<String> adapter_size = new ArrayAdapter<String>(this,android.R.layout.simple_spinner_item,list_string_size);
adapter_size.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mSpnSize.setAdapter(adapter_size);
for (Integer fps:ListPreviewFPS)
{
String str = Integer.toString(fps.intValue());
list_string_FPS.add(str);
}
ArrayAdapter<String> adapter_FPS = new ArrayAdapter<String>(this,android.R.layout.simple_spinner_item,list_string_FPS);
adapter_FPS.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mSpnFPS.setAdapter(adapter_FPS);
}
Handler mEventHandler = new Handler() {
public void handleMessage(Message msg) {
switch (msg.what) {
case EVENT_SCREEN_ROTATE_CAMERA_REFRESH:
removeMessages(EVENT_SCREEN_ROTATE_CAMERA_REFRESH);
break;
case EVENT_CALC_ENCODE_FPS:
int dec_delay = 0;
if (mDelay_lastDecodeTick != 0){
dec_delay = (int) (mDelay_lastPreviewTick - mDelay_lastDecodeTick);
}
int enc_delay = 0;
if (mDelay_lastEncodeTick != 0){
enc_delay = (int)(mDelay_lastPreviewTick - mDelay_lastEncodeTick);
}
sendEmptyMessageDelayed(EVENT_CALC_ENCODE_FPS, 1000);
break;
case EVENT_READ_RAW_FILE:
break;
}
}
};
private void startPreview() throws IOException
{
if (mCam == null)
return;
mCam.setPreviewDisplay(mSurfacePreview.getHolder());
Camera.Parameters para = mCam.getParameters();
try {
para.setPreviewFormat(mSelectColorFormat);
para.setPreviewSize(mSelectWidth, mSelectHeight);
//para.setPreviewFrameRate(mSelectFPS);
para.setPreviewFpsRange(mSelectFPS*1000, mSelectFPS*1000);
List<String> supportedFocus = para.getSupportedFocusModes();
if (supportedFocus != null && supportedFocus.indexOf(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO) >= 0){
para.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCam.setParameters(para);
Log.i(TAG," mSelectColorFormat="+mSelectColorFormat+",mSelectWidth="+mSelectWidth+",mSelectHeight="+mSelectHeight+",mSelectFPS="+mSelectFPS);
}
catch(Exception ex){
//if (ex == )
Log.i(TAG,"exception="+ex);
}
synchronized(mAvcEncLock) {
if (mPreviewBuffers_clean == null)
mPreviewBuffers_clean = new LinkedList<VideoBufferInfo>();
if (mPreviewBuffers_dirty == null)
mPreviewBuffers_dirty = new LinkedList<VideoBufferInfo>();
int size = getPreviewBufferSize(mSelectWidth, mSelectHeight, mSelectColorFormat);
if (size > mPreviewBufferSize){
mPreviewBufferSize = size;
mCam.setPreviewCallbackWithBuffer(null);
mPreviewBuffers_clean.clear();
mPreviewBuffers_dirty.clear();
for(int i=0;i<PREVIEW_POOL_CAPACITY;i++)
{
//reconstructed: allocate a fresh preview buffer pool and register it with the camera
VideoBufferInfo info = new VideoBufferInfo();
info.buffer = new byte[size];
mPreviewBuffers_clean.add(info);
mCam.addCallbackBuffer(info.buffer);
}
}
else
{
//recycle the dirty buffers back into the clean pool and hand them to the camera again
Iterator<VideoBufferInfo> ite = mPreviewBuffers_dirty.iterator();
while(ite.hasNext())
{
VideoBufferInfo info = ite.next();
mPreviewBuffers_clean.add(info);
ite.remove();
mCam.addCallbackBuffer(info.buffer);
}
}
} //synchronized(mAvcEncLock)
mCam.setPreviewCallbackWithBuffer(this);
mCam.setErrorCallback(this);
Log.i(TAG," startPreview1");
mCam.startPreview();
Log.i(TAG," startPreview2");
}
private int getPreviewBufferSize(int width, int height, int format) {
int size = 0;
switch(format)
{
case ImageFormat.YV12:
{
int yStride = (int) Math.ceil(width / 16.0) * 16;
int uvStride = (int) Math.ceil( (yStride / 2) / 16.0) * 16;
int ySize = yStride * height;
int uvSize = uvStride * height / 2;
size = ySize + uvSize * 2;
}
break;
case ImageFormat.NV21:
{
float bytesPerPix = (float)ImageFormat.getBitsPerPixel(format) / 8;
size = (int) (width * height * bytesPerPix);
}
break;
}
return size;
}
private class CodecThread extends Thread{
@Override
public void run(){
Looper.prepare();
m_CodecMsgHandler = new Handler(){
@Override
public void handleMessage(Message msg){
//Log.i("wme_android"," what= "+msg.what);
switch (msg.what) {
case EVENT_GET_ENCODE_OUTPUT:
{
int res = AvcUtils.R_BUFFER_OK;
synchronized(mAvcEncLock) {
if (mSvcEnc != null)
{
if (mPreviewBuffers_dirty != null && mPreviewBuffers_clean != null)
{
Iterator<VideoBufferInfo> ite = mPreviewBuffers_dirty.iterator();
while (ite.hasNext())
{
VideoBufferInfo info = ite.next();
byte[] data = info.buffer;
int data_size = info.size;
if (mSelectColorFormat == ImageFormat.YV12)
{
int[] cs = new int[1];
mSvcEnc.queryInt(AvcEncoder.KEY_COLORFORMAT, cs);
if (cs[0] == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar)
{
if (mRawData == null ||
mRawData.length < data_size)
{
mRawData = new byte[data_size];
}
swapYV12toI420(data, mRawData, mRawWidth, mRawHeight);
}
else
{
mRawData = data;
}
}
else
{
if (mRawData == null) //only log once
Log.e(TAG, "ColorFormat有误, 当前的是: "+mSelectColorFormat);
mRawData = data;
}
int flag = 0;
res = mSvcEnc.InputRawBuffer(mRawData, data_size, info.timestamp, flag);
if (res != AvcUtils.R_BUFFER_OK)
{
break;
}
else
{
mFrameCountIntoEncoder ++;
//Log.d(TAG, "mFrameCountIntoEncoder = "+mFrameCountIntoEncoder);
ite.remove();
mPreviewBuffers_clean.add(info);
if (mCam != null)
{
mCam.addCallbackBuffer(data);
}
}
} //while
} //if (mPreviewBuffers_dirty != null && mPreviewBuffers_clean != null)
} //if (mSvcEnc != null)
} //synchronized(mAvcEncLock)
res = AvcUtils.R_BUFFER_OK;
while(res == AvcUtils.R_BUFFER_OK)
{
int[] len = new int[1];
len[0] = mAvcBuf.length;
SvcEncodeOutputParam svc_output = new SvcEncodeOutputParam();
synchronized(mAvcEncLock) {
if (mSvcEnc != null)
{
res = mSvcEnc.OutputAvcBuffer(mAvcBuf, len, svc_output);
// if (res == AvcEncoder.R_INVALIDATE_BUFFER_SIZE)
// {
// //mAvcBuf should be refreshed
// len[0] = mAvcBuf.length;
// res = mAvcEnc.OutputAvcBuffer(mAvcBuf, len, ts, flags);
// }
}
else
{
res = AvcUtils.R_TRY_AGAIN_LATER;
}
}
if (res == AvcUtils.R_BUFFER_OK)
{
mFrameCountOutofEncoder ++;
if (mAvcEncodeFirstOutputGap_starttick != 0)
{
long tick = System.currentTimeMillis();
Log.i("AvcEncoder", "第一个 encoder:"+(tick - mAvcEncodeFirstOutputGap_starttick));
mAvcEncodeFirstOutputGap_starttick = 0;
}
//debug for delay
mDelay_lastEncodeTick = svc_output.timestamp;
if (mDecodeBuffers_clean != null && mDecodeBuffers_dirty != null)
{
synchronized(mAvcDecLock) {
Iterator<VideoBufferInfo> ite = mDecodeBuffers_clean.iterator();
if (ite.hasNext())
{
VideoBufferInfo info = ite.next();
if (info.buffer.length >= len[0])
{
info.timestamp = svc_output.timestamp;
info.size = len[0];
System.arraycopy(mAvcBuf, 0, info.buffer, 0, len[0]);
ite.remove();
mDecodeBuffers_dirty.add(info);
}
else {
Log.e(TAG, "decoder len: "+len[0]+" buffer len: "+info.buffer.length);
}
}
}
}
//Log.i(TAG, "get encoded data, len="+len[0]);
}
else if (res == AvcUtils.R_OUTPUT_UPDATE)
{
res = AvcUtils.R_BUFFER_OK;
}
else if (res == AvcUtils.R_TRY_AGAIN_LATER)
{
}
else
{
}
}
m_CodecMsgHandler.sendEmptyMessageDelayed(EVENT_GET_ENCODE_OUTPUT, 10);
}
break;
case EVENT_REQUEST_KEY_FRAME:
{
if (mSvcEnc != null)
{
mSvcEnc.RequestKeyFrameSoon();
}
if (mPeriodKeyFrame > 0)
m_CodecMsgHandler.sendEmptyMessageDelayed(EVENT_REQUEST_KEY_FRAME, mPeriodKeyFrame);
}
break;
case EVENT_GO_DECODE:
{
if (mAvcDec != null)
{
synchronized(mAvcDecLock) {
int res = AvcUtils.R_BUFFER_OK;
if (mDecodeBuffers_dirty != null && mDecodeBuffers_clean != null)
{
Iterator<VideoBufferInfo> ite = mDecodeBuffers_dirty.iterator();
while (ite.hasNext())
{
int flag = 0;
VideoBufferInfo info = ite.next();
byte[] sps_nal = null;
int sps_len = 0;
byte[] pps_nal = null;
int pps_len = 0;
int nal_type = AvcUtils.NAL_TYPE_UNSPECIFY;
ByteBuffer byteb = ByteBuffer.wrap(info.buffer, 0, info.size);
//SPS
if (true == AvcUtils.goToPrefix(byteb))
{
int sps_position = 0;
int pps_position = 0;
nal_type = AvcUtils.getNalType(byteb);
if (AvcUtils.NAL_TYPE_SPS == nal_type)
{
sps_position = byteb.position() - AvcUtils.START_PREFIX_LENGTH - AvcUtils.NAL_UNIT_HEADER_LENGTH;
//PPS
if (true == AvcUtils.goToPrefix(byteb))
{
nal_type = AvcUtils.getNalType(byteb);
if (AvcUtils.NAL_TYPE_PPS == nal_type)
{
pps_position = byteb.position() - AvcUtils.START_PREFIX_LENGTH - AvcUtils.NAL_UNIT_HEADER_LENGTH;
sps_len = pps_position - sps_position;
sps_nal = new byte[sps_len];
int cur_pos = byteb.position();
byteb.position(sps_position);
byteb.get(sps_nal, 0, sps_len);
byteb.position(cur_pos);
//slice
if (true == AvcUtils.goToPrefix(byteb))
{
nal_type = AvcUtils.getNalType(byteb);
int pps_end_position = byteb.position() - AvcUtils.START_PREFIX_LENGTH - AvcUtils.NAL_UNIT_HEADER_LENGTH;
pps_len = pps_end_position - pps_position;
}
else {
pps_len = byteb.position() - pps_position;
//pps_len = byteb.limit() - pps_position;
}
if (pps_len > 0)
{
pps_nal = new byte[pps_len];
cur_pos = byteb.position();
byteb.position(pps_position);
byteb.get(pps_nal, 0, pps_len);
byteb.position(cur_pos);
}
}
else{
throw new UnsupportedOperationException("nal type :"+nal_type);
}
}
}
else{
//Log.d(TAG, "NAL type: "+nal_type);
}
//2. configure the AVC decoder with the SPS/PPS
if (sps_nal != null && pps_nal != null)
{
int[] width = new int[1];
int[] height = new int[1];
AvcUtils.parseSPS(sps_nal, width, height);
Log.d(TAG, "mCurAvcDecoderHeight="+mCurAvcDecoderHeight+",height="+height[0]+",mCurAvcDecoderWidth="+mCurAvcDecoderWidth+",width="+width[0]);
if (mCurAvcDecoderWidth != width[0] || mCurAvcDecoderHeight != height[0])
{
mCurAvcDecoderWidth = width[0];
mCurAvcDecoderHeight = height[0];
// mFrameCountIntoDecoder = 0;
// mFrameCountOutofDecoder = 0;
if (Build.VERSION.SDK_INT <= 17) //for "CP-DX80"
{
mAvcDec.Uninit();
mAvcDec.Init();
}
mAvcDec.stop();
mAvcDec.tryConfig(mSurfaceDecoder.getHolder().getSurface(), sps_nal, pps_nal);
mAvcDec.start();
}
// else
// {
// mAvcDec.flush();
// }
flag = MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
}
}
res = mAvcDec.InputAvcBuffer(info.buffer, info.size, info.timestamp, flag);
if (res != AvcUtils.R_BUFFER_OK){
if (res == AvcUtils.R_INVALID_STATE)
{
if (mSvcEnc != null)
{
mAvcDec.stop();
if (Build.VERSION.SDK_INT <= 17) //for "CP-DX80"
{
mSvcEnc.RequestKeyFrameSoon();
}
}
}
break; // if this buffer failed, the remaining data will not be decoded
}
else
{
mFrameCountIntoDecoder ++;
ite.remove();
mDecodeBuffers_clean.add(info);
Log.d(TAG, "size :"+mDecodeBuffers_clean.size());
}
} //while
}
int[] len = new int[1];
long[] ts = new long[1];
res = AvcUtils.R_BUFFER_OK;
while(res == AvcUtils.R_BUFFER_OK)
{
res = mAvcDec.OutputRawBuffer(null, len, ts);
if (res == AvcUtils.R_BUFFER_OK)
{
mFrameCountOutofDecoder ++;
mDelay_lastDecodeTick = ts[0];
}
else if (res == AvcUtils.R_OUTPUT_UPDATE)
{
res = AvcUtils.R_BUFFER_OK;
}
else if (res == AvcUtils.R_TRY_AGAIN_LATER)
{
}
else if (res == AvcUtils.R_INVALID_STATE)
{
if (mSvcEnc != null)
{
mAvcDec.stop();
if (Build.VERSION.SDK_INT <= 17)
{
Log.w(TAG, "AvcDecoder met(OutputRawBuffer)出现异常, 请求新的IDR");
mSvcEnc.RequestKeyFrameSoon();
}
}
}
else
{
}
}
}
}
m_CodecMsgHandler.sendEmptyMessageDelayed(EVENT_GO_DECODE, 10);
}
break;
default:
break;
}
}
};
Looper.loop();
}
}
//yv12 to yuv420
private void swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height)
{
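//YV12 stores its planes as Y, then V, then U, while I420 expects Y, U, V,
//so copy the Y plane as-is and swap the order of the two chroma planes.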
System.arraycopy(yv12bytes, 0, i420bytes, 0,width*height);
System.arraycopy(yv12bytes, width*height+width*height/4, i420bytes, width*height, width*height/4);
System.arraycopy(yv12bytes, width*height, i420bytes, width*height+width*height/4, width*height/4);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
}
MediaCodec demo source code