The encoder uses the open-source x264 library, and the code is easy to follow: it is just a thin JNI wrapper around it.
The encoding library is posted on the BBS (CSDN's download section is too painful to use):
http://www.eoeandroid.com/forum.php?mod=viewthread&tid=52739&extra=
How to build x264 for Android:
export ARM_ROOT=$ANDROID_NDK_ROOT
export ARM_INC=$ARM_ROOT/build/platforms/android-5/arch-arm/usr/include/
export ARM_LIB=$ARM_ROOT/build/platforms/android-5/arch-arm/usr/lib/
export ARM_TOOL=$ARM_ROOT/build/prebuilt/windows/arm-eabi-4.4.0
export ARM_LIBO=$ARM_TOOL/lib/gcc/arm-eabi/4.4.0
export PATH=$ARM_TOOL/bin:$PATH
export ARM_PRE=arm-eabi
./configure --prefix=/home/egmkang/libx264 --enable-shared \
--disable-asm --host=arm-linux --cross-prefix=arm-eabi- \
--extra-cflags=" -I$ARM_INC -fPIC -DANDROID -fpic -mthumb-interwork -ffunction-sections -funwind-tables -fstack-protector -fno-short-enums -D__ARM_ARCH_5__ -D__ARM_ARCH_5T__ -D__ARM_ARCH_5E__ -D__ARM_ARCH_5TE__ -Wno-psabi -march=armv5te -mtune=xscale -msoft-float -mthumb -Os -fomit-frame-pointer -fno-strict-aliasing -finline-limit=64 -DANDROID -Wa,--noexecstack -MMD -MP " \
--extra-ldflags="-nostdlib -Bdynamic -Wl,--no-undefined -Wl,-z,noexecstack -Wl,-z,nocopyreloc -Wl,-soname,/system/lib/libz.so -Wl,-rpath-link=$ARM_LIB,-dynamic-linker=/system/bin/linker -L$ARM_LIB -nostdlib $ARM_LIB/crtbegin_dynamic.o $ARM_LIB/crtend_android.o -lc -lm -ldl -lgcc"
After configure, run make and make install; what you end up with here is a static x264 library (libx264.a plus x264.h under the --prefix directory).
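As a quick sanity check that the cross-compiled library is actually usable (this is just a suggestion, not part of the original write-up), a tiny test program along the lines below can be built with the same toolchain, pushed to a device or emulator, and run. The file name check_x264.c is made up; point the include and library paths at whatever --prefix was set to.

/* check_x264.c - minimal sanity check for the cross-compiled libx264.a */
#include <stdio.h>
#include <x264.h>

int main(void)
{
    x264_param_t param;
    x264_param_default(&param);
    param.i_width  = 352;
    param.i_height = 288;

    x264_t *h = x264_encoder_open(&param);
    if (h == NULL) {
        printf("x264_encoder_open failed\n");
        return 1;
    }
    printf("x264 build %d: encoder opened and closed OK\n", X264_BUILD);
    x264_encoder_close(h);
    return 0;
}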
The only slightly fiddly part of the whole project was a link error when building the JNI shared library. It turned out one library was missing from the link, so following x264's own build flags I added a few settings to Android.mk and it worked. That was really the hard part; I searched around online a lot without finding an answer.
Adjust the directory parameters to suit your own setup.
I put the libx264.a and x264.h built above into the jni/libx264 directory (the layout sketch after the Android.mk below shows the exact paths it expects); change the paths if yours differ.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_C_INCLUDES +=$(LOCAL_PATH)/libx264/include
LOCAL_MODULE := H264Android
LOCAL_SRC_FILES := H264Android.c
LOCAL_LDFLAGS += $(LOCAL_PATH)/libx264/lib/libx264.a
LOCAL_LDLIBS := -L$(SYSROOT)/usr/lib -lgcc
include $(BUILD_SHARED_LIBRARY)
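For reference, the directory layout that the paths in this Android.mk assume looks roughly like this (adjust if your names differ):

jni/
    Android.mk
    H264Android.c
    libx264/
        include/
            x264.h
        lib/
            libx264.a

With a reasonably recent NDK, running ndk-build from the project root should then produce libs/armeabi/libH264Android.so, which is what the System.loadLibrary("H264Android") call below loads.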
Many people will probably find this familiar, heh: it is based on
http://www.cnblogs.com/mcodec/articles/1780598.html
with modifications; I did not even change the file names. Lazy, I know.
One more note: the encoding speed is quite low.
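If the x264 tree being built is new enough to have the preset API (x264_param_default_preset), part of that speed can usually be recovered by asking for a fast preset instead of the plain defaults in CompressBegin. The helper below is only a sketch of the idea, not something from the original code, and the name setup_fast_params is made up:

/* Sketch: speed-oriented parameter setup; x264_param_default_preset returns
 * a negative value if the preset name is not recognized, in which case we
 * fall back to plain defaults. */
#include <x264.h>

static void setup_fast_params(x264_param_t *param, int width, int height)
{
    if (x264_param_default_preset(param, "ultrafast", "zerolatency") < 0)
        x264_param_default(param);
    param->i_log_level = X264_LOG_NONE;
    param->i_width  = width;
    param->i_height = height;
    param->i_fps_num = 5;
    param->i_fps_den = 1;
    param->i_bframe = 0;
    param->rc.i_lookahead = 0;
}

The --disable-asm in the configure line above also costs a lot of speed; re-enabling the assembly optimizations (if the toolchain can assemble them) tends to help more than any parameter tweak.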
AndroidVideo.java
package h264.com; // the package must match the JNI function names (Java_h264_com_H264Encoder_...)

import java.io.File;
import java.io.RandomAccessFile;

import android.app.Activity;
import android.content.res.Configuration;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;

public class AndroidVideo extends Activity implements Callback, Camera.PictureCallback {
    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Camera mCamera = null;
    private boolean mPreviewRunning = false;

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.camera);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        if (mPreviewRunning) {
            mCamera.stopPreview();
        }
        Camera.Parameters p = mCamera.getParameters();
        p.setPreviewSize(352, 288);
        // every preview frame is handed to the encoder
        mCamera.setPreviewCallback(new H264Encoder(352, 288));
        mCamera.setParameters(p);
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (Exception ex) {
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mPreviewRunning = false;
            mCamera.release();
            mCamera = null;
        }
    }

    public void onConfigurationChanged(Configuration newConfig) {
        try {
            super.onConfigurationChanged(newConfig);
            if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
            } else if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
            }
        } catch (Exception ex) {
        }
    }
}

class H264Encoder implements Camera.PreviewCallback {
    long encoder = 0;
    RandomAccessFile raf = null;
    byte[] h264Buff = null;

    static {
        System.loadLibrary("H264Android");
    }

    private H264Encoder() {
    }

    public H264Encoder(int width, int height) {
        encoder = CompressBegin(width, height);
        h264Buff = new byte[width * height * 8];
        try {
            File file = new File("/sdcard/camera.h264");
            raf = new RandomAccessFile(file, "rw");
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        }
    }

    protected void finalize() {
        CompressEnd(encoder);
        if (null != raf) {
            try {
                raf.close();
            } catch (Exception ex) {
                Log.v("System.out", ex.toString());
            }
        }
        try {
            super.finalize();
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    private native long CompressBegin(int width, int height);

    private native int CompressBuffer(long encoder, int type, byte[] in, int insize, byte[] out);

    private native int CompressEnd(long encoder);

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // encode the preview frame and append the result to the raw .h264 file
        int result = CompressBuffer(encoder, -1, data, data.length, h264Buff);
        try {
            if (result > 0)
                raf.write(h264Buff, 0, result);
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        }
    }
}
H264Android.c
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <arpa/inet.h>
#include <jni.h>
#include <x264.h>

#define DATA_MAX 3000000
#define H264_MTU 1024

typedef struct {
    x264_param_t *param;
    x264_t *handle;
    x264_picture_t *picture;
    x264_nal_t *nal;
} Encoder;

jlong Java_h264_com_H264Encoder_CompressBegin(JNIEnv* env, jobject thiz,
        jint width, jint height)
{
    Encoder *en = (Encoder *) malloc(sizeof(Encoder));
    en->param = (x264_param_t *) malloc(sizeof(x264_param_t));
    en->picture = (x264_picture_t *) malloc(sizeof(x264_picture_t));
    x264_param_default(en->param); /* set default params */
    //en->param->rc.i_rc_method = X264_RC_CQP;
    en->param->i_log_level = X264_LOG_NONE;
    en->param->i_width = width;   /* frame width */
    en->param->i_height = height; /* frame height */
    en->param->rc.i_lookahead = 0;
    en->param->i_bframe = 0;
    en->param->i_fps_num = 5;
    en->param->i_fps_den = 1;
    if ((en->handle = x264_encoder_open(en->param)) == 0) {
        return 0;
    }
    /* allocate the reusable input picture */
    x264_picture_alloc(en->picture, X264_CSP_I420, en->param->i_width,
            en->param->i_height);
    return (jlong) en;
}

jint Java_h264_com_H264Encoder_CompressEnd(JNIEnv* env, jobject thiz, jlong handle)
{
    Encoder *en = (Encoder *) handle;
    if (en->picture) {
        x264_picture_clean(en->picture);
        free(en->picture);
        en->picture = 0;
    }
    if (en->param) {
        free(en->param);
        en->param = 0;
    }
    if (en->handle) {
        x264_encoder_close(en->handle);
    }
    free(en);
    return 0;
}

jint Java_h264_com_H264Encoder_CompressBuffer(JNIEnv* env, jobject thiz,
        jlong handle, jint type, jbyteArray in, jint insize, jbyteArray out)
{
    Encoder *en = (Encoder *) handle;
    x264_picture_t pic_out;
    int nNal = -1;
    int result = 0;
    int i = 0;

    jbyte *Buf = (*env)->GetByteArrayElements(env, in, 0);
    jbyte *h264Buf = (*env)->GetByteArrayElements(env, out, 0);
    unsigned char *pTmpOut = (unsigned char *) h264Buf;
    int nPicSize = en->param->i_width * en->param->i_height;

    /* The camera preview frames are NV21: all Y samples first, then the
     * chroma stored interleaved as VU VU ..., so de-interleave them into
     * the planar I420 picture that x264 expects. */
    jbyte *y = en->picture->img.plane[0];
    jbyte *u = en->picture->img.plane[1];
    jbyte *v = en->picture->img.plane[2];
    memcpy(y, Buf, nPicSize);
    for (i = 0; i < nPicSize / 4; i++) {
        *(v + i) = *(Buf + nPicSize + i * 2);     /* V comes first in NV21 */
        *(u + i) = *(Buf + nPicSize + i * 2 + 1);
    }

    switch (type) {
    case 0:
        en->picture->i_type = X264_TYPE_P;
        break;
    case 1:
        en->picture->i_type = X264_TYPE_IDR;
        break;
    case 2:
        en->picture->i_type = X264_TYPE_I;
        break;
    default:
        en->picture->i_type = X264_TYPE_AUTO;
        break;
    }

    if (x264_encoder_encode(en->handle, &(en->nal), &nNal, en->picture,
            &pic_out) < 0) {
        (*env)->ReleaseByteArrayElements(env, in, Buf, 0);
        (*env)->ReleaseByteArrayElements(env, out, h264Buf, 0);
        return -1;
    }

    /* concatenate every NAL unit x264 produced for this frame into out */
    for (i = 0; i < nNal; i++) {
        memcpy(pTmpOut, en->nal[i].p_payload, en->nal[i].i_payload);
        pTmpOut += en->nal[i].i_payload;
        result += en->nal[i].i_payload;
    }

    /* release the pinned Java arrays so each frame does not pin a copy */
    (*env)->ReleaseByteArrayElements(env, in, Buf, 0);
    (*env)->ReleaseByteArrayElements(env, out, h264Buf, 0);
    return result;
}
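One note on CompressEnd: with i_bframe = 0 and rc.i_lookahead = 0 there is normally little or nothing buffered inside the encoder, so closing it directly as above is fine. If B-frames, lookahead, or threaded encoding were ever enabled, the frames still queued inside x264 should be drained before x264_encoder_close. On x264 versions that provide x264_encoder_delayed_frames, a minimal sketch (hypothetical helper, not part of the original code) looks like this:

/* Sketch: drain any frames still buffered inside the encoder by calling
 * x264_encoder_encode with a NULL input picture until nothing is left. */
static void encoder_flush(Encoder *en)
{
    x264_picture_t pic_out;
    int nNal = 0;

    while (x264_encoder_delayed_frames(en->handle) > 0) {
        if (x264_encoder_encode(en->handle, &(en->nal), &nNal, NULL, &pic_out) < 0)
            break;
        /* the NAL units in en->nal would be copied out here, exactly as
         * CompressBuffer does for normal frames */
    }
}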