Camera is one of the most important multimedia modules on a phone, and many interesting features can be built on top of it: filters of all kinds, image recognition, and the currently very popular AR and VR. All of these involve image processing, and the GPU is an excellent choice for processing and displaying camera images efficiently; OpenGL is the interface through which the GPU hardware is accessed. This article shows how to use OpenGL to render the camera's preview data.
1. Android Camera API
The Android Camera API makes it quite easy to write a camera application. Before Android L, camera apps were written against the android.hardware.Camera API; starting with Android L, Google introduced the android.hardware.camera2 API. camera2 provides more flexible access to the underlying camera hardware and allows per-frame control of parameters such as AEC, AWB and AF, but it is also somewhat more complex to use, so this article sticks with the older Camera API. There are two key steps: open the camera and start the preview. The camera-related class below follows this blog post: http://blog.csdn.net/yanzi1225627/article/details/33339965/
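Before diving into the full class, the essence of that flow can be sketched in a few lines (a minimal, hypothetical helper, not part of the project; error handling is reduced to propagating the IOException from setPreviewTexture):
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import java.io.IOException;

public class CameraQuickStart {
    // Open the default back-facing camera and start streaming frames into the
    // given SurfaceTexture (which is backed by an OES texture, see section 2).
    public static Camera openAndPreview(SurfaceTexture surfaceTexture) throws IOException {
        Camera camera = Camera.open();
        camera.setPreviewTexture(surfaceTexture);
        camera.startPreview();
        return camera;
    }

    // Stop the preview and release the camera hardware when done.
    public static void stopAndRelease(Camera camera) {
        camera.stopPreview();
        camera.release();
    }
}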
package com.wfly.kui.magiccamera.camera;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import com.wfly.kui.magiccamera.utils.CamParaUtil;
import com.wfly.kui.magiccamera.utils.FileUtil;
import com.wfly.kui.magiccamera.utils.ImageUtil;
import java.io.IOException;
import java.util.List;
/**
* Created by kuiw on 11/3/2016.
*/
public class CameraInstance {
private static final String TAG="CameraInstance";
private Camera mCamera;
private Camera.Parameters mParams;
private static CameraInstance mCameraInstance;
private boolean isPreviewing=false;
private boolean isOpened=false;
private Camera.Size mPreviewSize;
private Camera.Size mPictureSize;
public interface CamOpenedCallback{
public void cameraHasOpened();
}
public boolean isPreviewing(){
return isPreviewing;
}
public boolean isOpened(){
return isOpened;
}
public Camera.Size getmPreviewSize(){return mPreviewSize;}
private CameraInstance(){
}
public static synchronized CameraInstance getInstance(){
if(mCameraInstance == null){
mCameraInstance = new CameraInstance();
}
return mCameraInstance;
}
public void doOpenCamera(CamOpenedCallback callback){
Log.i(TAG, "doOpenCamera....");
if(mCamera == null){
mCamera = Camera.open();
isOpened=true;
Log.i(TAG, "Camera open over....");
if(callback != null){
callback.cameraHasOpened();
}
}else{
Log.i(TAG, "Camera is in open status");
}
}
public void doStartPreview(SurfaceTexture surface, float previewRate){
Log.i(TAG, "doStartPreview...");
if(isPreviewing){
Log.e(TAG,"camera is in previewing state");
return ;
}
if(mCamera != null){
try {
mCamera.setPreviewTexture(surface);
} catch (IOException e) {
e.printStackTrace();
}
initCameraParams();
}
}
/**
* Stop the preview and release the Camera.
*/
public void doStopCamera(){
if(null != mCamera)
{
isOpened=false;
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
isPreviewing = false;
mCamera.release();
mCamera = null;
}
}
public void doStopPreview(){
Log.e(TAG,"doStopPreview....");
if (isPreviewing && null!=mCamera){
mCamera.stopPreview();
isPreviewing=false;
}else{
Log.e(TAG,"camera is in not in previewing status");
}
}
public void doTakePicture(){
if(isPreviewing && (mCamera != null)){
mCamera.takePicture(mShutterCallback, null, mJpegPictureCallback);
}
}
private void initCameraParams(){
if(mCamera != null){
mParams = mCamera.getParameters();
mParams.setPictureFormat(ImageFormat.JPEG);
mPictureSize = CamParaUtil.getInstance().getPropPictureSize(
mParams.getSupportedPictureSizes(),30, 800);
mParams.setPictureSize(mPictureSize.width, mPictureSize.height);
mPreviewSize = CamParaUtil.getInstance().getPropPreviewSize(
mParams.getSupportedPreviewSizes(), 30, 800);
mParams.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
mCamera.setDisplayOrientation(90);
List<String> focusModes = mParams.getSupportedFocusModes();
if(focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)){
mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(mParams);
mCamera.startPreview();
isPreviewing = true;
mParams = mCamera.getParameters();
}
}
// Shutter callback: fired when the shutter is pressed. A custom "click" sound
// could be played here; the system plays one by default.
Camera.ShutterCallback mShutterCallback = new Camera.ShutterCallback() {
public void onShutter() {
// TODO Auto-generated method stub
Log.i(TAG, "myShutterCallback:onShutter...");
}
};
// Callback for the uncompressed raw data of the captured picture; may be null.
Camera.PictureCallback mRawCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
// TODO Auto-generated method stub
Log.i(TAG, "myRawCallback:onPictureTaken...");
}
};
// Callback for the JPEG image data, the most important of the three.
Camera.PictureCallback mJpegPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
// TODO Auto-generated method stub
Log.i(TAG, "myJpegCallback:onPictureTaken...");
Bitmap b = null;
if(null != data){
b = BitmapFactory.decodeByteArray(data, 0, data.length);
mCamera.stopPreview();
isPreviewing = false;
}
if(null != b)
{
Bitmap rotaBitmap = ImageUtil.getRotateBitmap(b, 90.0f);
FileUtil.saveBitmap(rotaBitmap);
}
mCamera.startPreview();
isPreviewing = true;
}
};
}
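One practical note: besides declaring android.permission.CAMERA in AndroidManifest.xml, on Android 6.0 and later the permission also has to be granted at runtime, otherwise Camera.open() inside doOpenCamera() will fail. A minimal sketch of such a check in the hosting Activity (the helper name is illustrative, using the v4 support library):
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;

public class CameraPermissionHelper {
    private static final int REQUEST_CAMERA = 1;

    // Returns true if the CAMERA permission is already granted;
    // otherwise requests it from the user and returns false for now.
    public static boolean ensureCameraPermission(Activity activity) {
        if (ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA)
                == PackageManager.PERMISSION_GRANTED) {
            return true;
        }
        ActivityCompat.requestPermissions(activity,
                new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
        return false;
    }
}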
2. GLSurfaceView
The renderer mainly implements three callbacks: onSurfaceCreated, onSurfaceChanged and onDrawFrame. Once the surface has been created, the camera preview is started.
Three classes implement the relevant pieces: CameraSurfaceView, FilterRender and GLUtils.
package com.wfly.kui.magiccamera.camera;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
* Created by kuiw on 11/3/2016.
*/
public class CameraSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer,SurfaceTexture.OnFrameAvailableListener, CameraInstance.CamOpenedCallback {
public static final String TAG="CameraSurfaceView";
private Context mContext;
private SurfaceTexture mSurface;
private int mTextureID = -1;
private DirectDrawer mDirectDrawer;
private FilterRender mFilterDrawer;
public CameraSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
setRenderer(this);
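// Render on demand only: a frame is drawn when requestRender() is called from onFrameAvailable().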
setRenderMode(RENDERMODE_WHEN_DIRTY);
this.mContext=context;
}
@Override
public void onPause() {
super.onPause();
CameraInstance.getInstance().doStopPreview();
}
@Override
public void onResume() {
super.onResume();
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
Log.i(TAG, "onSurfaceCreated...");
//mTextureID = createTextureID();
//mDirectDrawer = new DirectDrawer(mTextureID);
mFilterDrawer=new FilterRender(mContext);
mTextureID=mFilterDrawer.getmTexture();
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
//CameraInstance.getInstance().doOpenCamera(null);
}
@Override
public void onSurfaceChanged(GL10 gl10, int width, int height) {
GLES20.glViewport(0, 0, width, height);
if(!CameraInstance.getInstance().isPreviewing()){
CameraInstance.getInstance().doStartPreview(mSurface, 1.33f);
}
}
@Override
public void onDrawFrame(GL10 gl10) {
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
Log.i(TAG, "onDrawFrame...");
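// Latch the most recent camera frame into the external OES texture before drawing.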
mSurface.updateTexImage();
mFilterDrawer.drawTexture();
/*
float[] mtx = new float[16];
mSurface.getTransformMatrix(mtx);
mDirectDrawer.draw(mtx);*/
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
this.requestRender();
}
@Override
public void cameraHasOpened() {
}
private int createTextureID()
{
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return texture[0];
}
}
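Note that the doOpenCamera() call in onSurfaceCreated() is commented out, so the camera has to be opened elsewhere, before onSurfaceChanged() tries to start the preview. A minimal sketch of a hosting Activity (the class name is hypothetical; it reuses CameraSurfaceView and CameraInstance from this post) could look like this:
import android.app.Activity;
import android.os.Bundle;
import com.wfly.kui.magiccamera.camera.CameraInstance;
import com.wfly.kui.magiccamera.camera.CameraSurfaceView;

public class CameraActivity extends Activity {
    private CameraSurfaceView mCameraView;   // the GLSurfaceView defined above

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mCameraView = new CameraSurfaceView(this, null);
        setContentView(mCameraView);
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Open the camera before the GL surface comes up so that
        // onSurfaceChanged() can start the preview on it.
        if (!CameraInstance.getInstance().isOpened()) {
            CameraInstance.getInstance().doOpenCamera(null);
        }
        mCameraView.onResume();   // resume the GL render thread
    }

    @Override
    protected void onPause() {
        mCameraView.onPause();    // pauses GL rendering and stops the preview
        CameraInstance.getInstance().doStopCamera();
        super.onPause();
    }
}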
package com.wfly.kui.magiccamera.camera;
import android.content.Context;
import android.hardware.Camera;
import android.media.effect.Effect;
import android.media.effect.EffectContext;
import android.media.effect.EffectFactory;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import com.wfly.kui.magiccamera.utils.GLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;
/**
* Created by kuiw on 11/4/2016.
*/
public class FilterRender {
public static final String TAG="FilterRender";
private int mTexture=-1;
private Context mContext;
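// Three quads, 4 vertices each, drawn with GL_TRIANGLE_STRIP in drawTexture():
// the first covers the whole screen, the second the upper-left quadrant and the
// third the upper-right quadrant, so the camera frame is rendered once full-screen
// and twice more as smaller tiles. The texture coordinates below repeat the same
// full-frame mapping for each quad.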
private final float vertices[] = {
-1f, -1f,
1f, -1f,
-1f, 1f,
1f, 1f,
-1f,0f,
0f,0f,
-1f,1f,
0f,1f,
0f,0f,
1f,0f,
0f,1f,
1f,1f,
};
private final float texturevertices[]={
0f,1f,
1f,1f,
0f,0f,
1f,0f,
0f,1f,
1f,1f,
0f,0f,
1f,0f,
0f,1f,
1f,1f,
0f,0f,
1f,0f,
};
private FloatBuffer verticesbBuf;
private FloatBuffer textureVerticesBuf;
private String vertexShader;
private String fragmentShader;
private int aPositionHandle;
private int uTextureHandle;
private int aTexPositionHandle;
private int program;
private EffectContext effectContext;
private Effect effect;
private int mEffectTexture;
private Camera.Size mPreviewSize;
private void applyEffectForTexture(){
effect=effectContext.getFactory().createEffect(EffectFactory.EFFECT_BRIGHTNESS);
//effect.setParameter("scale", .5f);;
effect.setParameter("brightness", 4.0f);
effect.apply(mTexture,CameraInstance.getInstance().getmPreviewSize().width,CameraInstance.getInstance().getmPreviewSize().height,mEffectTexture);
}
private void initFloatBuffer(){
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length*4);
byteBuffer.order(ByteOrder.nativeOrder());
verticesbBuf=byteBuffer.asFloatBuffer();
verticesbBuf.put(vertices);
verticesbBuf.position(0);
byteBuffer=ByteBuffer.allocateDirect(texturevertices.length*4);
byteBuffer.order(ByteOrder.nativeOrder());
textureVerticesBuf=byteBuffer.asFloatBuffer();
textureVerticesBuf.put(texturevertices);
textureVerticesBuf.position(0);
}
public void drawTexture(){
// GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,0);
GLES20.glUseProgram(program);
//GLES20.glDisable(GLES20.GL_BLEND);
//applyEffectForTexture();
aPositionHandle = GLES20.glGetAttribLocation(program, "aPosition");
aTexPositionHandle = GLES20.glGetAttribLocation(program, "aTexPosition");
uTextureHandle = GLES20.glGetUniformLocation(program, "uTexture");
GLES20.glVertexAttribPointer(aPositionHandle,2,GLES20.GL_FLOAT,false,0,verticesbBuf);
GLES20.glEnableVertexAttribArray(aPositionHandle);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,mTexture);
GLES20.glUniform1i(uTextureHandle,0);
GLES20.glVertexAttribPointer(aTexPositionHandle, 2, GLES20.GL_FLOAT, false, 0, textureVerticesBuf);
GLES20.glEnableVertexAttribArray(aTexPositionHandle);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
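// Draw the three quads defined above, 4 vertices per quad, offset index*4 into the buffers.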
for(int index=0;index<3;index++){
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, index*4, 4);
}
GLES20.glDisableVertexAttribArray(aTexPositionHandle);
GLES20.glDisableVertexAttribArray(aPositionHandle);
}
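// SurfaceTexture can only stream camera frames into a GL_TEXTURE_EXTERNAL_OES texture,
// so the texture is created with that target here and later sampled with
// samplerExternalOES in the fragment shader.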
private int createTextureID()
{
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return texture[0];
}
public FilterRender(Context context){
this.mTexture=createTextureID();
this.mContext=context;
effectContext=EffectContext.createWithCurrentGlContext();
mEffectTexture=createTextureID();
initFloatBuffer();
mPreviewSize=CameraInstance.getInstance().getmPreviewSize();
vertexShader= GLUtils.loadFromAssetsFile("vertexshader.vs",mContext.getAssets());
fragmentShader=GLUtils.loadFromAssetsFile("fragmentshader.vs",mContext.getAssets());
program=GLUtils.createProgram(vertexShader,fragmentShader);
}
public int getmTexture(){return mTexture;}
}
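The constructor above loads two scripts, vertexshader.vs and fragmentshader.vs, from the assets directory, but their contents are not shown in this post. A minimal pair that matches the attribute and uniform names queried in drawTexture() (aPosition, aTexPosition, uTexture) might look like the following; they are written as Java string constants purely for illustration (the holder class is hypothetical), and in the project they would simply be the contents of the two asset files. The key point is that camera frames live in a GL_TEXTURE_EXTERNAL_OES texture, so the fragment shader must require the GL_OES_EGL_image_external extension and sample through samplerExternalOES:
public class ShaderSources {
    // Pass-through vertex shader: forwards the position and the texture coordinate.
    public static final String VERTEX_SHADER =
            "attribute vec4 aPosition;\n" +
            "attribute vec2 aTexPosition;\n" +
            "varying vec2 vTexPosition;\n" +
            "void main() {\n" +
            "    gl_Position = aPosition;\n" +
            "    vTexPosition = aTexPosition;\n" +
            "}\n";

    // Fragment shader sampling the camera frame from the external OES texture.
    public static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "uniform samplerExternalOES uTexture;\n" +
            "varying vec2 vTexPosition;\n" +
            "void main() {\n" +
            "    gl_FragColor = texture2D(uTexture, vTexPosition);\n" +
            "}\n";
}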
package com.wfly.kui.magiccamera.utils;
import android.content.Context;
import android.content.res.AssetManager;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.util.Log;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
/**
* Created by Administrator on 2016/10/22.
*/
public class GLUtils {
public static final String TAG="GLUtils";
/**
* Load a shader.
*
* Steps:
*
* ① Create the shader object
* ② Attach the shader source
* ③ Compile the shader
* ④ Check the compile status
*
* @param shaderType shader type: vertex shader (GLES20.GL_VERTEX_SHADER) or fragment shader (GLES20.GL_FRAGMENT_SHADER)
* @param source shader source string
* @return handle of the loaded shader, or 0 if loading failed
*/
public static int loadShader(int shaderType , String source){
// 1. Create a shader object and record its id; if the id == 0, creation failed
int shader = GLES20.glCreateShader(shaderType);
if(shader != 0){
// 2. If the shader was created successfully, attach the shader source to it
GLES20.glShaderSource(shader, source);
// 3. Compile the shader whose source has been attached
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
// 4. Query the compile status; 0 means compilation failed
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if(compiled[0] == 0){
Log.e("ES20_ERROR", "Could not compile shader " + shaderType + ":");
Log.e("ES20_ERROR", GLES20.glGetShaderInfoLog(shader));
// On failure, log the error and delete the shader
GLES20.glDeleteShader(shader);
shader = 0;
}
}else{
Log.e(TAG,"create shader failed:");
}
return shader;
}
/**
* Check that each GL operation succeeded.
*
* GLES20.glGetError() returns the error code; GL_NO_ERROR means there was no error.
*
* @param op name of the operation that was just executed, e.g. "glAttachShader"
* after calling glAttachShader()
*/
public static void checkGLError(String op){
int error;
// If the error code is not GL_NO_ERROR, log it and throw an exception
while( (error = GLES20.glGetError()) != GLES20.GL_NO_ERROR ){
Log.e("ES20_ERROR", op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
/**
* Create a shader program.
*
* ① Load the vertex shader
* ② Load the fragment shader
* ③ Create the program object
* ④ Attach the vertex shader to the program
* ⑤ Attach the fragment shader to the program
* ⑥ Link the program
* ⑦ Check the link status
*
* @param vertexSource vertex shader source string
* @param fragmentSource fragment shader source string
* @return the program handle, or 0 on failure
*/
public static int createProgram(String vertexSource , String fragmentSource){
// 1. Load the vertex shader; 0 means loading failed
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if(vertexShader == 0) {
Log.e(TAG,"load vertex shader failed!");
return 0;
}
// 2. Load the fragment shader; 0 means loading failed
int fragShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if(fragShader == 0) {
Log.e(TAG,"load fragment shader failed!");
return 0;
}
// 3. Create the program object; 0 means creation failed
int program = GLES20.glCreateProgram();
if(program != 0){
// 4. Attach the vertex shader to the program
GLES20.glAttachShader(program, vertexShader);
checkGLError("glAttachShader");
// 5. Attach the fragment shader to the program
GLES20.glAttachShader(program, fragShader);
checkGLError("glAttachShader");
// 6. Link the program
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
// Check the link status
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if(linkStatus[0] != GLES20.GL_TRUE){
Log.e("ES20.ERROR", "Failed to link program: ");
Log.e("ES20.ERROR", GLES20.glGetProgramInfoLog(program));
// Delete the program if linking failed
GLES20.glDeleteProgram(program);
program = 0;
}
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragShader);
}
if (program != 0) {
    Log.i(TAG, "Create program successfully");
}
return program;
}
/**
* Load a shader script from assets.
*
* ① Open an input stream for the file in the assets directory
* ② Create a buffered output stream
* ③ Read the file byte by byte into the buffer
* ④ Convert the buffered data into a string
*
* @param fileName name of the shader file in the assets directory
* @param resources application resources
* @return the file contents as a string, or null on error
*/
public static String loadFromAssetsFile(String fileName, Resources resources){
String result = null;
try {
// 1. Open an input stream for the file in the assets directory (a byte stream over the file)
InputStream is = resources.getAssets().open(fileName);
int ch = 0;
// 2. Create a buffered output stream; bytes are read one at a time (note the int return type of read())
ByteArrayOutputStream baos = new ByteArrayOutputStream();
// 3. Read the data byte by byte and write it into the buffer
while((ch = is.read()) != -1){
baos.write(ch);
}
// 4. Convert the buffer into a byte array and then into a string
byte[] buffer = baos.toByteArray();
baos.close();
is.close();
result = new String(buffer, "UTF-8");
result = result.replaceAll("\\r\\n", "\n");
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
public static String loadFromAssetsFile(String fileName, AssetManager assetManager){
String result = null;
try {
// 1. Open an input stream for the file in the assets directory (a byte stream over the file)
InputStream is = assetManager.open(fileName);
int ch = 0;
// 2. Create a buffered output stream; bytes are read one at a time (note the int return type of read())
ByteArrayOutputStream baos = new ByteArrayOutputStream();
// 3. Read the data byte by byte and write it into the buffer
while((ch = is.read()) != -1){
baos.write(ch);
}
// 4. Convert the buffer into a byte array and then into a string
byte[] buffer = baos.toByteArray();
baos.close();
is.close();
result = new String(buffer, "UTF-8");
result = result.replaceAll("\\r\\n", "\n");
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
public static Bitmap loadBitmapFromAssetsFile(String fileName,Resources resources){
Bitmap bitmap=null;
try {
// Open an input stream for the file in the assets directory
InputStream is = resources.getAssets().open(fileName);
BitmapFactory.Options options = new BitmapFactory.Options();
bitmap = BitmapFactory.decodeStream(is, null, options);
if(bitmap==null){
Log.e(TAG,"Failed to load image from asset file: "+fileName);
}
} catch (Exception e) {
e.printStackTrace();
}
return bitmap;
}
}