Android OpenGL ES from Beginner to Advanced (4): Camera Preview with OpenGL ES 2.0 + GLSurfaceView

Source code: https://github.com/smzhldr/AGLFramework

1. Overview

There are several ways to preview the camera on Android: SurfaceView, TextureView, and GLSurfaceView can all display the camera feed. If you want to modify the preview in real time, for example to apply beauty or skin-smoothing filters, previewing with GLSurfaceView + OpenGL ES is very convenient. Because Camera2 is only supported on Android 5.0 and above, most camera apps still use the legacy Camera API, so this article uses Camera as the example.

2. Preview Steps

Step 1: Open the camera and create a SurfaceTexture to receive the frames it captures (remember to request the camera permission before opening the camera; see the sketch after the code below).

public void openCamera() throws IOException {
    Camera camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
    // The SurfaceTexture receives the camera's preview frames; it must be backed by a texture
    int textureId = createCameraTexture();
    SurfaceTexture surfaceTexture = new SurfaceTexture(textureId);
    camera.setPreviewTexture(surfaceTexture);  // throws IOException
    camera.startPreview();
}
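
On Android 6.0 and above the camera permission must also be requested at runtime. Below is a minimal sketch, assuming the code lives in an Activity and the androidx ContextCompat/ActivityCompat helpers are available; REQUEST_CAMERA is an arbitrary request code chosen for this example. Remember to also declare the CAMERA permission in AndroidManifest.xml.

private static final int REQUEST_CAMERA = 1; // arbitrary request code used by this example

private void requestCameraPermissionIfNeeded() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        // Ask the user for the CAMERA permission; the result is delivered to onRequestPermissionsResult()
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
    }
}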

Step 2: Create an OpenGL ES external (OES) texture
Camera preview on Android requires an OpenGL ES external texture. To keep things intuitive, you can think of a texture as something like an image or a canvas; it backs the SurfaceTexture and receives the camera's callback data. For now, the code that creates this texture can be treated as boilerplate:

// Note: camera preview requires a texture of the GL_TEXTURE_EXTERNAL_OES type
private int createCameraTexture() {
    int[] texture = new int[1];
    GLES20.glGenTextures(1, texture, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    return texture[0];
}

3. Displaying the Camera Preview on Screen

As mentioned above, the SurfaceTexture receives the camera's preview data, which means new preview frames keep arriving in the SurfaceTexture. Here we must call surfaceTexture.updateTexImage() ourselves to pull the latest frame into the backing texture. Note that this call has to be made on the GL thread that owns the texture, typically at the start of onDrawFrame(), as shown in the sketch below.

surfaceTexture.updateTexImage();
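
A minimal sketch of the usual flow, assuming a GLSurfaceView in RENDERMODE_WHEN_DIRTY: the frame-available callback requests a render, and updateTexImage() runs on the GL thread before the texture is sampled (textureMatrix here is assumed to be a float[16] holding the texture-coordinate transform).

// Called (on an arbitrary thread) whenever the camera delivers a new frame
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        glSurfaceView.requestRender(); // schedule a redraw on the GL thread
    }
});

// Inside Renderer.onDrawFrame(), on the GL thread:
surfaceTexture.updateTexImage();                  // latch the newest camera frame into the OES texture
surfaceTexture.getTransformMatrix(textureMatrix); // fetch the matching texture-coordinate transform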

So how do we get at the data in the SurfaceTexture? The texture we passed in when creating the SurfaceTexture already carries the preview frames, so the problem reduces to drawing a texture that has data onto the screen. That is essentially the same as what was covered in part (2) of this series, Rendering a Texture to the Screen with OpenGL ES 2.0; review that article if it is unfamiliar. The complete demo is given below. To keep the overall logic easy to follow, everything is written in a single Activity, so you can paste it into a project, request the camera permission, and run it directly. If you do not have a utility class like OpenGLUtils, compile and link the shaders as described in the previous article, part (3), OpenGL ES 2.0 Basics (Hello Triangle), or use the sketch right after this paragraph. Questions and comments are welcome.
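
Here is a minimal sketch of such a shader compile-and-link helper, matching the OpenGLUtils.createGlProgram(vertexSource, fragmentSource) call used in the demo below (the class and method names are simply what this demo assumes, not a library API):

public class OpenGLUtils {

    // Compiles both shaders, links them into a program, and returns the program id (0 on failure)
    public static int createGlProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (vertexShader == 0 || fragmentShader == 0) {
            return 0;
        }
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e("OpenGLUtils", "Link error: " + GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            return 0;
        }
        return program;
    }

    // Compiles a single shader and returns its id (0 on failure)
    private static int loadShader(int type, String source) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e("OpenGLUtils", "Compile error: " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }
}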

Here is the full demo source; feel free to use it as-is:

public class CameraActivity extends Activity {

    GLSurfaceView glSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        glSurfaceView = new GLSurfaceView(this);
        glSurfaceView.setEGLContextClientVersion(2);            // request an OpenGL ES 2.0 context
        glSurfaceView.setRenderer(new CameraRenderer());
        glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); // draw only when requestRender() is called
        setContentView(glSurfaceView);                          // don't forget to show the GLSurfaceView

    @Override
    protected void onResume() {
        super.onResume();
        glSurfaceView.onResume();
    }


    @Override
    protected void onPause() {
        super.onPause();
        glSurfaceView.onPause();
    }


    private class CameraRenderer implements GLSurfaceView.Renderer {

        private final String vertexShaderCode =
                "attribute vec4 vPosition;" +
                        "uniform mat4 u_Matrix;" +
                        "attribute vec4 atextureCoordinate;" +
                        "varying vec2 aCoordinate;" +
                        "void main() {" +
                        "  gl_Position = vPosition;" +
                        // the SurfaceTexture transform matrix is applied to the texture coordinates, not the positions
                        "  aCoordinate = (u_Matrix * atextureCoordinate).xy;" +
                        "}";

        private final String fragmentShaderCode =
                "#extension GL_OES_EGL_image_external : require\n" +
                        "precision mediump float;" +
                        "uniform samplerExternalOES uTexture;" +
                        "varying vec2 aCoordinate;" +
                        "void main() {" +
                        "  gl_FragColor = texture2D(uTexture, aCoordinate);" +
                        "}";

        float[] cube = {
                -1.0f, -1.0f,
                1.0f, -1.0f,
                -1.0f, 1.0f,
                1.0f, 1.0f,
        };

        float[] textureCoord = {
                0.0f, 0.0f,
                1.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 1.0f,
        };

        private FloatBuffer vertexBuffer, textureBuffer;

        private int program;
        private int glPosition;
        private int glTextCoordinate;
        private int glTexture;
        private int glMatrix;

        private SurfaceTexture surfaceTexture;
        private int textureId;

        CameraRenderer() {
            vertexBuffer = ByteBuffer.allocateDirect(cube.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
            vertexBuffer.put(cube);
            vertexBuffer.position(0);

            textureBuffer = ByteBuffer.allocateDirect(textureCoord.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
            textureBuffer.put(textureCoord);
            textureBuffer.position(0);
        }

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            // The demo opens the front camera and never releases it; a real app should keep a
            // reference and call stopPreview()/release() when the surface is destroyed.
            Camera camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
            textureId = createCameraTexture();
            surfaceTexture = new SurfaceTexture(textureId);
            surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                    // A new camera frame is ready; schedule a redraw on the GL thread
                    glSurfaceView.requestRender();
                }
            });
            try {
                camera.setPreviewTexture(surfaceTexture);
                camera.startPreview();
            } catch (IOException e) {
                e.printStackTrace();
            }

            program = OpenGLUtils.createGlProgram(vertexShaderCode, fragmentShaderCode);
            glPosition = GLES20.glGetAttribLocation(program, "vPosition");
            glTextCoordinate = GLES20.glGetAttribLocation(program, "atextureCoordinate");
            glMatrix = GLES20.glGetUniformLocation(program, "u_Matrix");
            glTexture = GLES20.glGetUniformLocation(program, "uTexture");
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            GLES20.glViewport(0, 0, width, height);
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            // Latch the newest camera frame into the OES texture and fetch its transform matrix
            surfaceTexture.updateTexImage();
            float[] matrix = new float[16];
            surfaceTexture.getTransformMatrix(matrix);

            GLES20.glClearColor(0, 0, 0, 0);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            GLES20.glUseProgram(program);

            // Bind the external texture to texture unit 0 and pass the transform matrix
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
            GLES20.glUniform1i(glTexture, 0);
            GLES20.glUniformMatrix4fv(glMatrix, 1, false, matrix, 0);

            // Upload the quad vertices and texture coordinates
            vertexBuffer.clear();
            vertexBuffer.put(cube).position(0);
            GLES20.glEnableVertexAttribArray(glPosition);
            GLES20.glVertexAttribPointer(glPosition, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);

            textureBuffer.clear();
            textureBuffer.put(textureCoord).position(0);
            GLES20.glEnableVertexAttribArray(glTextCoordinate);
            GLES20.glVertexAttribPointer(glTextCoordinate, 2, GLES20.GL_FLOAT, false, 0, textureBuffer);

            // Draw the quad as a triangle strip
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

            GLES20.glDisableVertexAttribArray(glPosition);
            GLES20.glDisableVertexAttribArray(glTextCoordinate);
        }

        private int createCameraTexture() {
            int[] texture = new int[1];
            GLES20.glGenTextures(1, texture, 0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
            return texture[0];
        }
    }
}
