Android Camera2 + OpenGL ES 2.0: real-time image filtering, callback display, and video encoding

demo:
http://download.csdn.net/download/keen_zuxwang/10043183

This builds on the earlier post "Android Camera2 Opengles2.0 预览图像实时滤镜 视频编码"
(http://blog.csdn.net/keen_zuxwang/article/details/78366598),
adding an FBO real-time filter pass and a readback display callback, built around glReadPixels.
glReadPixels reads pixels from the color buffer of the framebuffer currently bound to GL_FRAMEBUFFER.
On the default (window) framebuffer of a double-buffered context that is the back buffer, i.e. what has just been drawn but not yet presented; here the read targets our own FBO-attached texture, so the window's front/back swap is not involved at all.
void glReadPixels(GLint x,
                  GLint y,
                  GLsizei width,
                  GLsizei height,
                  GLenum format,
                  GLenum type,
                  GLvoid *data)
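
As a minimal sketch of the readback path (using android.opengl.GLES20 and java.nio; the helper name readFramebuffer and its parameters are illustrative, not part of the demo), the pixels of the currently bound RGBA framebuffer can be pulled into a Bitmap like this:

    // Read the currently bound framebuffer into an ARGB_8888 Bitmap.
    // GL_RGBA / GL_UNSIGNED_BYTE is the (format, type) pair every OpenGL ES 2.0
    // implementation must support for glReadPixels.
    public static Bitmap readFramebuffer(int width, int height) {
        ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * 4)
                .order(ByteOrder.nativeOrder());
        GLES20.glReadPixels(0, 0, width, height,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
        pixels.rewind();
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(pixels);
        return bitmap; // note: GL rows are bottom-up, so the Bitmap is vertically flipped
    }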

The vertex shader and fragment shader are unchanged.

New addition: an FBO helper class.

package com.vr.jarry.playvideo_texuture;

import android.opengl.GLES20;

public class EasyGlUtils {
    private EasyGlUtils(){
    }

    public static void defaultTexParameter(){
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_CLAMP_TO_EDGE);
    }

    public static void useTexParameter(int gl_wrap_s, int gl_wrap_t, int gl_min_filter, int gl_mag_filter){
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,gl_wrap_s);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,gl_wrap_t);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,gl_min_filter);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,gl_mag_filter);
    }

    //Generate textures and set their target, size and format; GLES20.glDrawElements()/GLES20.glDrawArrays() can then render fragments into them
    public static void genTexturesWithParameter(int size, int[] textures,int start, int gl_format,int width,int height){
        GLES20.glGenTextures(size, textures, start);
        for (int i = 0; i < size; i++) {
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[start + i]); //honor the start offset
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, gl_format, width, height, 0, gl_format, GLES20.GL_UNSIGNED_BYTE, null);
            defaultTexParameter();
        }
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,0);
    }

    public static void generateBindFrameTexture(int[] frameBufferId, int[] renderId, int[] textureId, int width, int height){
        //generate framebuffer and renderbuffer ids, plus the texture to render into
        GLES20.glGenFramebuffers(1, frameBufferId, 0);
        GLES20.glGenRenderbuffers(1, renderId, 0);
        genTexturesWithParameter(1, textureId, 0, GLES20.GL_RGBA, width, height);

        //bind the framebuffer and allocate depth storage in the renderbuffer
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId[0]);
        GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId[0]);
        GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);

        //attach the texture as the color attachment and the renderbuffer as the depth attachment
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D, textureId[0], 0);
        GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT,
            GLES20.GL_RENDERBUFFER, renderId[0]);
    }

    //Bind a framebuffer and attach the given texture as its color attachment
    public static void bindFrameTexture(int frameBufferId, int textureId){
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
    }

    public static void unBindFrameBuffer(){
        //GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, 0);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,0);
    }

}
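
Typical usage (a sketch mirroring what SurfaceRenderer does below; fbo, tex, width and height are illustrative locals):

        int width = 1280, height = 720; //illustrative render-target size
        int[] fbo = new int[1];
        int[] tex = new int[1];

        // one-time setup: a framebuffer plus an RGBA texture to render into
        GLES20.glGenFramebuffers(1, fbo, 0);
        EasyGlUtils.genTexturesWithParameter(1, tex, 0, GLES20.GL_RGBA, width, height);

        // per frame: redirect drawing into the texture, draw, read back, restore
        EasyGlUtils.bindFrameTexture(fbo[0], tex[0]);
        GLES20.glViewport(0, 0, width, height);
        // ... glDrawArrays()/glDrawElements() calls now land in tex[0], not the window ...
        EasyGlUtils.unBindFrameBuffer();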

The renderer (shader wrapper) class adds FBO offscreen rendering and the callback display path:

package com.vr.jarry.playvideo_texuture;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import android.os.Message;
import android.util.Log;
import android.view.Surface;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

//extends Thread (rather than implements Runnable) so the activity below can call start()/join()
public  class SurfaceRenderer extends Thread implements SurfaceTexture.OnFrameAvailableListener{
    public static String LOG_TAG = SurfaceRenderer.class.getSimpleName();
    private static float squareCoords[] = {
            -1.0f,  1.0f,  // top left
            -1.0f, -1.0f,  // bottom left
             1.0f, -1.0f,  // bottom right
             1.0f,  1.0f   // top right
    };
    private static short drawOrder[] = { 0, 1, 2, 0, 2, 3};
    // Texture coordinates for the background texture
    private float textureCoords[] = {
            0.0f, 1.0f, 0.0f, 1.0f,
            0.0f, 0.0f, 0.0f, 1.0f,
            1.0f, 0.0f, 0.0f, 1.0f,
            1.0f, 1.0f, 0.0f, 1.0f 
    };

    private int[] textures = new int[1];
    private Context context;
    private int shaderProgram;
    private FloatBuffer vertexBuffer;
    private FloatBuffer textureBuffer;
    private ShortBuffer drawListBuffer;

    private SurfaceTexture videoTexture;
    private float[] videoTextureTransform;
    private boolean frameAvailable = false;

    int textureParamHandle;
    int textureCoordinateHandle;
    int positionHandle;
    int textureTranformHandle;

    protected  Surface surface;
    protected int width;
    protected int height;

    private EGL10 egl;
    private EGLContext eglContext;
    private EGLDisplay eglDisplay;
    private EGLSurface eglSurface;

    TextureViewMediaActivity instance;

    public boolean running = false;
    private float[] frameMatrix=new float[16]; //matrix used to scale/flip the callback (readback) pass

    private boolean isRecord=false;                             //recording flag
    private boolean isShoot=false;                              //snapshot flag
    private ByteBuffer[] outPutBuffer = new ByteBuffer[3]; //buffers holding the read-back pixel data
    private OnFrameCallback onFrameCallback;  //frame callback
    private int frameCallbackWidth, frameCallbackHeight; //dimensions of the callback frames
    private int indexOutput=0;

    public interface OnFrameCallback {
        void onFrame(byte[] bytes, long time);
    }

    public void setOnFrameCallback(int width, int height, OnFrameCallback onFrameCallback){
        this.frameCallbackWidth =  width;
        this.frameCallbackHeight = height;
        if (frameCallbackWidth > 0 && frameCallbackHeight > 0) {
            for(int i=0; i<3; i++) {
                outPutBuffer[i] = ByteBuffer.allocate(width*height*4);
            }
            setFrameCallbackMatrix();
            /*
            // Optional: query the implementation-preferred (format, type) pair for
            // glReadPixels; GL_RGBA / GL_UNSIGNED_BYTE is always supported as well.
            // (The IntBuffer overload above needed allocated buffers; the array overload is simpler.)
            int[] impFormat = new int[1];
            int[] impType = new int[1];
            GLES20.glGetIntegerv(GLES20.GL_IMPLEMENTATION_COLOR_READ_FORMAT, impFormat, 0);
            GLES20.glGetIntegerv(GLES20.GL_IMPLEMENTATION_COLOR_READ_TYPE, impType, 0);
            */
            this.onFrameCallback = onFrameCallback;
            isRecord = true;
        } else {
            this.onFrameCallback = null;
        }
    }

    private void setFrameCallbackMatrix(){
       if(frameCallbackHeight>0 && frameCallbackWidth>0 && width>0 && height>0){
           //compute the output transform: center-crop to the callback size, then flip vertically
           //(MatrixUtils is an external helper; a minimal sketch of it follows this class)
           MatrixUtils.getMatrix(frameMatrix, MatrixUtils.TYPE_CENTERCROP, width, height, frameCallbackWidth,frameCallbackHeight);
           MatrixUtils.flip(frameMatrix, false, true);
       }
    }

    //If a callback is registered, render a scaled copy into the FBO, read the pixels back, and invoke the callback
    private void callbackIfNeeded() {
        if (onFrameCallback != null && (isRecord || isShoot)) {
            //set the viewport; this works exactly like drawing to the screen, except the target is the FBO-attached texture
            GLES20.glViewport(0, 0, frameCallbackWidth, frameCallbackHeight);
            //bind the FBO so the following draw lands in the texture
            EasyGlUtils.bindFrameTexture(fFrame[0], fTexture[0]);
            //GLES20.glDrawElements()/glDrawArrays() now render fragments into the framebuffer-attached texture
            drawTexture(2); //Y mirror, so the read-back image is upright
            //read the pixels back and deliver them to the callback
            frameCallback();
            //unbind, restoring the default (window) framebuffer
            EasyGlUtils.unBindFrameBuffer();
        }
    }

    //Read the pixels back and invoke the callback
    private void frameCallback(){
        //OpenGL's pixel-transfer functions, for reference:
        //glReadPixels: reads pixels - copies already-rendered pixels (which may live in GPU memory) into client memory.
        //glDrawPixels: draws pixels - takes client-memory data and rasterizes it (desktop OpenGL only, not in OpenGL ES).
        //glCopyPixels: copies pixels from one framebuffer region to another (also desktop OpenGL only). Functionally it
        //looks like a read followed by a draw, but it never round-trips through client memory, so it is much faster.
        //In OpenGL ES 2.0, glReadPixels reads from the framebuffer currently bound to GL_FRAMEBUFFER -
        //here that is our FBO, whose color attachment holds the frame just drawn by drawTexture(2).
        /*
        void glReadPixels(  GLint x,
                GLint y,
                GLsizei width,
                GLsizei height,
                GLenum format,
                GLenum type,
                GLvoid * data)
        format and type must form a supported pair:
        format: GL_RGBA, GL_RGB, GL_ALPHA, GL_LUMINANCE, ...
        type:   GL_UNSIGNED_BYTE (0-255 per channel), or the packed types
                GL_UNSIGNED_SHORT_5_6_5, GL_UNSIGNED_SHORT_4_4_4_4, GL_UNSIGNED_SHORT_5_5_5_1
        The implementation-preferred pair can be queried with:
        glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_TYPE,   &eReadType);
        glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_FORMAT, &eReadFormat);
        */
        GLES20.glReadPixels(0, 0, frameCallbackWidth, frameCallbackHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, outPutBuffer[indexOutput]);
        onFrameCallback.onFrame(outPutBuffer[indexOutput].array(),0);
    }

    private int[] fFrame = new int[1];
    private int[] fRender = new int[1];
    private int[] fTexture = new int[1];

    private void deleteFrameBuffer() {
        //GLES20.glDeleteRenderbuffers(1, fRender, 0);
        GLES20.glDeleteFramebuffers(1, fFrame, 0);
        GLES20.glDeleteTextures(1, fTexture, 0);
    }
    public SurfaceRenderer(Context context, Surface surface, int width, int height) {
        Log.e("TAG", "           SurfaceRenderer create       ");
        this.surface = surface;
        this.width = width;
        this.height = height;
        this.running = true;
        this.context = context;

        instance = (TextureViewMediaActivity)context;
        videoTextureTransform = new float[16];
        //the render thread is started by the caller via start() (SurfaceRenderer extends Thread)
    }

    @Override
    public void run() {
        initEGL();
        initGLComponents();

        deleteFrameBuffer();
        GLES20.glGenFramebuffers(1, fFrame, 0); //generate the framebuffer used by the callback pass
        EasyGlUtils.genTexturesWithParameter(1, fTexture, 0, GLES20.GL_RGBA, width, height);//and the texture it renders into

        Log.d(LOG_TAG, "OpenGL init OK. start draw...");

        while (running) {
            if (draw()) {
                //eglSwapBuffers swaps the EGL front/back buffers and publishes the frame.
                //Underneath it drives the native window's queueBuffer/dequeueBuffer:
                //queueBuffer hands the finished buffer (back -> front) to SurfaceFlinger,
                //dequeueBuffer fetches a fresh buffer to render the next frame into.
                egl.eglSwapBuffers(eglDisplay, eglSurface);
            }
        }

        deinitGLComponents();
        deinitEGL();
    }

    private void initEGL() {
        egl = (EGL10)EGLContext.getEGL();
        eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);

        int version[] = new int[2];
        egl.eglInitialize(eglDisplay, version); // initialize the display connection and get the EGL version

        EGLConfig eglConfig = chooseEglConfig();

        //wrap the Surface passed in from above in a native window surface
        eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, surface, null); // creates an EGLSurface (ANativeWindow) from the Surface

        eglContext = createContext(egl, eglDisplay, eglConfig);

        try {
            if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) {
                throw new RuntimeException("GL error:" + GLUtils.getEGLErrorString(egl.eglGetError()));
            }
            //bind EGLDisplay, EGLSurface and EGLContext together (make the context current on this surface);
            //once eglMakeCurrent succeeds, OpenGL calls on this thread render into the surface
            if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
                throw new RuntimeException("GL Make current Error"+ GLUtils.getEGLErrorString(egl.eglGetError()));
            }
        }catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void deinitEGL() {
        egl.eglMakeCurrent(eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT); 
        egl.eglDestroySurface(eglDisplay, eglSurface);
        egl.eglDestroyContext(eglDisplay, eglContext);
        egl.eglTerminate(eglDisplay);
        Log.d(LOG_TAG, "OpenGL deinit OK.");
    }

    //Create the EGL context; an EGLContext is the OpenGL ES rendering context, i.e. the GL state machine
    private EGLContext createContext(EGL10 egl, EGLDisplay eglDisplay, EGLConfig eglConfig) {
        //EGLContext attributes
        int[] attrs = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, // request an OpenGL ES 2.0 context
                EGL10.EGL_NONE
        };
        return egl.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrs); //create the context from the attribute list and chosen EGLConfig
    }

    //Choose an EGLConfig matching the attribute list below
    private EGLConfig chooseEglConfig() {
        int[] configsCount = new int[1];
        EGLConfig[] configs = new EGLConfig[1];
        int[] attributes = getAttributes();
        int confSize = 1;

        if (!egl.eglChooseConfig(eglDisplay, attributes, configs, confSize, configsCount)) {  
            throw new IllegalArgumentException("Failed to choose config:"+ GLUtils.getEGLErrorString(egl.eglGetError()));
        }
        else if (configsCount[0] > 0) {
            return configs[0];
        }

        return null;
    }

    //EGLConfig attributes
    private int[] getAttributes()
    {
        return new int[] {
                EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,  //renderable type: OpenGL ES 2.0
                EGL10.EGL_RED_SIZE, 8, // RGBA channel sizes
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_ALPHA_SIZE, 8,
                EGL10.EGL_DEPTH_SIZE, 0, // depth and stencil buffer sizes (not needed here)
                EGL10.EGL_STENCIL_SIZE, 0,
                EGL10.EGL_NONE
        };
    }

    public void onPause(){
        running = false;
    }

    @Override
    protected  void finalize() throws Throwable {
        super.finalize();
        running = false;
    }

    public  int mColorFlag=0;
    public  int xyFlag=0;
    public  int   mRatio;
    public  float ratio=0.5f;
    public  int textureHandle;
    public  int textureIdOne;
    private int gHWidth;
    private int gHHeight;
    private float[] matrix=new float[16];
    private float[] matrix0=new float[16];
    private float[] mModelMatrix=new float[16];
    private float[] mModelMatrix0=new float[16];

    //Mirror (flip) a matrix along the x and/or y axis
    public  float[] flip(float[] m,boolean x,boolean y){
        if(x||y){
            Matrix.scaleM(m,0,x?-1:1,y?-1:1,1);
        }
        return m;
    }

    public void setSize(){
        Matrix.setIdentityM(mModelMatrix,0);
        Matrix.setIdentityM(mModelMatrix0,0);

        matrix = flip(mModelMatrix, true, false);
        matrix0 = flip(mModelMatrix0, false, true);
    }

    private void setupGraphics()
    {
        final String vertexShader = HelpUtils.readTextFileFromRawResource(context, R.raw.vetext_sharder);
        final String fragmentShader = HelpUtils.readTextFileFromRawResource(context, R.raw.fragment_sharder);

        final int vertexShaderHandle = HelpUtils.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
        final int fragmentShaderHandle = HelpUtils.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
        shaderProgram = HelpUtils.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
                new String[]{"texture","vPosition","vTexCoordinate","textureTransform"});

        GLES20.glUseProgram(shaderProgram);
        textureParamHandle = GLES20.glGetUniformLocation(shaderProgram, "texture"); // external OES texture carrying the camera image
        textureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinate"); // per-vertex texture coordinates
        positionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition"); // vertex positions
        textureTranformHandle = GLES20.glGetUniformLocation(shaderProgram, "textureTransform");

        textureHandle = GLES20.glGetUniformLocation(shaderProgram, "texture0"); // sampler handle for the overlay (blend) texture
        mRatio = GLES20.glGetUniformLocation(shaderProgram, "mratio"); // blend factor

        gHWidth=GLES20.glGetUniformLocation(shaderProgram,"mWidth"); // viewport width and height
        gHHeight=GLES20.glGetUniformLocation(shaderProgram,"mHeight");

        GLES20.glUniform1i(gHWidth,width);
        GLES20.glUniform1i(gHHeight,height);

        setSize();
    }

    private void setupVertexBuffer()
    {
        // Draw-order (index) buffer
        ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
        dlb.order(ByteOrder.nativeOrder()); //use native byte order
        drawListBuffer = dlb.asShortBuffer();
        drawListBuffer.put(drawOrder);
        drawListBuffer.position(0);

        // Vertex position buffer
        ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
        bb.order(ByteOrder.nativeOrder()); //use native byte order

        vertexBuffer = bb.asFloatBuffer();
        vertexBuffer.put(squareCoords);
        vertexBuffer.position(0);
    }

    private void setupTexture()
    {
        ByteBuffer texturebb = ByteBuffer.allocateDirect(textureCoords.length * 4);
        texturebb.order(ByteOrder.nativeOrder());  // use native byte order
        textureBuffer = texturebb.asFloatBuffer();
        textureBuffer.put(textureCoords);
        textureBuffer.position(0);

        // Generate the actual texture
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // activate (select) texture unit 0
        GLES20.glGenTextures(1, textures, 0); // generate a texture id
        checkGlError("Texture generate");

        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]); //bind the id to a texture target; ES 2.0 targets include GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_CUBE_MAP and GLES11Ext.GL_TEXTURE_EXTERNAL_OES

        checkGlError("Texture bind");

        videoTexture = new SurfaceTexture(textures[0]); // create a SurfaceTexture backed by the OES texture id
        videoTexture.setOnFrameAvailableListener(this);
    }

    public int initTexture(int drawableId)
    {
        //generate a texture id
        int[] textures = new int[1];
        GLES20.glGenTextures(
                1,          //number of texture ids to generate
                textures,   //array receiving the ids
                0           //offset
        );
        int textureId = textures[0];
        Log.i(LOG_TAG, " initTexture textureId = " + textureId);

        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST); // minification/magnification filters: GL_LINEAR = linear filtering, GL_NEAREST = nearest-neighbor
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_CLAMP_TO_EDGE); // wrap mode for coordinates outside [0,1]; GL_CLAMP_TO_EDGE clamps them to the range, stretching the edge texels
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_CLAMP_TO_EDGE);

        //load the bitmap
        InputStream is = context.getResources().openRawResource(drawableId);
        Bitmap bitmapTmp;
        try {
            bitmapTmp = BitmapFactory.decodeStream(is);
        } finally {
            try {
                is.close();
            }
            catch(IOException e) {
                e.printStackTrace();
            }
        }
        //upload the bitmap into the texture
        GLUtils.texImage2D(
                GLES20.GL_TEXTURE_2D,   //texture target; GL_TEXTURE_2D for 2D textures in OpenGL ES
                0,                      //mipmap level, 0 = base image
                bitmapTmp,              //the bitmap
                0                       //border, must be 0
        );
        bitmapTmp.recycle();            //the bitmap can be recycled once uploaded
        return textureId;
    }
    protected boolean draw()
    {
        synchronized (this){
            if (frameAvailable) {
                videoTexture.updateTexImage(); // latch the newest camera frame into the GL_TEXTURE_EXTERNAL_OES texture before rendering it
                videoTexture.getTransformMatrix(videoTextureTransform); // fetch the SurfaceTexture's texture transform matrix
                frameAvailable = false;
            }
            else{
                return false;
            }
        }
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);  //clear color
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        //GL_COLOR_BUFFER_BIT clears the color buffer;
        //GL_DEPTH_BUFFER_BIT would reset all depth values to the maximum (usually the far plane)
        GLES20.glViewport(0, 0, width, height);
        drawTexture(xyFlag); // on-screen pass with the user-selected mirror mode

        callbackIfNeeded(); //offscreen pass + readback callback
        return true;
    }

    private void drawTexture(int mirrorFlag) {
        // Draw texture
        int mHProjMatrix=GLES20.glGetUniformLocation(shaderProgram,"uProjMatrix");
        GLES20.glUniformMatrix4fv(mHProjMatrix,1,false,matrix,0);

        int mHProjMatrix0=GLES20.glGetUniformLocation(shaderProgram,"uProjMatrix0");
        GLES20.glUniformMatrix4fv(mHProjMatrix0,1,false,matrix0,0);

        int mXyFlag = GLES20.glGetUniformLocation(shaderProgram, "xyFlag"); //mirror mode (x or y); the shader left-multiplies vertex positions by the matching matrix, e.g. uProjMatrix*vPosition
        GLES20.glUniform1i(mXyFlag, mirrorFlag);

        int mColorFlagHandle = GLES20.glGetUniformLocation(shaderProgram, "colorFlag"); // filter type: saturation/gray/warm/cool/magnify/blur/beauty/texture blend
        GLES20.glUniform1i(mColorFlagHandle, mColorFlag);

        //vertex attributes: position, color, normal, texture coordinates, ...
        GLES20.glEnableVertexAttribArray(positionHandle); // enable the vertex position attribute array
        GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer); // point it at the position data

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // select unit 0 before binding the camera texture
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]); // camera image texture
        GLES20.glUniform1i(textureParamHandle, 0);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIdOne); // overlay image texture
        GLES20.glUniform1i(textureHandle, 1);

        GLES20.glEnableVertexAttribArray(textureCoordinateHandle);
        GLES20.glVertexAttribPointer(textureCoordinateHandle, 4, GLES20.GL_FLOAT, false, 0, textureBuffer);

        GLES20.glUniformMatrix4fv(textureTranformHandle, 1, false, videoTextureTransform, 0); // transform matrix of the GL_TEXTURE_EXTERNAL_OES texture
        GLES20.glUniform1f(mRatio, ratio); // texture blend factor

        GLES20.glDrawElements(GLES20.GL_TRIANGLE_STRIP, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer); // draw the quad via the index buffer
        GLES20.glDisableVertexAttribArray(positionHandle);
        GLES20.glDisableVertexAttribArray(textureCoordinateHandle);
    }

    protected void initGLComponents() {
        setupVertexBuffer();
        setupTexture();
        setupGraphics();
        textureIdOne = initTexture(R.drawable.bg);

        Message message = new Message();
        message.what = 1; //tell the activity the GL side is ready so it can start the camera preview
        instance.myHandler.sendMessage(message);
    }

    protected void deinitGLComponents() {
        GLES20.glDeleteTextures(1, textures, 0);
        GLES20.glDeleteProgram(shaderProgram);
        videoTexture.setOnFrameAvailableListener(null); // detach the listener before releasing
        videoTexture.release();
    }

    public void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e("SurfaceTest", op + ": glError 0x" + Integer.toHexString(error)); // glGetError returns GL, not EGL, error codes
        }
    }

    public SurfaceTexture getVideoTexture() {
        return videoTexture;
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        synchronized (this){
            frameAvailable = true;
        }
    }
}
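
setFrameCallbackMatrix() above relies on a MatrixUtils helper that ships with the demo but is not listed in the post. A plausible minimal version of the two calls it uses (only the center-crop case is sketched; an orthographic center-crop projection plus an axis flip):

import android.opengl.Matrix;

public class MatrixUtils {
    public static final int TYPE_CENTERCROP = 0;

    //Build a projection that center-crops an imgWidth x imgHeight source
    //into a viewWidth x viewHeight target, preserving aspect ratio.
    public static void getMatrix(float[] matrix, int type, int imgWidth, int imgHeight,
                                 int viewWidth, int viewHeight) {
        if (imgWidth <= 0 || imgHeight <= 0 || viewWidth <= 0 || viewHeight <= 0) return;
        float[] projection = new float[16];
        float[] camera = new float[16];
        float whView = (float) viewWidth / viewHeight;
        float whImg = (float) imgWidth / imgHeight;
        if (whImg > whView) { // source relatively wider: crop left/right
            Matrix.orthoM(projection, 0, -whView / whImg, whView / whImg, -1, 1, 1, 3);
        } else {              // source relatively taller: crop top/bottom
            Matrix.orthoM(projection, 0, -1, 1, -whImg / whView, whImg / whView, 1, 3);
        }
        Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
        Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
    }

    //Mirror the matrix along x and/or y, same idea as SurfaceRenderer.flip()
    public static void flip(float[] m, boolean x, boolean y) {
        if (x || y) {
            Matrix.scaleM(m, 0, x ? -1 : 1, y ? -1 : 1, 1);
        }
    }
}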

Camera2 preview setup and MediaCodec/MediaMuxer video-encoding setup:

package com.vr.jarry.playvideo_texuture;

import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.support.v4.app.ActivityCompat;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
import android.view.Window;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.Toast;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

import com.vr.jarry.playvideo_texuture.SurfaceRenderer.OnFrameCallback;

public class TextureViewMediaActivity extends Activity implements OnFrameCallback, TextureView.SurfaceTextureListener{
    private static final String TAG = "GLViewMediaActivity";
    private boolean clickFlag = false;
    public static final String videoPath = Environment.getExternalStorageDirectory()+"/live.mp4";

    private SurfaceRenderer videoRenderer;
    private Button btn_shutter, btn_mirror, btn_color;
    ImageView imagView;

    Surface mEncoderSurface;
    BufferedOutputStream outputStream;
    private MediaCodec mCodec, mDecodec;
    boolean isEncode = false;
    private MediaMuxer mMuxer;
    TextureView mPreviewView;
    CameraCaptureSession mSession;
    CaptureRequest.Builder mPreviewBuilder;
    public CameraDevice mCameraDevice;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.activity_main_0);

        mPreviewView = (TextureView) findViewById(R.id.id_textureview);
        mPreviewView.setSurfaceTextureListener(this);

        imagView = (ImageView) findViewById(R.id.id_textureview0);

        SeekBar seekBar = (SeekBar) findViewById(R.id.id_seekBar);
        seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                if(videoRenderer != null) {
                    videoRenderer.ratio = progress/100.0f;
                }
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {}

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {}
        });

        btn_color = (Button) findViewById(R.id.btn_color);
        btn_shutter = (Button) findViewById(R.id.btn_shutter);
        btn_mirror = (Button) findViewById(R.id.btn_mirror);
        Button btn_play = (Button) findViewById(R.id.btn_play);

        btn_play.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                File f = new File(mOutputPath); 
                if(f.exists() && mVideoTrack==-1){
                   Log.e(TAG, "       play video     ");
                   Intent intent = new Intent(Intent.ACTION_VIEW);
                   intent.setDataAndType(Uri.parse(mOutputPath), "video/mp4"); //mOutputPath is the muxed mp4 written below
                   startActivity(intent);
                }else {
                   Log.e(TAG, "       can not play video     ");
                   if(!f.exists()) {
                       Toast.makeText(TextureViewMediaActivity.this, "Video file not exists!", Toast.LENGTH_SHORT).show();
                   }else {
                       if(mVideoTrack != -1) {  
                          Toast.makeText(TextureViewMediaActivity.this, "Video record not stop!", Toast.LENGTH_SHORT).show();
                       }
                   }
                }
            }
        });

        btn_shutter.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                clickFlag = !clickFlag;
                if(clickFlag) {
                    if(cameraFlag) {
                        Toast.makeText(TextureViewMediaActivity.this, "Start Record!", Toast.LENGTH_SHORT).show();
                        btn_shutter.setText("Stop");

                        try {
                            cameraManager.openCamera(CameraIdList[0], mCameraDeviceStateCallback, null);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }

                        startCodec();
                    }else {
                        Toast.makeText(TextureViewMediaActivity.this, "No camera permission!", Toast.LENGTH_SHORT).show();
                    }
                }else {

                    btn_shutter.setText("Start");

                    videoRenderer.running = false;
                    try {
                        videoRenderer.join();
                        Log.e(TAG, "       videoRenderer stop     ");
                    } catch (InterruptedException e) {
                         e.printStackTrace();
                    }

                    if (mCameraDevice != null) {
                        mCameraDevice.close();
                        mCameraDevice = null;
                    }

                    stopCodec();

                    Toast.makeText(TextureViewMediaActivity.this, "Stop Record!", Toast.LENGTH_SHORT).show();
                    /*
                    try {
                        mSession.stopRepeating();
                        mPreviewBuilder.removeTarget(surface);
                        mPreviewBuilder.removeTarget(surface0);
                        surface.release();
                        surface0.release();
                        surface  = null;
                        surface0 = null;        
                        mSession.close();
                        Log.e(TAG, "       mSession stop     ");
                    } catch (CameraAccessException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    */
                }
            }
        });

        btn_color.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                if(videoRenderer != null) {
                    if(videoRenderer.mColorFlag == 0) {
                        videoRenderer.mColorFlag = 7;
                        Toast.makeText(TextureViewMediaActivity.this, "Saturation adjust!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag == 7) {
                        videoRenderer.mColorFlag = 1;
                        Toast.makeText(TextureViewMediaActivity.this, "Gray Color!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag == 1) {
                        videoRenderer.mColorFlag = 2;
                        Toast.makeText(TextureViewMediaActivity.this, "Warm Color!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag == 2){
                        videoRenderer.mColorFlag = 3;
                        Toast.makeText(TextureViewMediaActivity.this, "Cool Color!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag == 3){
                        videoRenderer.mColorFlag = 4;
                        Toast.makeText(TextureViewMediaActivity.this, "Amplify!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag == 4){
                        videoRenderer.mColorFlag = 5;
                        Toast.makeText(TextureViewMediaActivity.this, "Vague!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag == 5){
                        videoRenderer.mColorFlag = 6;
                        Toast.makeText(TextureViewMediaActivity.this, "Beauty!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag ==6){
                        videoRenderer.mColorFlag = 0;
                        Toast.makeText(TextureViewMediaActivity.this, "Orignal Color!", Toast.LENGTH_SHORT).show();
                    }
                }
            }
        });

        btn_mirror.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                if(videoRenderer != null) {
                    if(videoRenderer.xyFlag == 0) {
                        Toast.makeText(TextureViewMediaActivity.this, "X Mirror!", Toast.LENGTH_SHORT).show();
                        videoRenderer.xyFlag = 1;
                    }else if(videoRenderer.xyFlag == 1){
                        videoRenderer.xyFlag = 2;
                        Toast.makeText(TextureViewMediaActivity.this, "Y Mirror!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.xyFlag == 2) {
                        videoRenderer.xyFlag = 0;
                        Toast.makeText(TextureViewMediaActivity.this, "Normal!", Toast.LENGTH_SHORT).show();
                    }
                }
            }
        });
    }

    public Handler myHandler = new Handler() {  
        public void handleMessage(Message msg) {   
             switch (msg.what) {   
                  case 1:
                      Log.d(TAG,"  videoTexture created!   ");
                      try {
                        startPreview(mCameraDevice);
                      } catch (CameraAccessException e) {
                        e.printStackTrace();
                      }
                      break; 
                  case 2:
                      Log.d(TAG,"  vidoe  play!   ");
                      Intent intent = new Intent(Intent.ACTION_VIEW);
                      intent.setDataAndType(Uri.parse(mOutputPath), "video/mp4");
                      startActivity(intent);
                      break; 
                  default :
                      break;
             }   
             super.handleMessage(msg);   
        }   
    }; 

    Bitmap bitmap; //created in startCodec() so its size matches the callback frames

    @Override
    public void onFrame(byte[] bytes, long time) {
        ByteBuffer b=ByteBuffer.wrap(bytes);
        bitmap.copyPixelsFromBuffer(b);
        //imagView.setImageBitmap(bitmap);

        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                imagView.setImageBitmap(bitmap);
            }
        });
    }

    protected String getSD(){
        return Environment.getExternalStorageDirectory().getAbsolutePath();
    }

    //Save a bitmap as a JPEG into the /photo directory on external storage
    public void saveBitmap(Bitmap b){
        String path =  getSD()+ "/photo/";
        File folder=new File(path);
        if(!folder.exists()&&!folder.mkdirs()){
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toast.makeText(TextureViewMediaActivity.this, "can not save!", Toast.LENGTH_SHORT).show();
                }
            });
            return;
        }
        long dataTake = System.currentTimeMillis();
        final String jpegName=path+ dataTake +".jpg";
        try {
            FileOutputStream fout = new FileOutputStream(jpegName);
            BufferedOutputStream bos = new BufferedOutputStream(fout);
            b.compress(Bitmap.CompressFormat.JPEG, 100, bos);
            bos.flush();
            bos.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Toast.makeText(TextureViewMediaActivity.this, "save->"+jpegName, Toast.LENGTH_SHORT).show();
            }
        });

    }

    private String path = Environment.getExternalStorageDirectory() + "/mcodecv26.264";
    private String mOutputPath = Environment.getExternalStorageDirectory() + "/mcodecmux26.mp4";

    public void startCodec() {
        for(int i=0; i<2; i++) {
            File f = null;
            if(i == 0) {
                f = new File(path); 
            }else if(i == 1) {
                f = new File(mOutputPath);  
            }
            if(!f.exists()){    
                try {
                    f.createNewFile();
                    Log.e(TAG, "       create a file     ");
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }else{
                if(f.delete()){
                    try {
                        f.createNewFile();
                        Log.e(TAG, "      delete and create a file    ");
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            if(i == 0) {
                try {
                    outputStream = new BufferedOutputStream(new FileOutputStream(f));
                    Log.i(TAG, "outputStream initialized");
                } catch (Exception e){ 
                     e.printStackTrace();
                }
            }
        }
        try {
            mCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            //mCodec = MediaCodec.createByCodecName(codecInfo.getName());
        } catch (IOException e) {
            e.printStackTrace();
        }
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, mPreviewView.getWidth(), mPreviewView.getHeight());

        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 500000);//500kbps  
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);  
        //mediaFormat.setInteger("bitrate-mode", 2);
        //mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); //Surface input mode: the encoder pulls frames drawn into its input Surface
        //mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, 
        //      MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);  //COLOR_FormatYUV420Planar
        //mediaFormat.setInteger(MediaFormat.KEY_ROTATION, 90);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); 
        mCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 
        mEncoderSurface = mCodec.createInputSurface(); //create the encoder's input Surface; the GL renderer draws frames into it

        mCodec.setCallback(new EncoderCallback());
        mCodec.start();

        try {
            mMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            e.printStackTrace();
        }   

        bitmap = Bitmap.createBitmap(mPreviewView.getWidth(), mPreviewView.getHeight(), Bitmap.Config.ARGB_8888); //sized to match the callback frames (see onFrame)
        videoRenderer = new SurfaceRenderer(TextureViewMediaActivity.this, mEncoderSurface,
                mPreviewView.getWidth(), mPreviewView.getHeight());
        //register the readback callback used for the ImageView display
        videoRenderer.setOnFrameCallback(mPreviewView.getWidth(), mPreviewView.getHeight(), TextureViewMediaActivity.this);
        videoRenderer.start();
        videoRenderer.mColorFlag = 4;
    }

    public void stopCodec() {
        try {
            new Thread() {
               @Override 
               public void run(){
                   //mCodec.setCallback(null);
                   //busy-wait until the in-flight output callback (endFlag) finishes, then tear down the encoder and muxer
                   while(mCodec != null) {
                     if(!endFlag) {
                       Log.e(TAG, "       stopCodec start     ");
                       //mCodec.flush();
                       //mCodec.setCallback(null);
                       mCodec.stop();
                       mCodec.release();
                       mCodec = null;

                       mMuxer.stop();
                       mMuxer.release();
                       mMuxer=null;

                       mVideoTrack=-1;
                       Log.i(TAG, "     stopCodec end    ");
                     }
                  }
                  //Message message = new Message();   
                  //message.what = 2;     
                  //myHandler.sendMessage(message); 
               }
            }.start();
        } catch (Exception e) {
            e.printStackTrace();
            mCodec = null;
        }
    }
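
    //A cleaner shutdown than the endFlag busy-wait above (a sketch, not wired into the UI):
    //signal end-of-stream on the encoder's input Surface, let onOutputBufferAvailable
    //observe BUFFER_FLAG_END_OF_STREAM, then release the codec and muxer.
    //signalEndOfInputStream() is only valid when the input is a createInputSurface() Surface.
    private void drainAndStopCodec() {
        if (mCodec == null) {
            return;
        }
        mCodec.signalEndOfInputStream(); // EOS propagates through to the output callback
        //here one would block on e.g. a CountDownLatch that the callback counts down
        //when it sees BUFFER_FLAG_END_OF_STREAM, then:
        mCodec.stop();
        mCodec.release();
        mCodec = null;
        if (mMuxer != null) {
            mMuxer.stop();
            mMuxer.release();
            mMuxer = null;
        }
        mVideoTrack = -1;
    }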

    int mCount = 0;
    private int mVideoTrack=-1;
    private boolean isMuxStarted=false;  
    boolean endFlag = false; 
    private class EncoderCallback extends MediaCodec.Callback{
        @Override
        public void onInputBufferAvailable(MediaCodec codec, int index) {
           //  
        }

        @Override
        public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
            endFlag = true;
            ByteBuffer outPutByteBuffer = mCodec.getOutputBuffer(index);
            byte[] outDate = new byte[info.size];
            outPutByteBuffer.get(outDate);

            try {
                //Log.d(TAG, " outDate.length : " + outDate.length);
                outputStream.write(outDate, 0, outDate.length);
            } catch (IOException e) {
                e.printStackTrace();
            } 
            if(isMuxStarted && info.size>0 && info.presentationTimeUs>0){ //pts>0 also skips the codec-config buffer; its CSD already travels in the track format
                mMuxer.writeSampleData(mVideoTrack, outPutByteBuffer, info);
            }
            mCodec.releaseOutputBuffer(index, false); 

            if((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0){ //flags is a bit field, so test with a mask
                Log.d(TAG, "CameraRecorder get video encode end of stream");
            }
            endFlag = false;
        }

        @Override
        public void onError(MediaCodec codec, MediaCodec.CodecException e) {
            Log.d(TAG, "Error: " + e);
        }

        @Override
        public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
            Log.d(TAG, "encoder output format changed: " + format);
            Log.d(TAG, "encoder output format changed: " + format);
            mVideoTrack=mMuxer.addTrack(codec.getOutputFormat());
            mMuxer.start();
            isMuxStarted=true;
        }
    }

    CameraManager cameraManager;
    String[] CameraIdList;
    boolean cameraFlag = false;
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
        try {
            Log.i(TAG, "onSurfaceTextureAvailable:  width = " + width + ", height = " + height);
            CameraIdList = cameraManager.getCameraIdList(); 
            CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(CameraIdList[0]);
            characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                cameraFlag = false;
                return;
            }else {
                cameraFlag = true;
            }
            //cameraManager.openCamera(CameraIdList[0], mCameraDeviceStateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {}

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {}


    private CameraDevice.StateCallback mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            Log.i(TAG, "       CameraDevice.StateCallback  onOpened            ");
            mCameraDevice = camera;
            //startPreview(camera);
        }

        @Override
        public void onDisconnected(CameraDevice camera) {
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
        }

        @Override
        public void onError(CameraDevice camera, int error) {}
    };

    Surface surface;
    Surface surface0;

    private void startPreview(CameraDevice camera) throws CameraAccessException {
        SurfaceTexture texture = mPreviewView.getSurfaceTexture();
        texture.setDefaultBufferSize(mPreviewView.getWidth(), mPreviewView.getHeight());
        surface = new Surface(texture);

        surface0 = new Surface(videoRenderer.getVideoTexture());

        Log.i(TAG, "      startPreview          ");
        try {
            mPreviewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); //CameraDevice.TEMPLATE_STILL_CAPTURE
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        mPreviewBuilder.addTarget(surface);
        mPreviewBuilder.addTarget(surface0);

        camera.createCaptureSession(Arrays.asList(surface, surface0), mSessionStateCallback, null);
    }

    private CameraCaptureSession.StateCallback mSessionStateCallback = new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(CameraCaptureSession session) {
            try {
                Log.i(TAG, "      onConfigured          ");
                //session.capture(mPreviewBuilder.build(), mSessionCaptureCallback, mHandler);
                mSession = session;
                mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                mPreviewBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

                session.setRepeatingRequest(mPreviewBuilder.build(), null, null); //null
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onConfigureFailed(CameraCaptureSession session) {}
    };

    int callback_time;
    private CameraCaptureSession.CaptureCallback mSessionCaptureCallback =new CameraCaptureSession.CaptureCallback() {
         @Override
         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
             //Toast.makeText(TextureViewMediaActivity.this, "picture success閿涳拷", Toast.LENGTH_SHORT).show();
             callback_time++;
             Log.i(TAG, "    CaptureCallback =  "+callback_time);
         }

         @Override
         public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) {
             Toast.makeText(TextureViewMediaActivity.this, "picture failed閿涳拷", Toast.LENGTH_SHORT).show();
         }
    };

    @Override
    protected void onResume() {
        super.onResume();
        Log.v(TAG, "GLViewMediaActivity::onResume()");
    }


    @Override protected void onStart(){
        Log.v(TAG, "GLViewMediaActivity::onStart()");
        super.onStart();
    }

    @Override
    protected void onPause() {
        Log.v(TAG, "GLViewMediaActivity::onPause()");
        super.onPause();
    }

    @Override 
    protected void onStop(){
        Log.v(TAG, "GLViewMediaActivity::onStop()");
        super.onStop();
    }

    @Override 
    protected void onDestroy(){
        Log.v(TAG, "GLViewMediaActivity::onDestroy()");
        super.onDestroy();
        if(mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
    }
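
    //onSurfaceTextureAvailable() only checks the camera permission and gives up when it
    //is missing; it never requests it. A minimal runtime request could look like this
    //(a sketch: REQUEST_CAMERA is an arbitrary request code; WRITE_EXTERNAL_STORAGE is
    //included because the demo writes the .264/.mp4 files and JPEGs to external storage):
    private static final int REQUEST_CAMERA = 1;

    private void requestCameraPermission() {
        ActivityCompat.requestPermissions(this,
                new String[]{ Manifest.permission.CAMERA,
                        Manifest.permission.WRITE_EXTERNAL_STORAGE },
                REQUEST_CAMERA);
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == REQUEST_CAMERA && grantResults.length > 0
                && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            cameraFlag = true; // camera may now be opened on the next shutter press
        }
    }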

}

Demo screenshots:

(Four demo screenshots omitted.)
