Android OpenGL Development in Practice - Processing YUV Data with GLSurfaceView

1. Creating the OpenGL ES Environment

Declare OpenGL ES in the manifest

For your application to be able to use the OpenGL ES 2.0 API, you must add the following declaration to your manifest:
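
<!-- Standard OpenGL ES 2.0 requirement declaration (per the Android developer guide) -->
<uses-feature android:glEsVersion="0x00020000" android:required="true" />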


Build the GLSurfaceView object
class MyGLSurfaceView(context: Context, attributeSet: AttributeSet?) : GLSurfaceView(context, attributeSet) {
    companion object {
        private const val TAG = "MyGLSurfaceView"
    }

    constructor(context: Context) : this(context, null)

    private val renderer: MyGLRenderer

    init {

        // Create an OpenGL ES 2.0 context
        setEGLContextClientVersion(2)

        renderer = MyGLRenderer()

        // Set the Renderer for drawing on the GLSurfaceView
        setRenderer(renderer)

        // Render the view only when there is a change in the drawing data
        renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
    }
    
    /**
     * Set the display orientation
     * @param degrees display rotation angle (counter-clockwise); valid values are 0, 90, 180 and 270
     */
    fun setDisplayOrientation(degrees: Int) {
        renderer.setDisplayOrientation(degrees)
    }
    
    /**
     * Set the width and height of the YUV data to render
     * @param width width in pixels
     * @param height height in pixels
     */
    fun setYuvDataSize(width: Int, height: Int) {
        Log.d(TAG, "setYuvDataSize $width * $height")
        renderer.setYuvDataSize(width, height)
    }

    /**
     * Feed a frame of preview data in YUV format
     * @param yuvData the YUV data
     * @param type YUV format: 0 -> I420  1 -> NV12  2 -> NV21
     */
    fun feedData(yuvData: ByteArray?, type: Int = 0) {
        if (yuvData == null) {
            return
        }
        renderer.feedData(yuvData, type)
        // Request a render pass for the new YUV frame
        requestRender()
    }
}

Key steps:
1. Specify the OpenGL ES context version
2. Set the Renderer that performs the drawing
3. Set the render mode to RENDERMODE_WHEN_DIRTY, so a redraw is triggered only after requestRender() is called
4. Pass in the width and height of the YUV data
5. Feed the YUV data to render (I420, NV12 or NV21)
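
GLSurfaceView also expects the host Activity to pause and resume its rendering thread. A minimal sketch of that wiring (my addition, not from the original article; the hypothetical PreviewActivity and activity_preview layout are assumed, with openGlSurface being the view id from the layout shown in section 3):

// Sketch: forwarding the Activity lifecycle so the GL rendering thread pauses and resumes with the Activity.
class PreviewActivity : AppCompatActivity(R.layout.activity_preview) {
    private val openGlSurface: MyGLSurfaceView by lazy { findViewById(R.id.openGlSurface) }

    override fun onResume() {
        super.onResume()
        openGlSurface.onResume()   // resume the GL thread
    }

    override fun onPause() {
        openGlSurface.onPause()    // pause the GL thread
        super.onPause()
    }
}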

Build the Renderer class
class MyGLRenderer : GLSurfaceView.Renderer {
    companion object {
        private const val TAG = "MyGLRenderer"
    }

    private lateinit var mProgram: MyGLProgram
    // GLSurfaceView width
    private var mScreenWidth: Int = 0
    // GLSurfaceView height
    private var mScreenHeight: Int = 0
    // Width of the incoming YUV data
    private var mVideoWidth: Int = 0
    // Height of the incoming YUV data
    private var mVideoHeight: Int = 0

    // vPMatrix is an abbreviation for "Model View Projection Matrix"
    private val vPMatrix = FloatArray(16)
    private val projectionMatrix = FloatArray(16)
    private val viewMatrix = FloatArray(16)

    // Y-plane data
    private var y: ByteBuffer = ByteBuffer.allocate(0)
    // U-plane data
    private var u: ByteBuffer = ByteBuffer.allocate(0)
    // V-plane data
    private var v: ByteBuffer = ByteBuffer.allocate(0)
    // Interleaved UV-plane data
    private var uv: ByteBuffer = ByteBuffer.allocate(0)

    // YUV format: 0 -> I420  1 -> NV12  2 -> NV21
    private var type: Int = 0
    // Whether the GLSurfaceView surface is ready
    private var hasVisibility = false

    //  Called once to set up the view's OpenGL ES environment.
    override fun onSurfaceCreated(unused: GL10, config: EGLConfig) {
        // Set the background frame color
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f)

        // Set up the OpenGL ES program (shaders, textures, etc.)
        mProgram = MyGLProgram()
    }

    //  Called if the geometry of the view changes, for example when the device's screen orientation changes.
    override fun onSurfaceChanged(unused: GL10, width: Int, height: Int) {
        GLES20.glViewport(0, 0, width, height)

        mScreenWidth = width
        mScreenHeight = height

        val ratio: Float = width.toFloat() / height.toFloat()

        // this projection matrix is applied to object coordinates
        // in the onDrawFrame() method
        Matrix.frustumM(projectionMatrix, 0, -ratio, ratio, -1f, 1f, 3f, 7f)

        // Set the camera position (View matrix)
        Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 1.0f, 0.0f, 0.0f)
        
        if (mVideoWidth > 0 && mVideoHeight > 0) {
            createBuffers(mVideoWidth, mVideoHeight)
        }
        hasVisibility = true
        Log.d(TAG, "onSurfaceChanged width:$width * height:$height")
    }

    //  Called for each redraw of the view.
    override fun onDrawFrame(unused: GL10) {
        synchronized(this) {
            if (y.capacity() > 0) {
                y.position(0)
                if (type == 0) {
                    u.position(0)
                    v.position(0)
                    mProgram.feedTextureWithImageData(y, u, v, mVideoWidth, mVideoHeight)
                } else {
                    uv.position(0)
                    mProgram.feedTextureWithImageData(y, uv, mVideoWidth, mVideoHeight)
                }
                // Redraw background color
                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)

                // Calculate the projection and view transformation
                Matrix.multiplyMM(vPMatrix, 0, projectionMatrix, 0, viewMatrix, 0)

                try {
                    mProgram.drawTexture(vPMatrix, type)
                } catch (e: Exception) {
                    Log.w(TAG, "drawTexture failed", e)
                }
            }
        }
    }

    /**
     * Set the display orientation
     * @param degrees display rotation angle (counter-clockwise); valid values are 0, 90, 180 and 270
     */
    fun setDisplayOrientation(degrees: Int) {
        // Set the camera position (View matrix)
        if (degrees == 0) {
            Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 1.0f, 0.0f, 0.0f)
        } else if (degrees == 90) {
            Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 0.0f, 1.0f, 0.0f)
        } else if (degrees == 180) {
            Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, -1.0f, 0.0f, 0.0f)
        } else if (degrees == 270) {
            Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 0.0f, -1.0f, 0.0f)
        } else {
            Log.e(TAG, "degrees pram must be in (0, 90, 180, 270) ")
        }
    }
    
    /**
     * Set the width and height of the YUV data to render
     * @param width width in pixels
     * @param height height in pixels
     */
    fun setYuvDataSize(width: Int, height: Int) {
        if (width > 0 && height > 0) {
            // Adjust the quad so the frame keeps its aspect ratio
            createBuffers(width, height)

            // (Re)allocate the plane buffers whenever the size changes
            if (width != mVideoWidth || height != mVideoHeight) {
                this.mVideoWidth = width
                this.mVideoHeight = height
                val yarraySize = width * height
                val uvarraySize = yarraySize / 4
                synchronized(this) {
                    y = ByteBuffer.allocate(yarraySize)
                    u = ByteBuffer.allocate(uvarraySize)
                    v = ByteBuffer.allocate(uvarraySize)
                    uv = ByteBuffer.allocate(uvarraySize * 2)
                }
            }
        }
    }

    /**
     * Adjust the scale of the rendered quad to match the YUV frame's aspect ratio
     * @param width YUV data width
     * @param height YUV data height
     */
    private fun createBuffers(width: Int, height: Int) {
        if (mScreenWidth > 0 && mScreenHeight > 0) {
            val f1 = mScreenHeight.toFloat() / mScreenWidth.toFloat()
            val f2 = height.toFloat() / width.toFloat()
            if (f1 == f2) {
                mProgram.createBuffers(MyGLProgram.squareVertices)
            } else if (f1 < f2) {
                val widthScale = f1 / f2
                mProgram.createBuffers(
                    floatArrayOf(-widthScale, -1.0f, widthScale, -1.0f, -widthScale, 1.0f, widthScale, 1.0f)
                )
            } else {
                val heightScale = f2 / f1
                mProgram.createBuffers(
                    floatArrayOf(-1.0f, -heightScale, 1.0f, -heightScale, -1.0f, heightScale, 1.0f, heightScale)
                )
            }
        }
    }

    /**
     * Feed a frame of YUV data for preview
     * @param yuvdata the YUV data
     * @param type YUV format: 0 -> I420  1 -> NV12  2 -> NV21
     */
    fun feedData(yuvdata: ByteArray, type: Int = 0) {
        synchronized(this) {
            if (hasVisibility) {
                this.type = type
                if (type == 0) {
                    y.clear()
                    u.clear()
                    v.clear()
                    y.put(yuvdata, 0, mVideoWidth * mVideoHeight)
                    u.put(yuvdata, mVideoWidth * mVideoHeight, mVideoWidth * mVideoHeight / 4)
                    v.put(yuvdata, mVideoWidth * mVideoHeight * 5 / 4, mVideoWidth * mVideoHeight / 4)
                } else {
                    y.clear()
                    uv.clear()
                    y.put(yuvdata, 0, mVideoWidth * mVideoHeight)
                    uv.put(yuvdata, mVideoWidth * mVideoHeight, mVideoWidth * mVideoHeight / 2)
                }
            }
        }
    }
}
Rendering flow:

1. onSurfaceCreated(): set up the OpenGL ES environment - load the shaders, link the program, generate texture handles and perform other one-time initialization
2. onSurfaceChanged(): set the position and size of the render area and compute the transformation matrices
3. onDrawFrame(): called on every redraw:

  • GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT) clears the background color
  • the textured quad is drawn
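
feedData() above slices the packed byte array at fixed offsets. As a worked illustration of that layout (my own helper, not part of the original project), the plane sizes and offsets for a 4:2:0 frame are:

// Illustrative helper (not in the original code): plane sizes and offsets for a
// width x height 4:2:0 frame, matching the slicing done in MyGLRenderer.feedData().
fun describePlanes(width: Int, height: Int) {
    val ySize = width * height        // e.g. 1280 * 720 = 921_600 bytes
    val chromaSize = ySize / 4        // each of U and V: 230_400 bytes
    // I420 layout: [ Y | U | V ]
    println("I420: Y at 0 ($ySize bytes), U at $ySize ($chromaSize bytes), V at ${ySize + chromaSize} ($chromaSize bytes)")
    // NV12 / NV21 layout: [ Y | interleaved UV (or VU) ]
    println("NV12/NV21: Y at 0 ($ySize bytes), UV at $ySize (${chromaSize * 2} bytes)")
}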

2. The Program

Load the shader program
/**
 * Load and compile a shader
 * @param type GLES20.GL_VERTEX_SHADER -> vertex shader
 *              GLES20.GL_FRAGMENT_SHADER -> fragment shader
 * @param shaderCode the shader source code
 */
fun loadShader(type: Int, shaderCode: String): Int {

    // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
    // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
    return GLES20.glCreateShader(type).also { shader ->

        // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode)
        GLES20.glCompileShader(shader)
    }
}
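
loadShader() does not check whether compilation actually succeeded. An optional helper along these lines could report failures (my own sketch, not part of the original code; the "ShaderLoader" tag is just a placeholder):

// Optional addition: query the compile status and log the driver's info log on failure.
fun checkCompileStatus(shader: Int): Boolean {
    val compiled = IntArray(1)
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0)
    if (compiled[0] != GLES20.GL_TRUE) {
        Log.w("ShaderLoader", "Shader compile failed: ${GLES20.glGetShaderInfoLog(shader)}")
        GLES20.glDeleteShader(shader)
        return false
    }
    return true
}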
Vertex Shader Code
/**
 * Vertex shader program
 * The vertex shader runs once per vertex; it transforms each vertex through the different coordinate
 * spaces to determine its final position. It can also pass data on to the fragment shader, such as
 * texture coordinates, vertex coordinates and transformation matrices.
 */
const val vertexShaderCode =
    "uniform mat4 uMVPMatrix;" +
            "attribute vec4 vPosition;" +
            "attribute vec2 texCoord;" +
            "varying vec2 tc;" +
            "void main() {" +
            "  gl_Position = uMVPMatrix * vPosition;" +
            "  tc = texCoord;" +
            "}"
  • uMVPMatrix: the vertex transformation (Model View Projection) matrix
  • vPosition: vertex position
  • texCoord: texture coordinate
  • tc: the texture coordinate passed from the vertex shader to the fragment shader
Fragment Shader Code
/**
 * Fragment shader program
 * The fragment shader runs once per fragment (pixel); interpolated values determine each pixel's final color.
 */
const val fragmentShaderCode =
    "precision mediump float;" +
            "uniform sampler2D samplerY;" +
            "uniform sampler2D samplerU;" +
            "uniform sampler2D samplerV;" +
            "uniform sampler2D samplerUV;" +
            "uniform int yuvType;" +
            "varying vec2 tc;" +
            "void main() {" +
            "  vec4 c = vec4((texture2D(samplerY, tc).r - 16./255.) * 1.164);" +
            "  vec4 U; vec4 V;" +
            "  if (yuvType == 0){" +
            // Each I420 plane is uploaded as a single-channel texture, so the sampled r, g, b and a all hold the same value
            "    U = vec4(texture2D(samplerU, tc).r - 128./255.);" +
            "    V = vec4(texture2D(samplerV, tc).r - 128./255.);" +
            "  } else if (yuvType == 1){" +
            // NV12 has two planes; the UV plane is uploaded as GL_LUMINANCE_ALPHA, so U lands in r/g/b and V in a
            "    U = vec4(texture2D(samplerUV, tc).r - 128./255.);" +
            "    V = vec4(texture2D(samplerUV, tc).a - 128./255.);" +
            "  } else {" +
            // NV21 is also two-plane but stores V first, so V lands in r/g/b and U in a
            "    U = vec4(texture2D(samplerUV, tc).a - 128./255.);" +
            "    V = vec4(texture2D(samplerUV, tc).r - 128./255.);" +
            "  } " +
            "  c += V * vec4(1.596, -0.813, 0, 0);" +
            "  c += U * vec4(0, -0.392, 2.017, 0);" +
            "  c.a = 1.0;" +
            "  gl_FragColor = c;" +
            "}"
  • samplerY/U/V/UV: sampler2D uniforms used to sample the Y/U/V/UV planes of the YUV data
  • yuvType: the YUV format -> 0 means I420, 1 means NV12, 2 means NV21
  • tc: the texture coordinate passed in from the vertex shader
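
The shader applies a BT.601 video-range conversion (Y scaled by 1.164 after subtracting 16/255, chroma centered at 128/255). As a plain-Kotlin illustration of the same arithmetic (my own sketch, not part of the project), converting a single YUV triple to RGB looks like this:

// CPU-side version of the shader's BT.601 (video range) math, for reference only.
// y, u and v are byte values in 0..255; the returned channels are clamped to 0..1.
fun yuvToRgb(y: Int, u: Int, v: Int): FloatArray {
    val yf = (y - 16) / 255f * 1.164f
    val uf = (u - 128) / 255f
    val vf = (v - 128) / 255f
    val r = (yf + 1.596f * vf).coerceIn(0f, 1f)
    val g = (yf - 0.813f * vf - 0.392f * uf).coerceIn(0f, 1f)
    val b = (yf + 2.017f * uf).coerceIn(0f, 1f)
    return floatArrayOf(r, g, b)
}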
The main class responsible for texture rendering
class MyGLProgram {
    companion object {
        private const val TAG = "MyGLProgram"
        var squareVertices = floatArrayOf(-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f) // full-screen quad
    }

    private var mProgram: Int

    private var mPlanarTextureHandles = IntBuffer.wrap(IntArray(3))
    private val mSampleHandle = IntArray(3)
    // handles
    private var mPositionHandle = -1
    private var mCoordHandle = -1
    private var mVPMatrixHandle: Int = -1

    // vertices buffer
    private var mVertexBuffer: FloatBuffer? = null
    private var mCoordBuffer: FloatBuffer? = null
    // whole-texture
    private val mCoordVertices = floatArrayOf(0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f)

    init {
        val vertexShader: Int = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode)
        val fragmentShader: Int = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode)
        Log.d(TAG, "vertexShader = $vertexShader \n fragmentShader = $fragmentShader")

        // create empty OpenGL ES Program
        mProgram = GLES20.glCreateProgram().also {
            checkGlError("glCreateProgram")
            // add the vertex shader to program
            GLES20.glAttachShader(it, vertexShader)

            // add the fragment shader to program
            GLES20.glAttachShader(it, fragmentShader)

            // creates OpenGL ES program executables
            GLES20.glLinkProgram(it)
        }

        val linkStatus = IntArray(1)
        GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linkStatus, 0)
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.w(TAG, "Could not link program: ${GLES20.glGetProgramInfoLog(mProgram)}")
            GLES20.glDeleteProgram(mProgram)
            mProgram = 0
        }

        Log.d(TAG, "mProgram = $mProgram")

        checkGlError("glCreateProgram")

        // Generate the texture handles
        GLES20.glGenTextures(3, mPlanarTextureHandles)

        checkGlError("glGenTextures")
    }

    /**
     * Draw the textured quad
     * @param mvpMatrix the Model View Projection matrix
     * @param type the YUV format type
     */
    fun drawTexture(mvpMatrix: FloatArray, type: Int) {

        GLES20.glUseProgram(mProgram)
        checkGlError("glUseProgram")
        /*
         * Get handles for "vPosition" and "texCoord"
         */
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition").also {
            GLES20.glVertexAttribPointer(it, 2, GLES20.GL_FLOAT, false, 8, mVertexBuffer)
            GLES20.glEnableVertexAttribArray(it)
        }

        // Pass the texture coordinates to the fragment shader
        mCoordHandle = GLES20.glGetAttribLocation(mProgram, "texCoord").also {
            GLES20.glVertexAttribPointer(it, 2, GLES20.GL_FLOAT, false, 8, mCoordBuffer)
            GLES20.glEnableVertexAttribArray(it)
        }

        // get handle to shape's transformation matrix
        mVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix")

        // Pass the projection and view transformation to the shader
        GLES20.glUniformMatrix4fv(mVPMatrixHandle, 1, false, mvpMatrix, 0)

        // Pass the YUV pixel format to the fragment shader
        val yuvType = GLES20.glGetUniformLocation(mProgram, "yuvType")
        checkGlError("glGetUniformLocation yuvType")
        GLES20.glUniform1i(yuvType, type)

        // type: 0 is I420, 1 is NV12, 2 is NV21
        var planarCount = 0
        if (type == 0) {
            // I420 has three planes
            planarCount = 3
            mSampleHandle[0] = GLES20.glGetUniformLocation(mProgram, "samplerY")
            mSampleHandle[1] = GLES20.glGetUniformLocation(mProgram, "samplerU")
            mSampleHandle[2] = GLES20.glGetUniformLocation(mProgram, "samplerV")
        } else {
            // NV12 and NV21 have two planes
            planarCount = 2
            mSampleHandle[0] = GLES20.glGetUniformLocation(mProgram, "samplerY")
            mSampleHandle[1] = GLES20.glGetUniformLocation(mProgram, "samplerUV")
        }
        (0 until planarCount).forEach { i ->
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i)
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mPlanarTextureHandles[i])
            GLES20.glUniform1i(mSampleHandle[i], i)
        }

        // After this call the vertex shader runs once per vertex, then the fragment shader runs once per
        // fragment; the resulting image is written to the render buffer
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        GLES20.glFinish()

        GLES20.glDisableVertexAttribArray(mPositionHandle)
        GLES20.glDisableVertexAttribArray(mCoordHandle)
    }

    /**
     * Upload image data to the texture targets; used when the U and V planes are stored separately (I420)
     * @param yPlane the Y plane of the YUV data
     * @param uPlane the U plane of the YUV data
     * @param vPlane the V plane of the YUV data
     * @param width YUV image width
     * @param height YUV image height
     */
    fun feedTextureWithImageData(yPlane: ByteBuffer, uPlane: ByteBuffer, vPlane: ByteBuffer, width: Int, height: Int) {
        // Based on the YUV layout, upload each plane separately (chroma planes at half resolution)
        textureYUV(yPlane, width, height, 0)
        textureYUV(uPlane, width / 2, height / 2, 1)
        textureYUV(vPlane, width / 2, height / 2, 2)
    }

    /**
     * Upload image data to the texture targets; used when the U and V samples are interleaved (NV12, NV21)
     * @param yPlane the Y plane of the YUV data
     * @param uvPlane the interleaved UV plane of the YUV data
     * @param width YUV image width
     * @param height YUV image height
     */
    fun feedTextureWithImageData(yPlane: ByteBuffer, uvPlane: ByteBuffer, width: Int, height: Int) {
        // Based on the YUV layout, upload the Y plane and the interleaved UV plane (at half resolution)
        textureYUV(yPlane, width, height, 0)
        textureNV12(uvPlane, width / 2, height / 2, 1)
    }

    /**
     * Upload a single-channel plane (Y, U or V) to its texture target
     * @param imageData the Y/U/V plane data
     * @param width plane width
     * @param height plane height
     */
    private fun textureYUV(imageData: ByteBuffer, width: Int, height: Int, index: Int) {
        // Bind the texture object to the texture target
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mPlanarTextureHandles[index])
        // Use linear filtering for both minification and magnification
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
        // Clamp the texture to the edge on both the S and T axes
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
        // Upload the image data. GL_LUMINANCE means the data contains luminance only; although the third and
        // seventh arguments are both GL_LUMINANCE, they mean different things: the former is the internal format
        // of the texture, the latter is the pixel format of the supplied image data. After upload, each texel's
        // r, g, b and a hold the same value - the luminance of the source pixel, i.e. the value of one YUV plane
        GLES20.glTexImage2D(
            GLES20.GL_TEXTURE_2D, 0,
            GLES20.GL_LUMINANCE, width, height, 0,
            GLES20.GL_LUMINANCE,
            GLES20.GL_UNSIGNED_BYTE, imageData
        )
    }

    /**
     * Upload the interleaved UV plane to its texture target; used for NV12 and NV21
     * @param imageData the interleaved UV plane data
     * @param width plane width
     * @param height plane height
     */
    private fun textureNV12(imageData: ByteBuffer, width: Int, height: Int, index: Int) {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mPlanarTextureHandles[index])
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexImage2D(
            GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, width, height, 0,
            GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, imageData
        )
    }

    /**
     * Create the two buffers that hold the vertices: screen (position) vertices and texture vertices
     * @param vert screen (position) vertex data
     */
    fun createBuffers(vert: FloatArray) {
        mVertexBuffer = ByteBuffer.allocateDirect(vert.size * 4).run {
            // use the device hardware's native byte order
            order(ByteOrder.nativeOrder())

            // create a floating point buffer from the ByteBuffer
            asFloatBuffer().apply {
                // add the coordinates to the FloatBuffer
                put(vert)
                // set the buffer to read the first coordinate
                position(0)
            }
        }

        if (mCoordBuffer == null) {
            mCoordBuffer = ByteBuffer.allocateDirect(mCoordVertices.size * 4).run {
                // use the device hardware's native byte order
                order(ByteOrder.nativeOrder())

                // create a floating point buffer from the ByteBuffer
                asFloatBuffer().apply {
                    // add the coordinates to the FloatBuffer
                    put(mCoordVertices)
                    // set the buffer to read the first coordinate
                    position(0)
                }
            }
        }
        Log.d(TAG, "createBuffers vertice_buffer $mVertexBuffer  coord_buffer $mCoordBuffer")
    }

    /**
     * Check whether a GL call produced an error
     * @param op the operation being checked
     */
    private fun checkGlError(op: String) {
        var error: Int = GLES20.glGetError()
        while (error != GLES20.GL_NO_ERROR) {
            Log.e(TAG, "***** $op: glError $error")
            error = GLES20.glGetError()
        }
    }
}
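
MyGLProgram never frees its GL objects. If the surface can be torn down and recreated, a release helper along these lines could be added to the class (my own sketch; the original project does not include one):

// Hypothetical cleanup method (not in the original class): delete the program and
// the three planar textures. Must run on the GL thread while the EGL context is current.
fun release() {
    if (mProgram != 0) {
        GLES20.glDeleteProgram(mProgram)
        mProgram = 0
    }
    GLES20.glDeleteTextures(3, mPlanarTextureHandles)
}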

3. Usage

1. Add the custom GLSurfaceView to the layout file

<com.lkl.opengl.MyGLSurfaceView
        android:id="@+id/openGlSurface"
        android:layout_width="match_parent"
        android:layout_height="match_parent"/>

2. Set the width and height of the YUV data

openGlSurface.setYuvDataSize(width, height)

3. Feed data after the GLSurfaceView.Renderer onSurfaceChanged() callback has run

openGlSurface.feedData(yuvData, 2)
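
For example, the legacy android.hardware.Camera preview delivers NV21 frames by default, so feeding the view could look roughly like this (a sketch under that assumption; openGlSurface is the view from the layout above and camera an already-opened Camera instance):

// Sketch: feeding NV21 preview frames from the legacy Camera API.
val size = camera.parameters.previewSize
openGlSurface.setYuvDataSize(size.width, size.height)
camera.setPreviewCallback { data, _ ->
    // data is an NV21 byte array of width * height * 3 / 2 bytes
    openGlSurface.feedData(data, 2)
}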

Source code

Android OpenGL processing of YUV data (I420, NV12, NV21)

References

https://developer.android.google.cn/training/graphics/opengl
Android OpenGL Development in Practice - Reprocessing Camera Data with GLSurfaceView
https://blog.csdn.net/junzia/article/category/6462864
Displaying YUV Data with OpenGL ES 2.0 on Android
Rendering YUV Data with OpenGL
Android OpenGL ES 2.0 (Part 9) - Image Processing with OpenGL
