Reposted from: http://blog.csdn.net/ueryueryuery/article/details/17608185
This post is about displaying YUV images on Android with OpenGL ES. The reasons for doing it this way:
1. Android cannot display YUV images directly, so a YUV-to-RGB conversion is unavoidable;
2. Converting YUV to RGB by hand on the CPU eats a lot of CPU time; play video that way and the phone runs hot, so we want the GPU to do this work instead;
3. OpenGL ES is a third-party library that Android integrates into its own framework, and it has a lot going for it.
My C/C++ is not very good, so the whole thing is implemented in the Java layer (bear with me). I mainly referred to, but was not limited to, the following articles; many thanks to their authors for sharing:
1. http://blog.csdn.net/xiaoguaihai/article/details/8672631
2. http://chenshun87.blog.163.com/blog/static/18859389201232011727615/
3. http://blog.csdn.net/ypist/article/details/8950903
4. http://blog.csdn.net/wanglang3081/article/details/8480281
5. http://blog.csdn.net/xdljf/article/details/7178620
I. First, an overview of how this solution works, so you have the general picture:
1. Where is the image displayed? -> a GLSurfaceView
2. Who pastes the data onto the GLSurfaceView? -> a Renderer
3. Who converts the YUV data to RGB? -> the GL Program/Shader
In one sentence: the GL Program/Shader converts the YUV data handed in by the caller into RGB, and the Renderer draws the result onto the GLSurfaceView.
II. How do you check whether your phone supports GLES 2.0? The following snippet is enough:
- public static boolean detectOpenGLES20(Context context) {
- ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
- ConfigurationInfo info = am.getDeviceConfigurationInfo();
- return (info.reqGlEsVersion >= 0x20000);
- }
Ordinary phones all support GLES 2.0 these days, so there is nothing to worry about.
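For completeness, here is a minimal sketch of how the check might be wired into an Activity's onCreate(); the layout id and the initGLSurface() helper are placeholders of mine, not part of the original code:
- // Hypothetical usage: only set up the GL path when GLES 2.0 is available.
- if (detectOpenGLES20(this)) {
-     setContentView(R.layout.activity_player); // layout containing the GLSurfaceView
-     initGLSurface(); // the setup shown in III.A below
- } else {
-     Utils.LOGD("GLES 2.0 not supported, falling back to a non-GL rendering path");
- }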
III. Let's get started
A. First you need a GLSurfaceView; just put one in your layout.
Then look it up, give it a little configuration, and set a Renderer on it.
The Renderer's job is to draw the image onto the GLSurfaceView.
- mGLSurface = (GLFrameSurface) findViewById(R.id.glsurface);
- mGLSurface.setEGLContextClientVersion(2);
- mGLFRenderer = new GLFrameRenderer(this, mGLSurface);
- mGLSurface.setRenderer(mGLFRenderer);
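One detail worth calling out: the Renderer in the next section only calls requestRender() when a new frame arrives, so the surface should render on demand rather than continuously. Assuming GLFrameSurface is a plain GLSurfaceView subclass that does not already configure this itself, one extra line after setRenderer() does it:
- // Render only when requestRender() is called (must come after setRenderer()).
- mGLSurface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);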
B. Next, let's see how GLFrameRenderer is written.
- import java.nio.ByteBuffer;
-
- import javax.microedition.khronos.egl.EGLConfig;
- import javax.microedition.khronos.opengles.GL10;
-
- import android.opengl.GLES20;
- import android.opengl.GLSurfaceView;
- import android.opengl.GLSurfaceView.Renderer;
-
- public class GLFrameRenderer implements Renderer {
-
- private ISimplePlayer mParentAct;
- private GLSurfaceView mTargetSurface;
- private GLProgram prog = new GLProgram(0);
- private int mVideoWidth = -1, mVideoHeight = -1;
- private ByteBuffer y;
- private ByteBuffer u;
- private ByteBuffer v;
-
- public GLFrameRenderer(ISimplePlayer callback, GLSurfaceView surface) {
- mParentAct = callback;
- mTargetSurface = surface;
- }
-
- @Override
- public void onSurfaceCreated(GL10 gl, EGLConfig config) {
- Utils.LOGD("GLFrameRenderer :: onSurfaceCreated");
- if (!prog.isProgramBuilt()) {
- prog.buildProgram();
- Utils.LOGD("GLFrameRenderer :: buildProgram done");
- }
- }
-
- @Override
- public void onSurfaceChanged(GL10 gl, int width, int height) {
- Utils.LOGD("GLFrameRenderer :: onSurfaceChanged");
- GLES20.glViewport(0, 0, width, height);
- }
-
- @Override
- public void onDrawFrame(GL10 gl) {
- synchronized (this) {
- if (y != null) {
-
- y.position(0);
- u.position(0);
- v.position(0);
- prog.buildTextures(y, u, v, mVideoWidth, mVideoHeight);
- GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
- prog.drawFrame();
- }
- }
- }
-
- public void update(int w, int h) {
- Utils.LOGD("INIT E");
- if (w > 0 && h > 0) {
- if (w != mVideoWidth || h != mVideoHeight) { // reallocate if either dimension changed
- this.mVideoWidth = w;
- this.mVideoHeight = h;
- int yarraySize = w * h;
- int uvarraySize = yarraySize / 4;
- synchronized (this) {
- y = ByteBuffer.allocate(yarraySize);
- u = ByteBuffer.allocate(uvarraySize);
- v = ByteBuffer.allocate(uvarraySize);
- }
- }
- }
-
- mParentAct.onPlayStart();
- Utils.LOGD("INIT X");
- }
-
- public void update(byte[] ydata, byte[] udata, byte[] vdata) {
- synchronized (this) {
- y.clear();
- u.clear();
- v.clear();
- y.put(ydata, 0, ydata.length);
- u.put(udata, 0, udata.length);
- v.put(vdata, 0, vdata.length);
- }
-
-
- mTargetSurface.requestRender();
- }
- }
The code is quite simple; the Renderer handles just a few things:
1. When the surface is created, I initialize the Program/Shader objects that are about to be needed;
2. When the surface changes, the viewport is reset;
3. In onDrawFrame(), the data actually gets "drawn" onto the surface;
4. The two update() methods are how the image's width/height and the frame data are passed in (see the sketch below for how a decoder thread might call them).
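The Renderer also depends on a small callback interface, ISimplePlayer (the Activity passed in as `this` in III.A presumably implements it), and it expects a decoder thread to push I420 planes into it. The sketch below shows what that glue could look like; the exact shape of ISimplePlayer and the two helper methods are my assumptions based on how the Renderer uses them, not code from the original project:
- // Assumed callback interface: the Renderer only ever calls onPlayStart().
- public interface ISimplePlayer {
-     void onPlayStart();
- }
-
- // Hypothetical decoder-side glue, e.g. inside the Activity:
- // call update(w, h) once when the video size becomes known, then
- // update(y, u, v) for every decoded frame. For an I420 frame the Y plane
- // is w*h bytes and each chroma plane is w*h/4 bytes.
- void onVideoSizeKnown(int w, int h) {
-     mGLFRenderer.update(w, h);
- }
-
- void onFrameDecoded(byte[] i420, int w, int h) {
-     int ySize = w * h, uvSize = ySize / 4;
-     byte[] y = new byte[ySize], u = new byte[uvSize], v = new byte[uvSize];
-     System.arraycopy(i420, 0, y, 0, ySize);
-     System.arraycopy(i420, ySize, u, 0, uvSize);
-     System.arraycopy(i420, ySize + uvSize, v, 0, uvSize);
-     mGLFRenderer.update(y, u, v); // ends with requestRender()
- }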
C. Now look at how GLProgram is written. It provides the compute side for the Renderer; all of the actual data processing happens here.
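The listing below shows only GLProgram's methods; the class header, fields and constructor are omitted. For orientation, here is a sketch of the declarations those methods rely on. The names are inferred from the method bodies, and pinning the Y/U/V samplers to texture units 0/1/2 is my assumption:
- public class GLProgram {
-     // GL handles; invalid (0 or -1) until buildProgram()/buildTextures() run.
-     private int _program;
-     private int _positionHandle = -1, _coordHandle = -1;
-     private int _yhandle = -1, _uhandle = -1, _vhandle = -1;
-     private int _ytid = -1, _utid = -1, _vtid = -1;
-     private int _video_width = -1, _video_height = -1;
-     private ByteBuffer _vertice_buffer, _coord_buffer;
-     private boolean isProgBuilt = false;
-
-     // Texture units for the Y/U/V samplers, fixed here to 0/1/2.
-     private final int _textureI = GLES20.GL_TEXTURE0;
-     private final int _textureII = GLES20.GL_TEXTURE1;
-     private final int _textureIII = GLES20.GL_TEXTURE2;
-     private final int _tIindex = 0, _tIIindex = 1, _tIIIindex = 2;
-
-     // Vertex data; defaults to the full-screen quad declared further down.
-     private float[] _vertices = squareVertices;
-
-     public GLProgram(int position) {
-         // The constructor argument presumably selects the drawing area;
-         // with 0 (used above) the full-screen quad is drawn.
-     }
-
-     // ... methods and static data as listed below ...
- }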
- public boolean isProgramBuilt() {
- return isProgBuilt;
- }
-
- public void buildProgram() {
- createBuffers(_vertices, coordVertices);
- if (_program <= 0) {
- _program = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
- }
- Utils.LOGD("_program = " + _program);
-
- // look up the vertex attribute locations in the linked program
- _positionHandle = GLES20.glGetAttribLocation(_program, "vPosition");
- Utils.LOGD("_positionHandle = " + _positionHandle);
- checkGlError("glGetAttribLocation vPosition");
- if (_positionHandle == -1) {
- throw new RuntimeException("Could not get attribute location for vPosition");
- }
- _coordHandle = GLES20.glGetAttribLocation(_program, "a_texCoord");
- Utils.LOGD("_coordHandle = " + _coordHandle);
- checkGlError("glGetAttribLocation a_texCoord");
- if (_coordHandle == -1) {
- throw new RuntimeException("Could not get attribute location for a_texCoord");
- }
-
- // look up the Y/U/V sampler uniform locations
- _yhandle = GLES20.glGetUniformLocation(_program, "tex_y");
- Utils.LOGD("_yhandle = " + _yhandle);
- checkGlError("glGetUniformLocation tex_y");
- if (_yhandle == -1) {
- throw new RuntimeException("Could not get uniform location for tex_y");
- }
- _uhandle = GLES20.glGetUniformLocation(_program, "tex_u");
- Utils.LOGD("_uhandle = " + _uhandle);
- checkGlError("glGetUniformLocation tex_u");
- if (_uhandle == -1) {
- throw new RuntimeException("Could not get uniform location for tex_u");
- }
- _vhandle = GLES20.glGetUniformLocation(_program, "tex_v");
- Utils.LOGD("_vhandle = " + _vhandle);
- checkGlError("glGetUniformLocation tex_v");
- if (_vhandle == -1) {
- throw new RuntimeException("Could not get uniform location for tex_v");
- }
-
- isProgBuilt = true;
- }
-
- public void buildTextures(Buffer y, Buffer u, Buffer v, int width, int height) {
- boolean videoSizeChanged = (width != _video_width || height != _video_height);
- if (videoSizeChanged) {
- _video_width = width;
- _video_height = height;
- Utils.LOGD("buildTextures videoSizeChanged: w=" + _video_width + " h=" + _video_height);
- }
-
- // (re)create and upload the Y-plane texture (full resolution)
- if (_ytid < 0 || videoSizeChanged) {
- if (_ytid >= 0) {
- Utils.LOGD("glDeleteTextures Y");
- GLES20.glDeleteTextures(1, new int[] { _ytid }, 0);
- checkGlError("glDeleteTextures");
- }
-
- int[] textures = new int[1];
- GLES20.glGenTextures(1, textures, 0);
- checkGlError("glGenTextures");
- _ytid = textures[0];
- Utils.LOGD("glGenTextures Y = " + _ytid);
- }
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _ytid);
- checkGlError("glBindTexture");
- GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, _video_width, _video_height, 0,
- GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, y);
- checkGlError("glTexImage2D");
- GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
- GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
-
- // (re)create and upload the U-plane texture (half width, half height)
- if (_utid < 0 || videoSizeChanged) {
- if (_utid >= 0) {
- Utils.LOGD("glDeleteTextures U");
- GLES20.glDeleteTextures(1, new int[] { _utid }, 0);
- checkGlError("glDeleteTextures");
- }
- int[] textures = new int[1];
- GLES20.glGenTextures(1, textures, 0);
- checkGlError("glGenTextures");
- _utid = textures[0];
- Utils.LOGD("glGenTextures U = " + _utid);
- }
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _utid);
- GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, _video_width / 2, _video_height / 2, 0,
- GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, u);
- GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
- GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
-
- // (re)create and upload the V-plane texture (half width, half height)
- if (_vtid < 0 || videoSizeChanged) {
- if (_vtid >= 0) {
- Utils.LOGD("glDeleteTextures V");
- GLES20.glDeleteTextures(1, new int[] { _vtid }, 0);
- checkGlError("glDeleteTextures");
- }
- int[] textures = new int[1];
- GLES20.glGenTextures(1, textures, 0);
- checkGlError("glGenTextures");
- _vtid = textures[0];
- Utils.LOGD("glGenTextures V = " + _vtid);
- }
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _vtid);
- GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, _video_width / 2, _video_height / 2, 0,
- GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, v);
- GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
- GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
- }
-
- public void drawFrame() {
- GLES20.glUseProgram(_program);
- checkGlError("glUseProgram");
-
- GLES20.glVertexAttribPointer(_positionHandle, 2, GLES20.GL_FLOAT, false, 8, _vertice_buffer);
- checkGlError("glVertexAttribPointer mPositionHandle");
- GLES20.glEnableVertexAttribArray(_positionHandle);
-
- GLES20.glVertexAttribPointer(_coordHandle, 2, GLES20.GL_FLOAT, false, 8, _coord_buffer);
- checkGlError("glVertexAttribPointer maTextureHandle");
- GLES20.glEnableVertexAttribArray(_coordHandle);
-
- // bind the Y/U/V textures and point each sampler at its texture unit
- GLES20.glActiveTexture(_textureI);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _ytid);
- GLES20.glUniform1i(_yhandle, _tIindex);
-
- GLES20.glActiveTexture(_textureII);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _utid);
- GLES20.glUniform1i(_uhandle, _tIIindex);
-
- GLES20.glActiveTexture(_textureIII);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _vtid);
- GLES20.glUniform1i(_vhandle, _tIIIindex);
-
- GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
- GLES20.glFinish();
-
- GLES20.glDisableVertexAttribArray(_positionHandle);
- GLES20.glDisableVertexAttribArray(_coordHandle);
- }
-
- public int createProgram(String vertexSource, String fragmentSource) {
-
- int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
- int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
-
- Utils.LOGD("vertexShader = " + vertexShader);
- Utils.LOGD("pixelShader = " + pixelShader);
-
- int program = GLES20.glCreateProgram();
- if (program != 0) {
- GLES20.glAttachShader(program, vertexShader);
- checkGlError("glAttachShader");
- GLES20.glAttachShader(program, pixelShader);
- checkGlError("glAttachShader");
- GLES20.glLinkProgram(program);
- int[] linkStatus = new int[1];
- GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
- if (linkStatus[0] != GLES20.GL_TRUE) {
- Utils.LOGE("Could not link program: ", null);
- Utils.LOGE(GLES20.glGetProgramInfoLog(program), null);
- GLES20.glDeleteProgram(program);
- program = 0;
- }
- }
- return program;
- }
-
- private int loadShader(int shaderType, String source) {
- int shader = GLES20.glCreateShader(shaderType);
- if (shader != 0) {
- GLES20.glShaderSource(shader, source);
- GLES20.glCompileShader(shader);
- int[] compiled = new int[1];
- GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
- if (compiled[0] == 0) {
- Utils.LOGE("Could not compile shader " + shaderType + ":", null);
- Utils.LOGE(GLES20.glGetShaderInfoLog(shader), null);
- GLES20.glDeleteShader(shader);
- shader = 0;
- }
- }
- return shader;
- }
-
- private void createBuffers(float[] vert, float[] coord) {
- _vertice_buffer = ByteBuffer.allocateDirect(vert.length * 4);
- _vertice_buffer.order(ByteOrder.nativeOrder());
- _vertice_buffer.asFloatBuffer().put(vert);
- _vertice_buffer.position(0);
-
- if (_coord_buffer == null) {
- _coord_buffer = ByteBuffer.allocateDirect(coord.length * 4);
- _coord_buffer.order(ByteOrder.nativeOrder());
- _coord_buffer.asFloatBuffer().put(coord);
- _coord_buffer.position(0);
- }
- }
-
- private void checkGlError(String op) {
- int error;
- while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
- Utils.LOGE("***** " + op + ": glError " + error, null);
- throw new RuntimeException(op + ": glError " + error);
- }
- }
-
- private static float[] squareVertices = { -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, };
-
- private static float[] coordVertices = { 0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, };
-
- private static final String VERTEX_SHADER = "attribute vec4 vPosition;\n" + "attribute vec2 a_texCoord;\n"
- + "varying vec2 tc;\n" + "void main() {\n" + "gl_Position = vPosition;\n" + "tc = a_texCoord;\n" + "}\n";
-
- private static final String FRAGMENT_SHADER = "precision mediump float;\n" + "uniform sampler2D tex_y;\n"
- + "uniform sampler2D tex_u;\n" + "uniform sampler2D tex_v;\n" + "varying vec2 tc;\n" + "void main() {\n"
- + "vec4 c = vec4((texture2D(tex_y, tc).r - 16./255.) * 1.164);\n"
- + "vec4 U = vec4(texture2D(tex_u, tc).r - 128./255.);\n"
- + "vec4 V = vec4(texture2D(tex_v, tc).r - 128./255.);\n" + "c += V * vec4(1.596, -0.813, 0, 0);\n"
- + "c += U * vec4(0, -0.392, 2.017, 0);\n" + "c.a = 1.0;\n" + "gl_FragColor = c;\n" + "}\n";
This code is more involved, so a brief explanation:
1. buildProgram() creates a program whose job is the YUV->RGB conversion. It uses two shaders (a shader is essentially a small compute unit that runs a short piece of code): the first runs VERTEX_SHADER and simply passes the texture coordinates on to the second; the second runs FRAGMENT_SHADER and does the actual YUV->RGB arithmetic (the exact formula is spelled out below).
2. buildTextures() creates three textures, one each for the Y, U and V planes; sampled together they produce the full-color picture.
3. drawFrame() runs the program and actually performs the drawing.
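For reference, the constants hard-coded in FRAGMENT_SHADER are the standard BT.601 limited-range ("video range") YUV-to-RGB conversion. With Y, U and V sampled as values in 0..1, the shader computes, channel by channel:
R = 1.164 * (Y - 16/255) + 1.596 * (V - 128/255)
G = 1.164 * (Y - 16/255) - 0.813 * (V - 128/255) - 0.392 * (U - 128/255)
B = 1.164 * (Y - 16/255) + 2.017 * (U - 128/255)
These coefficients assume limited-range video (luma 16-235), which is what typical decoders output; a full-range source would need the 16/255 offset and the 1.164 scale removed and slightly different chroma constants.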