由于工作需要,这几天在研究OpenGLES的使用,并需要做出一个APK来实现NV12的渲染,由于对Java编程也不懂,所以还去补了点基础的东西,然后参考了很多文章才完成。这里就记录下我的工程和期间遇到的问题。
参考链接:
- https://blog.csdn.net/yu540135101/article/details/101023208
- https://blog.csdn.net/WuNLK/article/details/77017813
- https://blog.csdn.net/ueryueryuery/article/details/17608185
- https://www.jianshu.com/p/0ecc37cc2f6e
- https://www.cnblogs.com/firstdream/p/7809404.html
- https://blog.csdn.net/wang2470198567/article/details/45044283
- https://blog.csdn.net/biggbang/article/details/20037073
还有一些琐碎的就不贴了,零基础的(我这种)可以先看看链接一的大佬的博客,学习点OpenGLES的基础知识,最好再掌握点yuv数据存储方式,这样才会明白分离yuv分量的字节偏移量。
话不多说,开工
<uses-permission
android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-feature
android:glEsVersion="0x00020000" android:required="true" />
不要忘了加文件读取的权限~
2. 再放上Layout
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<android.opengl.GLSurfaceView
android:id="@+id/MyGLView"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>
```
3. MainActivity
这里面的内容就很简单了,就指定了布局,然后让MyRender渲染就完事儿了。
```java
/**
 * Entry activity: hosts a GLSurfaceView and attaches a MyRender instance
 * that reads one NV12 frame from disk and renders it via OpenGL ES 2.0.
 */
public class MainActivity extends AppCompatActivity {
    private static final String TAG = "MainActivity";
    // The GLSurfaceView declared in activity_main.xml that we render into.
    private GLSurfaceView mGLSurface;
    // Custom renderer that uploads the Y/UV planes and draws them.
    private MyRender mGLRender;
    // Dimensions of the raw YUV frame on disk (not the screen size).
    private int pic_width = 1280;
    private int pic_height = 720;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mGLSurface = findViewById(R.id.MyGLView);
        // Size the view to exactly match the frame so pixels are not stretched.
        // (Previously hard-coded to 1280x720, duplicating pic_width/pic_height.)
        ViewGroup.LayoutParams lp = mGLSurface.getLayoutParams();
        lp.width = pic_width;
        lp.height = pic_height;
        mGLSurface.setLayoutParams(lp);
        try {
            mGLRender = new MyRender(mGLSurface, pic_width, pic_height);
        } catch (FileNotFoundException e) {
            // Log instead of printStackTrace so the failure shows up in logcat.
            Log.e(TAG, "onCreate: YUV source file not found", e);
        }
    }
}
/**
 * GLSurfaceView.Renderer that draws a single NV12 frame loaded from
 * /sdcard/test.yuv. The Y plane is uploaded as a GL_LUMINANCE texture and the
 * interleaved UV plane as a GL_LUMINANCE_ALPHA texture; the fragment shader
 * converts YUV to RGB (BT.601 coefficients).
 */
public class MyRender implements GLSurfaceView.Renderer {
    private static final String TAG = "OpenGLES20_Test";
    private GLSurfaceView _GLSurfaceView;
    private int _program;
    private int _vertexShader;
    private int _fragmentShader;
    private FloatBuffer _vertexArray = null;
    private FloatBuffer _textureArray = null;
    private int _positionHandle;
    private int _textureHandle;
    // Direct buffer holding one full NV12 frame: w*h luma bytes + w*h/2 chroma bytes.
    // Must be a direct buffer: glTexImage2D's JNI layer rejects heap buffers
    // (this was the crash seen with ByteBuffer.allocate()/wrap()).
    private ByteBuffer _frameData = null;
    private static String _filename = "/sdcard/test.yuv";
    // Frame dimensions. Instance fields now (they were static, which would have
    // shared state across renderer instances).
    private int _width = 0;
    private int _height = 0;
    private int[] _textures = new int[2];       // [0] renders the Y plane, [1] the UV plane
    private int[] _texUniLocation = new int[2]; // sampler uniforms: [0] Ytex, [1] UVtex
    private boolean _texturesReady = false;     // guard: glGenTextures must run only once

    // Full-screen quad in normalized device coordinates, triangle-strip order.
    private static final float VERTEX_ARRAY[] = {
            -1.0f, -1.0f, // 0 bottom left
            1.0f, -1.0f, // 1 bottom right
            -1.0f, 1.0f, // 2 top left
            1.0f, 1.0f, // 3 top right
    };
    // Texture coordinates are vertically flipped relative to the vertices so the
    // top-to-bottom YUV rows appear upright on screen.
    private static final float TEXTURE_ARRAY[] = {
            0.0f, 1.0f, // 0 top left
            1.0f, 1.0f, // 1 top right
            0.0f, 0.0f, // 2 bottom left
            1.0f, 0.0f // 3 bottom right
    };

    // Pass-through vertex shader: forwards position and texture coordinate.
    private static final String gl_vertexShader =
            "attribute vec4 aPosition;\n"
            + "attribute vec2 aTextureCoord;\n"
            + "varying vec2 vTextureCoord;\n"
            + "void main() {\n"
            + "  gl_Position = aPosition;\n"
            + "  vTextureCoord = aTextureCoord;\n"
            + "}\n";
    // Fragment shader: samples Y from the luminance texture and U/V from the
    // r (luminance) and a (alpha) channels of the LUMINANCE_ALPHA texture,
    // then converts to RGB.
    private static final String gl_fragmentShader=
            "precision mediump float;\n"
            + "uniform sampler2D Ytex;\n"
            + "uniform sampler2D UVtex;\n"
            + "varying vec2 vTextureCoord;\n"
            + "void main(void) {\n"
            + "  float r,g,b,y,u,v;\n"
            + "  y = texture2D(Ytex, vTextureCoord).r;\n"
            + "  u = texture2D(UVtex,vTextureCoord).r - 0.5;\n"
            + "  v = texture2D(UVtex,vTextureCoord).a - 0.5;\n"
            + "  r = y + 1.13983*v;\n"
            + "  g = y - 0.39465*u - 0.58060*v;\n"
            + "  b = y + 2.03211*u;\n"
            + "  gl_FragColor = vec4(r, g, b, 1.0);\n"
            + "}\n";
    // The shaders above could also live in .glsl asset files and be loaded at runtime.

    /**
     * @param view   the GLSurfaceView to render into
     * @param width  YUV frame width in pixels
     * @param height YUV frame height in pixels
     */
    public MyRender(GLSurfaceView view, int width, int height) throws FileNotFoundException {
        _GLSurfaceView = view;
        _width = width;
        _height = height;
        _GLSurfaceView.setEGLContextClientVersion(2); // request an ES 2.0 context
        _GLSurfaceView.setRenderer(this);
        // Render on demand only; the single frame is drawn when the surface is created.
        _GLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    /** One-time GL setup: buffers, program, attribute/uniform locations, textures. */
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        _vertexArray = createBuffer(VERTEX_ARRAY);
        _textureArray = createBuffer(TEXTURE_ARRAY);
        _program = GLES20.glCreateProgram();
        _vertexShader = loadshader(GLES20.GL_VERTEX_SHADER, gl_vertexShader);
        _fragmentShader = loadshader(GLES20.GL_FRAGMENT_SHADER, gl_fragmentShader);
        if (_program == 0 || _vertexShader == 0 || _fragmentShader == 0) {
            Log.e(TAG, "onSurfaceCreated: program create failed!");
            return;
        }
        GLES20.glAttachShader(_program, _vertexShader);
        GLES20.glAttachShader(_program, _fragmentShader);
        GLES20.glLinkProgram(_program);
        // Check the link result instead of assuming success.
        int[] linked = new int[1];
        GLES20.glGetProgramiv(_program, GLES20.GL_LINK_STATUS, linked, 0);
        if (linked[0] == 0) {
            Log.e(TAG, "onSurfaceCreated: link failed: " + GLES20.glGetProgramInfoLog(_program));
            GLES20.glDeleteProgram(_program);
            _program = 0;
            return;
        }
        GLES20.glUseProgram(_program);
        // Look up attribute and sampler-uniform locations.
        _positionHandle = GLES20.glGetAttribLocation(_program, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        _textureHandle = GLES20.glGetAttribLocation(_program, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        _texUniLocation[0] = GLES20.glGetUniformLocation(_program, "Ytex");
        checkGlError("glGetUniformLocation Ytex");
        _texUniLocation[1] = GLES20.glGetUniformLocation(_program, "UVtex");
        checkGlError("glGetUniformLocation UVtex");
        // Point both attributes at the client-side arrays (2 floats, 8-byte stride).
        GLES20.glVertexAttribPointer(_positionHandle, 2, GLES20.GL_FLOAT, false, 8, _vertexArray);
        GLES20.glEnableVertexAttribArray(_positionHandle);
        checkGlError("glEnableVertexAttribArray positionHandle");
        GLES20.glVertexAttribPointer(_textureHandle, 2, GLES20.GL_FLOAT, false, 8, _textureArray);
        GLES20.glEnableVertexAttribArray(_textureHandle);
        checkGlError("glEnableVertexAttribArray textureHandle");
        // Create the two textures once here, not on every frame (the old code
        // called glGenTextures per frame, leaking two texture names each draw).
        buildTextures();
    }

    /** Keep the viewport in sync with the surface size. */
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        try {
            doDraw();
        } catch (IOException e) {
            Log.e(TAG, "onDrawFrame: failed to read " + _filename, e);
        }
    }

    /*
    ** Helpers below
    */

    /** Copies a float[] into a native-order direct FloatBuffer rewound to 0. */
    private FloatBuffer createBuffer(float[] Array) {
        ByteBuffer bb = ByteBuffer.allocateDirect(Array.length * 4);
        bb.order(ByteOrder.nativeOrder());
        FloatBuffer fb = bb.asFloatBuffer();
        fb.put(Array);
        fb.position(0);
        return fb;
    }

    /**
     * Compiles a shader of the given type.
     *
     * @return the shader name, or 0 on failure
     */
    private int loadshader(int shaderType, String shaderSource)
    {
        int shader = GLES20.glCreateShader(shaderType);
        if (shader > 0) {
            GLES20.glShaderSource(shader, shaderSource);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e("Load Shader Failed", "Compilation\n" + GLES20.glGetShaderInfoLog(shader));
                // Free the failed shader object instead of leaking it.
                GLES20.glDeleteShader(shader);
                return 0;
            }
        }
        return shader;
    }

    /** Generates and configures the Y and UV textures exactly once. */
    private void buildTextures() {
        if (_texturesReady) {
            return;
        }
        GLES20.glGenTextures(2, _textures, 0);
        for (int i = 0; i < _textures.length; i++) {
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textures[i]);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            checkGlError(i == 0 ? " Setup yTexture" : " Setup uvTexture");
        }
        _texturesReady = true;
    }

    /** Uploads the Y and UV planes to their textures and draws the quad. */
    private void doDraw() throws IOException {
        readFile(0); // type 0: FileChannel directly into the direct buffer
        // Y plane: first width*height bytes, one byte per pixel.
        _frameData.position(0);
        _frameData.limit(_width * _height);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textures[0]);
        // Bind sampler "Ytex" to texture unit 0 (must match glActiveTexture above).
        GLES20.glUniform1i(_texUniLocation[0], 0);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
                _width, _height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, _frameData);
        checkGlError("glTexImage2D_y");
        // UV plane: width*height/2 interleaved bytes starting at offset width*height.
        // As a LUMINANCE_ALPHA texture it is (width/2 x height/2) texels of 2 bytes,
        // so U lands in .r and V in .a — exactly what the fragment shader samples.
        _frameData.limit(_width * _height * 3 / 2);
        _frameData.position(_width * _height);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textures[1]);
        GLES20.glUniform1i(_texUniLocation[1], 1);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA,
                _width / 2, _height / 2, 0, GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, _frameData);
        checkGlError("glTexImage2D_uv");
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        // Note: the attribute arrays stay enabled; disabling them here (as the
        // old code did) would break any subsequent frame, since they are only
        // enabled once in onSurfaceCreated.
    }

    /** Logs and throws on any pending GL error; call after each risky GL call. */
    private void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, "***** " + op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }

    /**
     * Reads one full NV12 frame from _filename into _frameData.
     *
     * @param readType 0 = FileChannel into the direct buffer (preferred);
     *                 otherwise byte[] via BufferedInputStream, then copied in.
     * @throws IOException if the file cannot be read or is shorter than a frame
     */
    private void readFile(int readType) throws IOException {
        final int frameSize = _width * _height * 3 / 2;
        if (_frameData == null) {
            // Allocate once and reuse; glTexImage2D requires a DIRECT buffer.
            _frameData = ByteBuffer.allocateDirect(frameSize);
        }
        _frameData.clear();
        if (readType == 0) {
            // try-with-resources closes the stream/channel even if read throws.
            try (FileInputStream fis = new FileInputStream(_filename);
                 FileChannel channel = fis.getChannel()) {
                // A single read() may return fewer bytes than requested; loop
                // until the buffer is full or EOF. (Partial reads were the
                // likely cause of the corrupted-UV-data problem.)
                while (_frameData.hasRemaining() && channel.read(_frameData) >= 0) {
                    // keep reading
                }
            }
        } else {
            byte[] frame = new byte[frameSize];
            try (BufferedInputStream bis =
                         new BufferedInputStream(new FileInputStream(_filename))) {
                int off = 0;
                int n;
                // InputStream.read(byte[]) also does not guarantee a full read.
                while (off < frameSize && (n = bis.read(frame, off, frameSize - off)) >= 0) {
                    off += n;
                }
            }
            // Copy into the direct buffer; wrapping the heap array (as before)
            // hands glTexImage2D a non-direct buffer and crashes.
            _frameData.put(frame);
        }
        if (_frameData.position() < frameSize) {
            throw new IOException(_filename + ": short read, expected " + frameSize
                    + " bytes, got " + _frameData.position());
        }
    }
}
以上为全部代码,都在Java层实现。
在整个实现过程中遇到两个问题:
一是读文件到ByteBuffer,我的readFile(int readType)中有两种读取方式,一种是通过filechannel获取,另一种是用byte数组存取后shiyongByteBuffer.wrap()转化为ByteBuffer,第二种方法是我最开始使用的方法,但是存在问题,在Buffer中读取uv数据(即偏移_width*_height字节)去渲染时会报地址错误。
我从Buffer拿到数据又写回文件,发现前面的数据和源文件对比都是一样的,后面的数据(偏移_width*_height)就开始不一样了,由于个人能力有限,不知道AndroidStudio的虚拟机怎么拿出crash文件,以及直接看java虚拟机的堆栈信息,再加上没有时间再去处理这个问题,所以这个问题到现在也没想明白,也有可能是因为我方法写错了,于是就换了个方法去读文件到Buffer里面;
第二个问题是ByteBuffer.allocate()和ByteBuffer.allocateDirect(),写入文件时,如果用第一个allocate方法分配内存,就会crash,一定要使用allocateDirect,我查了下两种方法的区别,主要的说法就是Direct方法是直接在内存中分配,而不是在jvm里,那么问题来了,从方法上看都是分配一块Buffer供你使用,但是使用起来大不相同,不知道什么时候应该使用Direct方法,什么时候使用not Direct方法去分配,并且内部的实现机制我可能还需要花点时间去看看。。。
综上,经历了一些痛苦也算是写出了一个能跑的程序,我这只实现了nv12的渲染,其他yuv存储格式的大同小异,只要修改shader和ByteBuffer偏移即可。