前言
随着AR效果越来越普及,摄像头在Android中的应用越来越重要。常见的摄像头预览方案是直接使用SurfaceView。
SurfaceView使用非常方便,但是我们无法对SurfaceView显示的视频数据进行处理,这样的架构灵活性很差。因此,Android还提供了SurfaceTexture的方式:摄像头将采集的数据保存成纹理,然后使用OpenGL ES去显示。
OpenGL ES基础
OpenGL ES 是免授权费的、跨平台的 2D/3D 图形应用程序接口(API)。
GLSL
OpenGL着色语言(OpenGL Shading Language)是用来在OpenGL中着色编程的语言,也即开发人员写的短小的自定义程序,他们是在图形卡的GPU (Graphic Processor Unit图形处理单元)上执行的,代替了固定的渲染管线的一部分,使渲染管线中不同层次具有可编程性。
我们通过编写GLSL来决定顶点数据,片段数据以怎样的方式进行组合。
// Vertex shader (GLSL ES 2.0): passes NDC positions through to gl_Position
// (negating Y, which flips the image vertically) and forwards the texture
// coordinate to the fragment shader unchanged.
private final String vertexShaderCode = "uniform mat4 textureTransform;\n" +
"attribute vec2 inputTextureCoordinate;\n" +
"attribute vec4 position; \n" +// NDC vertex position
"varying vec2 textureCoordinate; \n" +// texture coordinate forwarded to the fragment shader
"\n" +
" void main() {\n" +
" gl_Position = vec4(position.x,-position.y,position.z,position.w);\n" +
" textureCoordinate = vec2(inputTextureCoordinate.x,inputTextureCoordinate.y);\n" +
" }";
// Fragment shader: samples the camera frame from an external OES texture.
// GL_OES_EGL_image_external is required to sample a SurfaceTexture-backed texture.
private final String fragmentShaderCode = "#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"uniform samplerExternalOES videoTex;\n" +
"varying vec2 textureCoordinate;\n" +
"\n" +
"void main() {\n" +
" vec4 tc = texture2D(videoTex, textureCoordinate);\n" +
// " float color = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" + // render the whole view in grayscale
// " gl_FragColor = vec4(color,color,color,1.0);\n" +
" gl_FragColor = vec4(tc.r,tc.g,tc.b,1.0);\n" +
"}";
上面的代码中,position是输入变量,表示顶点坐标。**gl_Position**是最终的坐标。
例如:
gl_Position = vec4(position.x,-position.y,position.z,position.w);
表示显示图像与源数据上下颠倒。
Program
上面介绍了GLSL(着色器语言)的编写,但我们如何将GLSL写入GPU,让它为我们工作呢?这就需要Program。我们可以将GLSL编写的Shader绑定到特定的Program,然后将Program链接到OpenGL中。
/**
 * Compiles a shader of the given type from GLSL source.
 *
 * @param type       GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
 * @param shaderCode the GLSL source code
 * @return the handle of the successfully compiled shader
 * @throws RuntimeException if compilation fails; the original code returned
 *         the broken handle silently, which made shader errors invisible
 */
private int loadShader(int type, String shaderCode) {
    int shader = GLES20.glCreateShader(type);
    // Upload the shader source and compile it.
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);
    // Fail fast with the driver's compile log instead of returning a dead handle.
    int[] compiled = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        String info = GLES20.glGetShaderInfoLog(shader);
        GLES20.glDeleteShader(shader);
        throw new RuntimeException("Shader compile failed (type=" + type + "): " + info);
    }
    return shader;
}
/**
 * Creates and links the GL program from the vertex and fragment shaders,
 * storing the handle in {@code mProgram}.
 *
 * @throws RuntimeException if linking fails; the original code never checked
 *         GL_LINK_STATUS, so link errors surfaced only as a black screen
 */
private void creatProgram() {
    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
    int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
    // Create an empty OpenGL ES program and attach both shaders.
    mProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(mProgram, vertexShader);
    GLES20.glAttachShader(mProgram, fragmentShader);
    // Link the program into an executable for the GPU.
    GLES20.glLinkProgram(mProgram);
    int[] linked = new int[1];
    GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linked, 0);
    if (linked[0] == 0) {
        String info = GLES20.glGetProgramInfoLog(mProgram);
        GLES20.glDeleteProgram(mProgram);
        throw new RuntimeException("Program link failed: " + info);
    }
    // The linked program keeps its own copy; the shader objects can be released.
    GLES20.glDeleteShader(vertexShader);
    GLES20.glDeleteShader(fragmentShader);
}
GLSurfaceView.Renderer
GLSurfaceView.Renderer是Android SDK提供的一个接口。需要实现三个方法
- onSurfaceCreated是画面创建时调用,用于创建Program,初始化纹理等一些初始化工作
- onSurfaceChanged是画面尺寸变化时调用,通常用于设置尺寸,设置视点
- onDrawFrame在GPU准备绘制时调用,用于具体的绘制过程
具体过程
创建Render
// NOTE(review): a public static Camera shared across instances can leak the
// device camera if not released on every path — confirm ownership; prefer an
// instance field.
public static Camera camera;
GLSurfaceView mCameraGlsurfaceView;
public CameraRender mRenderer;
// Sets up the GLSurfaceView with a GLES 2.0 context and attaches the renderer.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.gl_preview_fragment_layout);
mCameraGlsurfaceView = findViewById(R.id.glsv_effect_preview);
mCameraGlsurfaceView.setEGLContextClientVersion(2);// must be called before setRenderer()
mRenderer = new CameraRender(camera , this);
mCameraGlsurfaceView.setRenderer(mRenderer);
// Only redraw when requestRender() is called (driven by onFrameAvailable below).
mCameraGlsurfaceView.setRenderMode(RENDERMODE_WHEN_DIRTY);
// NOTE(review): the camera is stopped and released AFTER being handed to the
// renderer; the renderer opens its own camera in onSurfaceCreated, so the
// instance passed above appears unused for preview — confirm and simplify.
if (camera != null) {
camera.stopPreview();
camera.release();
}
// Signals onDrawFrame to run one-time attribute setup (activeProgram).
mRenderer.mBoolean = true;
}
// Called whenever the camera produces a new frame; schedules a GL redraw
// (required because the view uses RENDERMODE_WHEN_DIRTY).
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
mCameraGlsurfaceView.requestRender();
}
Render
// Selects the texture-coordinate set in activeProgram(): 0 picks the
// back-camera ordering, anything else the front-camera ordering.
private int camera_status = 0;
// Vertex shader (GLSL ES 2.0): passes NDC positions through (negating Y)
// and forwards texture coordinates to the fragment shader.
private final String vertexShaderCode = "uniform mat4 textureTransform;\n" +
"attribute vec2 inputTextureCoordinate;\n" +
"attribute vec4 position; \n" +// NDC vertex position
"varying vec2 textureCoordinate; \n" +// texture coordinate forwarded to the fragment shader
"\n" +
" void main() {\n" +
" gl_Position = vec4(position.x,-position.y,position.z,position.w);\n" +
" textureCoordinate = vec2(inputTextureCoordinate.x,inputTextureCoordinate.y);\n" +
" }";
// Fragment shader: samples the camera frame from an external OES texture.
private final String fragmentShaderCode = "#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"uniform samplerExternalOES videoTex;\n" +
"varying vec2 textureCoordinate;\n" +
"\n" +
"void main() {\n" +
" vec4 tc = texture2D(videoTex, textureCoordinate);\n" +
// " float color = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" + // render the whole view in grayscale
// " gl_FragColor = vec4(color,color,color,1.0);\n" +
" gl_FragColor = vec4(tc.r,tc.g,tc.b,1.0);\n" +
"}";
// Full-screen quad in NDC, drawn as a triangle strip (two triangles).
private float[] mPosCoordinate = {-1, -1, -1, 1, 1, -1, 1, 1};
private float[] mTexCoordinateBackRight = {1, 1, 0, 1, 1, 0, 0, 0};// rotated 90° clockwise and flipped along Y: correct for the back camera, upside-down for the front
private float[] mTexCoordinateForntRight = {0, 1, 1, 1, 0, 0, 1, 0};// rotated 90° clockwise: upside-down for the back camera, correct for the front. NOTE(review): "Fornt" is a typo for "Front"; kept because callers reference it
private SurfaceTexture mSurfaceTexture;
private Camera mCamera;
private SurfaceTexture.OnFrameAvailableListener mOnFrameAvailableListener;
public int mProgram;
// Set true by the activity after setup; onDrawFrame then runs activeProgram() once.
public boolean mBoolean = false;
/**
 * @param camera   camera instance supplied by the host activity
 * @param listener notified whenever a new camera frame is available
 */
public CameraRender( Camera camera , SurfaceTexture.OnFrameAvailableListener listener) {
mCamera = camera;
mOnFrameAvailableListener = listener;
// Start from identity; real values are computed in onSurfaceChanged.
// (mProjectMatrix / mCameraMatrix / mMVPMatrix are declared outside this excerpt.)
Matrix.setIdentityM(mProjectMatrix, 0);
Matrix.setIdentityM(mCameraMatrix, 0);
Matrix.setIdentityM(mMVPMatrix, 0);
}
onSurfaceCreated
/**
 * Compiles a shader of the given type from GLSL source.
 *
 * @param type       GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
 * @param shaderCode the GLSL source code
 * @return the handle of the successfully compiled shader
 * @throws RuntimeException if compilation fails; the original code returned
 *         the broken handle silently, which made shader errors invisible
 */
private int loadShader(int type, String shaderCode) {
    int shader = GLES20.glCreateShader(type);
    // Upload the shader source and compile it.
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);
    // Fail fast with the driver's compile log instead of returning a dead handle.
    int[] compiled = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        String info = GLES20.glGetShaderInfoLog(shader);
        GLES20.glDeleteShader(shader);
        throw new RuntimeException("Shader compile failed (type=" + type + "): " + info);
    }
    return shader;
}
/**
 * Creates and links the GL program from the vertex and fragment shaders,
 * storing the handle in {@code mProgram}.
 *
 * @throws RuntimeException if linking fails; the original code never checked
 *         GL_LINK_STATUS, so link errors surfaced only as a black screen
 */
private void creatProgram() {
    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
    int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
    // Create an empty OpenGL ES program and attach both shaders.
    mProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(mProgram, vertexShader);
    GLES20.glAttachShader(mProgram, fragmentShader);
    // Link the program into an executable for the GPU.
    GLES20.glLinkProgram(mProgram);
    int[] linked = new int[1];
    GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linked, 0);
    if (linked[0] == 0) {
        String info = GLES20.glGetProgramInfoLog(mProgram);
        GLES20.glDeleteProgram(mProgram);
        throw new RuntimeException("Program link failed: " + info);
    }
    // The linked program keeps its own copy; the shader objects can be released.
    GLES20.glDeleteShader(vertexShader);
    GLES20.glDeleteShader(fragmentShader);
}
// One-time GL setup: creates the OES-backed SurfaceTexture, builds the shader
// program, then opens the camera and starts streaming frames into the texture.
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// Wrap a freshly created OES texture in a SurfaceTexture the camera renders into.
mSurfaceTexture = new SurfaceTexture(createOESTextureObject());
creatProgram();
// NOTE(review): this discards the Camera passed to the constructor and
// hard-codes id 1 (typically the front camera), while camera_status stays 0
// (back-camera texture coordinates) — confirm the intended device and keep
// the two in sync.
mCamera = Camera.open(1);
try {
mCamera.setPreviewTexture(mSurfaceTexture);
mCamera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
在onSurfaceCreated中,我们不仅创建了Program,还初始化了SurfaceTexture和Camera。其中,SurfaceTexture通过createOESTextureObject获取。
onSurfaceChanged
/**
 * Recomputes the viewport and MVP matrix for the new surface size.
 *
 * Fix: the original also called {@code Matrix.scaleM(mMVPMatrix, 0, 1, -1, 1)}
 * first, but {@code multiplyMM} below writes all 16 elements of
 * {@code mMVPMatrix}, so that scale was dead code and has been removed
 * (behavior is unchanged).
 */
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    float ratio = (float) width / height;
    // Orthographic projection; 1 and 7 are the near/far clip distances from
    // the eye, not coordinates. (The original comment said "3 and 7".)
    Matrix.orthoM(mProjectMatrix, 0, -1, 1, -ratio, ratio, 1, 7);
    // Eye at z = 3 looking at the origin, with +Y as the up vector.
    Matrix.setLookAtM(mCameraMatrix, 0, 0, 0, 3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
    // MVP = projection * view; overwrites mMVPMatrix entirely.
    Matrix.multiplyMM(mMVPMatrix, 0, mProjectMatrix, 0, mCameraMatrix, 0);
}
onDrawFrame
// Installs the shader program into the GL pipeline and wires up the vertex
// position and texture-coordinate attribute arrays.
private void activeProgram() {
    // Make the linked program current for subsequent draw calls.
    GLES20.glUseProgram(mProgram);
    mSurfaceTexture.setOnFrameAvailableListener(mOnFrameAvailableListener);
    // Resolve the attribute slots declared in the vertex shader.
    uPosHandle = GLES20.glGetAttribLocation(mProgram, "position");
    aTexHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
    // Back (camera_status == 0) and front cameras need different
    // texture-coordinate orderings to render upright.
    float[] texCoordinates = (camera_status == 0)
            ? mTexCoordinateBackRight
            : mTexCoordinateForntRight;
    FloatBuffer positionBuffer = convertToFloatBuffer(mPosCoordinate);
    FloatBuffer textureBuffer = convertToFloatBuffer(texCoordinates);
    // Two floats per vertex, tightly packed.
    GLES20.glVertexAttribPointer(uPosHandle, 2, GLES20.GL_FLOAT, false, 0, positionBuffer);
    GLES20.glVertexAttribPointer(aTexHandle, 2, GLES20.GL_FLOAT, false, 0, textureBuffer);
    // Enable both attribute arrays for drawing.
    GLES20.glEnableVertexAttribArray(uPosHandle);
    GLES20.glEnableVertexAttribArray(aTexHandle);
}
// Renders the most recent camera frame; one-time attribute setup is gated
// by mBoolean (set by the host activity).
@Override
public void onDrawFrame(GL10 gl) {
    if (mBoolean) {
        // First draw after setup: bind the program and attribute arrays once.
        activeProgram();
        mBoolean = false;
    }
    if (mSurfaceTexture == null) {
        return;
    }
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    // Latch the newest camera frame into the OES texture, then draw the quad.
    mSurfaceTexture.updateTexImage();
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, mPosCoordinate.length / 2);
}
在onDrawFrame中,我们使用Program,并传入对应的position,然后更新SurfaceTexture,并重绘画面。
经过以上的过程,我们可以在Android中显示摄像头的画面。
如有问题,欢迎指正。