Android OpenGL+Camera2渲染(1) —— OpenGL简单介绍
Android OpenGL+Camera2渲染(2) —— OpenGL实现Camera2图像预览
Android OpenGL+Camera2渲染(3) —— 大眼,贴纸功能实现
Android OpenGL+Camera2渲染(4) —— 美颜功能实现
Android OpenGL+Camera2渲染(5) —— 录制视频,实现快录慢录
和之前讲的逻辑一样,有一个美颜的开关按钮。
/**
 * Toggles the beauty filter from the UI thread.
 *
 * The actual enable/disable is posted to the GL thread via queueEvent(),
 * because the renderer creates and releases GL resources and must only be
 * touched on the thread that owns the EGL context.
 *
 * @param isChecked true to turn the beauty filter on, false to turn it off
 */
public void enableBeauty(final boolean isChecked) {
    // Defer to the GL thread; do not call the renderer directly from here.
    queueEvent(() -> glRender.enableBeauty(isChecked));
}
/**
 * Enables or disables the beauty filter on the GL thread.
 *
 * On enable, a BeautifyFilter is created and prepared with the current
 * surface dimensions; on disable, the filter is released and dropped so
 * onDrawFrame() skips the beauty pass (guarded by beautyEnable).
 *
 * Fixes over the original:
 *  - disabling when the filter was never created (or was already released)
 *    no longer throws a NullPointerException;
 *  - enabling twice in a row releases the previous filter instead of
 *    leaking its GL resources.
 *
 * @param isChecked true to enable the beauty filter, false to disable it
 */
public void enableBeauty(boolean isChecked) {
    this.beautyEnable = isChecked;
    if (isChecked) {
        // Release any previously created filter before replacing it,
        // otherwise its FBO/texture/program would leak.
        if (beaytyFilter != null) {
            beaytyFilter.release();
        }
        beaytyFilter = new BeautifyFilter(glRenderView.getContext());
        beaytyFilter.prepare(screenSurfaceWid, screenSurfaceHeight, screenX, screenY);
    } else if (beaytyFilter != null) {
        // Null guard: disable may be requested before the filter ever existed.
        beaytyFilter.release();
        beaytyFilter = null;
    }
}
// Skin-smoothing ("beauty") fragment shader.
// Pipeline: average 21 samples (centre + 20 neighbours) to get a blur ->
// subtract blur from the original to get a high-pass (edge) signal ->
// boost that signal -> blend the blurred image back into the original,
// weighted so edges stay sharp and smooth skin areas receive the blur.
precision mediump float;
// Interpolated texture coordinate from the vertex shader.
// NOTE(review): named aCoord although it is a varying — the name must match
// the vertex shader's varying; do not rename independently.
varying mediump vec2 aCoord;
uniform sampler2D vTexture;
// Texture size in pixels; converts pixel offsets into UV offsets below.
uniform int width;
uniform int height;
// UV positions of the 20 neighbour samples, filled in main().
vec2 blurCoordinates[20];
void main(){
// One-texel step in UV space (e.g. for a 100x100 texture the step is 1/100).
vec2 singleStepOffset = vec2(1.0/float(width), 1.0/float(height));
// 20 sample points arranged on rings around the fragment (radii ~4..10 texels).
blurCoordinates[0] = aCoord.xy + singleStepOffset * vec2(0.0, -10.0);
blurCoordinates[1] = aCoord.xy + singleStepOffset * vec2(0.0, 10.0);
blurCoordinates[2] = aCoord.xy + singleStepOffset * vec2(-10.0, 0.0);
blurCoordinates[3] = aCoord.xy + singleStepOffset * vec2(10.0, 0.0);
blurCoordinates[4] = aCoord.xy + singleStepOffset * vec2(5.0, -8.0);
blurCoordinates[5] = aCoord.xy + singleStepOffset * vec2(5.0, 8.0);
blurCoordinates[6] = aCoord.xy + singleStepOffset * vec2(-5.0, 8.0);
blurCoordinates[7] = aCoord.xy + singleStepOffset * vec2(-5.0, -8.0);
blurCoordinates[8] = aCoord.xy + singleStepOffset * vec2(8.0, -5.0);
blurCoordinates[9] = aCoord.xy + singleStepOffset * vec2(8.0, 5.0);
blurCoordinates[10] = aCoord.xy + singleStepOffset * vec2(-8.0, 5.0);
blurCoordinates[11] = aCoord.xy + singleStepOffset * vec2(-8.0, -5.0);
blurCoordinates[12] = aCoord.xy + singleStepOffset * vec2(0.0, -6.0);
blurCoordinates[13] = aCoord.xy + singleStepOffset * vec2(0.0, 6.0);
blurCoordinates[14] = aCoord.xy + singleStepOffset * vec2(6.0, 0.0);
blurCoordinates[15] = aCoord.xy + singleStepOffset * vec2(-6.0, 0.0);
blurCoordinates[16] = aCoord.xy + singleStepOffset * vec2(-4.0, -4.0);
blurCoordinates[17] = aCoord.xy + singleStepOffset * vec2(-4.0, 4.0);
blurCoordinates[18] = aCoord.xy + singleStepOffset * vec2(4.0, -4.0);
blurCoordinates[19] = aCoord.xy + singleStepOffset * vec2(4.0, 4.0);
// Accumulate the centre sample plus the 20 neighbours.
vec4 currentColor = texture2D(vTexture, aCoord);
vec3 rgb = currentColor.rgb;
for (int i=0;i<20; i++){
rgb += texture2D(vTexture, blurCoordinates[i].xy).rgb;
}
// Average of 21 samples; rendering this directly gives the blur effect.
vec4 blur = vec4(rgb * 1.0/21.0, currentColor.a);
// High-pass: original minus blur leaves only edge/contour detail.
vec4 highPassColor = currentColor - blur;
// Hard-light-style boost (color = 2 * c * c) with an extra 24x gain,
// clamped to [0,1]: deepens/highlights the edge information per channel.
highPassColor.r = clamp(2.0 * highPassColor.r * highPassColor.r * 24.0, 0.0, 1.0);
highPassColor.g = clamp(2.0 * highPassColor.g * highPassColor.g * 24.0, 0.0, 1.0);
highPassColor.b = clamp(2.0 * highPassColor.b * highPassColor.b * 24.0, 0.0, 1.0);
// Edge mask used to keep blemish/edge regions out of the smoothing below.
vec4 highPassBlur = vec4(highPassColor.rgb, 1.0);
// 3. Blend -> skin smoothing.
// Blue channel: skin tends to have high blue; (b - 0.2) * 5.0 maps it to a
// [0,1] skin-likelihood factor.
float b = min(currentColor.b, blur.b);
float value = clamp((b - 0.2) * 5.0, 0.0, 1.0);
// Strongest edge response across the RGB channels.
float maxChannelColor = max(max(highPassBlur.r, highPassBlur.g), highPassBlur.b);
// Smoothing strength.
float intensity = 1.0;// 0.0 - 1.0; larger values look overly blurred
// Per-fragment blend weight: low where edges are strong, high on smooth skin.
float currentIntensity = (1.0 - maxChannelColor / (maxChannelColor + 0.2)) * value * intensity;
//gl_FragColor = highPassBlur; // debug: visualise the boosted edge mask
// mix() is GLSL's built-in linear blend: x*(1-a) + y*a.
// The larger the third argument, the more the blurred image dominates.
vec3 r = mix(currentColor.rgb, blur.rgb, currentIntensity);
gl_FragColor = vec4(r, 1.0);
}
首先获取周边20个点,取平均值,实现高斯模糊的效果。计算高反差,强光处理,融合的过程,实现美颜。
前面的几篇文章已经讲过大眼、贴纸的创建过程,美颜也一样,都是继承自 BaseFilter,这里就不多说了,直接看一下
@Override
public void onDrawFrame(GL10 gl) {
// Per-frame render pass: camera texture -> optional effect filters
// (big-eye, beauty, sticker) chained via FBO texture IDs -> screen -> recorder.
int textureId;
// Configure the screen.
// Set the clear colour: tells OpenGL what colour the screen is wiped to.
GLES20.glClearColor(0, 0, 0, 0);
// Apply the colour configured by glClearColor above.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Fetch the latest camera frame into the OES texture.
mSurfaceTexture.updateTexImage();
mSurfaceTexture.getTransformMatrix(mtx);
// cameraFilter needs this matrix: it maps SurfaceTexture coordinates
// to screen coordinates.
cameraFilter.setMatrix(mtx);
// Each filter renders into its own FBO and returns the resulting texture ID,
// which feeds the next filter in the chain.
textureId = cameraFilter.onDrawFrame(mTextures[0]);
if (bigEyeEnable) {
bigeyeFilter.setFace(tracker.mFace);
textureId = bigeyeFilter.onDrawFrame(textureId);
}
if (beautyEnable) {
textureId = beaytyFilter.onDrawFrame(textureId);
}
if (stickEnable) {
stickerFilter.setFace(tracker.mFace);
textureId = stickerFilter.onDrawFrame(textureId);
}
// Final pass draws the processed texture to the screen.
int id = screenFilter.onDrawFrame(textureId);
// Feed the processed frame to the video recorder.
avcRecorder.encodeFrame(id, mSurfaceTexture.getTimestamp());
}
拿到上一层的纹理ID,进行处理。
@Override
public int onDrawFrame(int textureId) {
// Renders the input texture through the beauty shader into this filter's
// FBO and returns the FBO's colour texture for the next filter in the chain.
GLES20.glViewport(0, 0, mOutputWidth, mOutputHeight);
// Render off-screen into this filter's framebuffer, not the display.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[0]);
GLES20.glUseProgram(mProgramId);
// Texture size uniforms: the shader derives its per-texel UV step from these.
GLES20.glUniform1i(width, mOutputWidth);
GLES20.glUniform1i(height, mOutputHeight);
// Pass the vertex coordinates.
mGlVertexBuffer.position(0);
GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 0, mGlVertexBuffer);
GLES20.glEnableVertexAttribArray(vPosition);
// Pass the texture coordinates.
mGlTextureBuffer.position(0);
GLES20.glVertexAttribPointer(vCoord, 2, GLES20.GL_FLOAT, false, 0, mGlTextureBuffer);
GLES20.glEnableVertexAttribArray(vCoord);
// Bind the incoming texture to unit 0 and point the sampler uniform at it.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
GLES20.glUniform1i(vTexture, 0);
// Draw the full-screen quad; the fragment shader applies the beauty effect.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Unbind so subsequent passes start from clean state.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
// The FBO's colour attachment now holds the beautified frame.
return mFBOTextures[0];
}
向片元着色器中传入 width、height 等参数,执行 glDrawArrays,经过片元着色器处理,就把美颜后的效果写入了 FBO 的纹理中了。
github项目地址:https://github.com/wangchao0837/OpenGlCameraRender