Please respect the original author and include this link when reposting: http://blog.csdn.net/u010949962/article/details/41865777
Recently I needed to render a native Android View into an OpenGL GLSurfaceView. My first thought was a screenshot approach: capture the View into a Bitmap, then render that Bitmap as an OpenGL texture. This is clearly not feasible, though; for a View whose content updates rapidly, you would have to keep taking screenshots of the view non-stop just to reproduce the native View's behavior.
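For comparison, a minimal sketch of that bitmap-capture idea is shown below; the helper name drawViewToTexture and its parameters are mine, not from any original code, and it has to run on the GL thread.
// Rejected approach: rasterize the View into a Bitmap, then upload it as a GL_TEXTURE_2D.
// Every time the View changes, the whole capture-and-upload cycle has to be repeated.
private void drawViewToTexture(View view, int textureId) {
    Bitmap bitmap = Bitmap.createBitmap(view.getWidth(), view.getHeight(), Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(bitmap);
    view.draw(canvas); // draw the View into the Bitmap
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); // upload the Bitmap as texture data
    bitmap.recycle();
}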
After a lot of Googling I finally found a precedent on a foreign site where someone had done something similar (link: http://www.felixjones.co.uk/neo%20website/Android_View/). In my tests, however, that approach can only render views whose direct parent class is View, i.e. a single layer of View (such as a ProgressBar, a view that cannot have children). When the native Android View contains many child views (that is, the root is a FrameLayout, LinearLayout, or the like), there is no way to detect the View's dynamic changes in real time; OpenGL has to keep re-rendering the view non-stop to reproduce the native View's behavior. Rendering constantly like that consumes a lot of resources and lowers the application's efficiency. Ideally, rendering should happen only when we detect that the content of the View or of one of its children has changed (for example, when text inside the View scrolls).
After nearly two weeks of effort I finally got the effect working properly. Since the result was built on top of someone else's work, the method deserves to be shared in turn, hence this article. Note that it only supports API 15 and above.
Step 1: Subclass the root View
1. Make the View draw itself:
setWillNotDraw(false);
2. Listen for changes to the View. In the subclass, use a ViewTreeObserver to listen, like this:
private void addOnPreDrawListener() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
final ViewTreeObserver mObserver = getViewTreeObserver();
if (mObserver != null) {
mObserver.addOnPreDrawListener(new OnPreDrawListener() {
@Override
public boolean onPreDraw() {
if (isDirty()) { // the View or one of its children has changed
invalidate();
}
return true;
}
});
}
}
}
3. Override the View's onDraw method:
@Override
protected void onDraw(Canvas canvas) {
try {
if (mSurface != null) {
// draw this view's children into the Surface that wraps the SurfaceTexture
Canvas surfaceCanvas = mSurface.lockCanvas(null);
super.dispatchDraw(surfaceCanvas);
mSurface.unlockCanvasAndPost(surfaceCanvas);
// release and recreate the Surface so the next frame starts clean
mSurface.release();
mSurface = null;
mSurface = new Surface(mSurfaceTexture);
}
}
} catch (OutOfResourcesException e) {
e.printStackTrace();
}
}
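Putting step 1 together, the custom root View looks roughly like the sketch below. The original project uses a LauncherAppWidgetHostView as this root view; here a plain FrameLayout subclass stands in for it, and the setSurface()/setSurfaceTexture() setters are assumed names for the methods the renderer in step 2 calls on addedWidgetView.
// Sketch only: a FrameLayout-based root view wired up as described in step 1.
public class GLRenderableLayout extends FrameLayout {
    private Surface mSurface;
    private SurfaceTexture mSurfaceTexture;

    public GLRenderableLayout(Context context) {
        super(context);
        setWillNotDraw(false);   // step 1.1: let this ViewGroup run its own onDraw
        addOnPreDrawListener();  // step 1.2: invalidate whenever the subtree becomes dirty
    }

    // Called from the renderer once the SurfaceTexture/Surface pair exists.
    public void setSurface(Surface surface) {
        mSurface = surface;
    }

    public void setSurfaceTexture(SurfaceTexture surfaceTexture) {
        mSurfaceTexture = surfaceTexture;
    }

    // addOnPreDrawListener() and onDraw() are exactly the methods listed above.
}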
Step 2: The GLSurfaceView.Renderer
class CustomRenderer implements GLSurfaceView.Renderer {
int glSurfaceTex;
private final int GL_TEXTURE_EXTERNAL_OES = 0x8D65; // same value as GLES11Ext.GL_TEXTURE_EXTERNAL_OES
long currentTime;
long previousTime;
boolean fpsTimerStarted = false;
int frameCount = 0;
DirectDrawer mDirectDrawer;
ActivityManager activityManager;
MemoryInfo _memoryInfo;
// Default texture size; overwritten with the display size in the constructor
private int TEXTURE_WIDTH = 360;
private int TEXTURE_HEIGHT = 360;
Context context;
private LauncherAppWidgetHostView addedWidgetView;
private SurfaceTexture surfaceTexture = null;
private Surface surface;
float fps;
public CustomRenderer(Context context, LauncherAppWidgetHostView addedWidgetView, Display mDisplay){
this.context = context;
this.addedWidgetView = addedWidgetView;
TEXTURE_WIDTH = mDisplay.getWidth();
TEXTURE_HEIGHT = mDisplay.getHeight();
_memoryInfo = new MemoryInfo();
activityManager = (ActivityManager) context.getApplicationContext().getSystemService(Context.ACTIVITY_SERVICE);
}
@Override
public void onDrawFrame(GL10 gl) {
if (surfaceTexture == null || mDirectDrawer == null) {
// onSurfaceCreated did not manage to create the texture; nothing to draw yet
return;
}
synchronized (this) {
// pull the latest frame posted by the View's onDraw into the GL texture
surfaceTexture.updateTexImage();
}
activityManager.getMemoryInfo(_memoryInfo);
GLES20.glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
float[] mtx = new float[16];
surfaceTexture.getTransformMatrix(mtx);
mDirectDrawer.draw(mtx);
calculateFps();
//getAppMemorySize();
//getRunningAppProcessInfo();
//Log.v("onDrawFrame", "FPS: " + Math.round(fps) + ", availMem: " + Math.round(_memoryInfo.availMem / 1048576) + "MB");
}
private void getAppMemorySize(){
ActivityManager mActivityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
android.os.Debug.MemoryInfo[] memoryInfos = mActivityManager.getProcessMemoryInfo(new int[]{android.os.Process.myPid()});
int size = memoryInfos[0].dalvikPrivateDirty;
Log.w("getAppMemorySize", size / 1024 + " MB");
}
private void getRunningAppProcessInfo() {
ActivityManager mActivityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
// get all processes currently running on the system
List<RunningAppProcessInfo> runningAppProcessesList = mActivityManager.getRunningAppProcesses();
for (RunningAppProcessInfo runningAppProcessInfo : runningAppProcessesList) {
// process ID
int pid = runningAppProcessInfo.pid;
// user ID
int uid = runningAppProcessInfo.uid;
// process name
String processName = runningAppProcessInfo.processName;
// memory used by the process
int[] pids = new int[] {pid};
Debug.MemoryInfo[] memoryInfo = mActivityManager.getProcessMemoryInfo(pids);
int memorySize = memoryInfo[0].dalvikPrivateDirty;
System.out.println("processName="+processName+",currentPid: "+ "pid= " +android.os.Process.myPid()+"----------->"+pid+",uid="+uid+",memorySize="+memorySize+"kb");
}
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
surface = null;
surfaceTexture = null;
glSurfaceTex = Engine_CreateSurfaceTexture(TEXTURE_WIDTH, TEXTURE_HEIGHT);
Log.d("GLES20Ext", "glSurfaceTex" + glSurfaceTex);
if (glSurfaceTex > 0) {
surfaceTexture = new SurfaceTexture(glSurfaceTex);
surfaceTexture.setDefaultBufferSize(TEXTURE_WIDTH, TEXTURE_HEIGHT);
surface = new Surface(surfaceTexture);
addedWidgetView.setSurface(surface);
addedWidgetView.setSurfaceTexture(surfaceTexture);
mDirectDrawer = new DirectDrawer(glSurfaceTex);
}
}
float calculateFps() {
frameCount++;
if (!fpsTimerStarted) {
fpsTimerStarted = true;
previousTime = System.currentTimeMillis();
}
long intervalTime = System.currentTimeMillis() - previousTime;
if (intervalTime >= 1000) {
fpsTimerStarted = false;
fps = frameCount / (intervalTime / 1000f);
frameCount = 0;
Log.w("calculateFps", "FPS: " + fps);
}
return fps;
}
int Engine_CreateSurfaceTexture(int width, int height) {
/*
* Create our texture. This has to be done each time the surface is
* created.
*/
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
glSurfaceTex = textures[0];
if (glSurfaceTex > 0) {
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, glSurfaceTex);
// No glTexImage2D call is needed here: for an external (GL_TEXTURE_EXTERNAL_OES)
// texture the image data is supplied by the SurfaceTexture via updateTexImage()
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}
return glSurfaceTex;
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// match the viewport to the new surface size
GLES20.glViewport(0, 0, width, height);
}
}
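To get the "only render when the View actually changed" behavior described in the introduction, one option (my addition, not part of the original post) is to run the GLSurfaceView in RENDERMODE_WHEN_DIRTY and let the SurfaceTexture drive it: every frame posted by the View's onDraw then triggers exactly one GL redraw.
// Sketch: register right after the SurfaceTexture is created in onSurfaceCreated(),
// assuming the renderer holds a reference to the GLSurfaceView (glSurfaceView).
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        // A new frame was posted by the View's onDraw(); schedule one onDrawFrame() call.
        glSurfaceView.requestRender();
    }
});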
public class DirectDrawer {
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"attribute vec2 inputTextureCoordinate;" +
"varying vec2 textureCoordinate;" +
"void main()" +
"{"+
"gl_Position = vPosition;"+
"textureCoordinate = inputTextureCoordinate;" +
"}";
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n"+
"precision mediump float;" +
"varying vec2 textureCoordinate;\n" +
"uniform samplerExternalOES s_texture;\n" +
"void main() {" +
" gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
"}";
private FloatBuffer vertexBuffer, textureVerticesBuffer;
private ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mTextureCoordHandle;
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
// number of coordinates per vertex in this array
private static final int COORDS_PER_VERTEX = 2;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 2 floats per vertex, 4 bytes per float = 8 bytes
static float squareCoords[] = {
-1.0f, 0.0f,
-1.0f, -2.2f,
1.0f, -2.2f,
1.0f, 0.0f,
};
static float textureVertices[] = {
0f, 0f,
0f, 1f,
1f, 1f,
1f, 0f,
};
private int texture;
public DirectDrawer(int texture)
{
this.texture = texture;
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
bb2.order(ByteOrder.nativeOrder());
textureVerticesBuffer = bb2.asFloatBuffer();
textureVerticesBuffer.put(textureVertices);
textureVerticesBuffer.position(0);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL ES program executables
}
public void draw(float[] mtx)
{
GLES20.glUseProgram(mProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
// textureVerticesBuffer.clear();
// textureVerticesBuffer.put( transformTextureCoordinates(
// textureVertices, mtx ));
// textureVerticesBuffer.position(0);
GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureVerticesBuffer);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
}
private int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
private float[] transformTextureCoordinates( float[] coords, float[] matrix)
{
float[] result = new float[ coords.length ];
float[] vt = new float[4];
for ( int i = 0 ; i < coords.length ; i += 2 ) {
float[] v = { coords[i], coords[i+1], 0 , 1 };
Matrix.multiplyMV(vt, 0, matrix, 0, v, 0);
result[i] = vt[0];
result[i+1] = vt[1];
}
return result;
}
}
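Note that draw() receives the SurfaceTexture transform matrix mtx but, with the block above commented out, never applies it; that happens to work here because the texture coordinates are hard-coded to match. To honor the matrix instead, re-enable that block, roughly:
// Inside draw(float[] mtx), before uploading the texture coordinates:
textureVerticesBuffer.clear();
textureVerticesBuffer.put(transformTextureCoordinates(textureVertices, mtx));
textureVerticesBuffer.position(0);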
Step 3: Configure the GLSurfaceView:
GLSurfaceView glSurfaceView = new GLSurfaceView(getApplicationContext());
// Setup the surface view for drawing to
glSurfaceView.setEGLContextClientVersion(2);
glSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
glSurfaceView.setRenderer(renderer);
//glSurfaceView.setZOrderOnTop(true);
// Add the GLSurfaceView to the Android View hierarchy
glSurfaceView.setLayoutParams(new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT));
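The renderer variable passed to setRenderer() above is the CustomRenderer from step 2. A sketch of the wiring the snippet leaves out, assuming this runs inside an Activity and that addedWidgetView is the custom root view from step 1; the construction has to happen before setRenderer(), the optional render-mode call after it.
// Assumed setup, executed before glSurfaceView.setRenderer(renderer):
Display display = getWindowManager().getDefaultDisplay();
CustomRenderer renderer = new CustomRenderer(getApplicationContext(), addedWidgetView, display);

// Optional, after setRenderer(): render on demand instead of continuously,
// paired with the OnFrameAvailableListener sketch from step 2.
// glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);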
The result is shown in the figure: on top is the Clean Master (猎豹清理大师) widget, below is the texture in the GLSurfaceView, and the native widget and the one rendered in OpenGL look exactly the same. Tapping the widget to start a clean-up also renders the View's clean-up animation in real time. Source code: https://github.com/MrHuangXin/RenderViewToOpenGL/tree/master