// ==== GLTutorialBase.java ====
package com.macrocheng.cubeopengl;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGL11;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import javax.microedition.khronos.opengles.GL10;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLU;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class GLTutorialBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
protected EGLContext glContext;
// Optional animator; not assigned in this file (see onAttachedToWindow)
protected ViewAnimator animator;
protected SurfaceHolder sHolder;
// Render thread: created in surfaceCreated(), joined in surfaceDestroyed()
protected Thread t;
protected boolean running;
int width;
int height;
boolean resize;
int fps;
/**
* Make a direct NIO FloatBuffer from an array of floats
* @param arr The array
* @return The newly created FloatBuffer
*/
protected static FloatBuffer makeFloatBuffer(float[] arr) {
// Allocate a new direct byte buffer (required for OpenGL ES pointer calls)
ByteBuffer bb = ByteBuffer.allocateDirect(arr.length*4);
// Use the underlying platform's native byte order
bb.order(ByteOrder.nativeOrder());
// View it as a float buffer
FloatBuffer fb = bb.asFloatBuffer();
fb.put(arr);
fb.position(0);
return fb;
}
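// Usage sketch (hypothetical data): the resulting direct buffer can be
// handed straight to the GL10 pointer calls, e.g.
//   FloatBuffer verts = makeFloatBuffer(new float[] { 0f,0f,0f, 1f,0f,0f, 0f,1f,0f });
//   gl.glVertexPointer(3, GL10.GL_FLOAT, 0, verts);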
/**
* Make a direct NIO IntBuffer from an array of ints
* @param arr The array
* @return The newly created IntBuffer
*/
protected static IntBuffer makeIntBuffer(int[] arr) {
ByteBuffer bb = ByteBuffer.allocateDirect(arr.length*4);
bb.order(ByteOrder.nativeOrder());
IntBuffer ib = bb.asIntBuffer();
ib.put(arr);
ib.position(0);
return ib;
}
/**
 * Create a texture and send it to the graphics system,
 * without reversing the RGB values.
 * @param gl The GL object
 * @param bmp The bitmap of the texture
 * @return The newly created identifier for the texture.
 */
protected static int loadTexture(GL10 gl, Bitmap bmp) {
return loadTexture(gl, bmp, false);
}
/**
* Create a texture and send it to the graphics system
* @param gl The GL object
* @param bmp The bitmap of the texture
* @param reverseRGB Should the RGB values be reversed? (necessary workaround for loading .pngs...)
* @return The newly created identifier for the texture.
*/
protected static int loadTexture(GL10 gl, Bitmap bmp, boolean reverseRGB) {
ByteBuffer bb = ByteBuffer.allocateDirect(bmp.getHeight()*bmp.getWidth()*4);
bb.order(ByteOrder.BIG_ENDIAN);
IntBuffer ib = bb.asIntBuffer();
// The loop runs y backwards, but getPixel() re-inverts the index, so the
// rows land in the buffer top-to-bottom.
for (int y = bmp.getHeight()-1; y > -1; y--) {
for (int x = 0; x < bmp.getWidth(); x++) {
int pix = bmp.getPixel(x, bmp.getHeight()-y-1);
// Convert ARGB -> RGBA, masking each channel to 8 bits so sign
// extension of high values cannot corrupt the neighboring channels
int red = (pix >> 16) & 0xFF;
int green = (pix >> 8) & 0xFF;
int blue = pix & 0xFF;
if (reverseRGB) {
// Swap red and blue (workaround for .pngs loaded with the channels reversed)
int t = red; red = blue; blue = t;
}
// Alpha seems to be broken in Android here, so force it to 0xFF
ib.put(red << 24 | green << 16 | blue << 8 | 0xFF);
}
}
ib.position(0);
bb.position(0);
int[] tmp_tex = new int[1];
gl.glGenTextures(1, tmp_tex, 0);
int tex = tmp_tex[0];
gl.glBindTexture(GL10.GL_TEXTURE_2D, tex);
gl.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGBA, bmp.getWidth(), bmp.getHeight(), 0, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, bb);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
return tex;
}
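// Usage sketch (R.drawable.example is a hypothetical resource name):
//   Bitmap b = BitmapFactory.decodeResource(context.getResources(), R.drawable.example);
//   int texId = loadTexture(gl, b);
//   gl.glBindTexture(GL10.GL_TEXTURE_2D, texId);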
/**
* Constructor
* @param c The View's context.
*/
public GLTutorialBase(Context c) {
this(c, -1);
}
/**
* Constructor for animated views
* @param c The View's context
* @param fps The frames per second for the animation.
*/
public GLTutorialBase(Context c, int fps) {
super(c);
Log.i("GLTutorialBase", "GLTutorialBase");
sHolder = getHolder();
sHolder.addCallback(this);
// Required on early Android versions so EGL can render into this surface
sHolder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
this.fps = fps;
}
@Override
protected void onAttachedToWindow() {
if (animator != null) {
// If we're animated, start the animation
animator.start();
}
super.onAttachedToWindow();
}
@Override
protected void onDetachedFromWindow() {
if (animator != null) {
// If we're animated, stop the animation
animator.stop();
}
super.onDetachedFromWindow();
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
synchronized (this) {
this.width = width;
this.height = height;
this.resize = true;
}
}
public void surfaceCreated(SurfaceHolder holder) {
// The surface is ready: start the render thread
t = new Thread(this);
t.start();
}
public void surfaceDestroyed(SurfaceHolder arg0) {
// Ask the render loop to exit, then wait for the thread to finish
running = false;
try {
t.join();
}
catch (InterruptedException ex) {}
t = null;
}
public void run() {
// Much of this code is from GLSurfaceView in the Google API Demos.
// I encourage those interested to look there for documentation.
// Get an EGL instance
EGL10 egl = (EGL10)EGLContext.getEGL();
// Get the default display connection
EGLDisplay dpy = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
// Initialize the EGL display
int[] version = new int[2];
egl.eglInitialize(dpy, version);
int[] configSpec = {
// RGB565 color with a 16-bit depth buffer
EGL10.EGL_RED_SIZE, 5,
EGL10.EGL_GREEN_SIZE, 6,
EGL10.EGL_BLUE_SIZE, 5,
EGL10.EGL_DEPTH_SIZE, 16,
EGL10.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] num_config = new int[1];
// Choose a config matching the spec above to create the OpenGL ES environment
egl.eglChooseConfig(dpy, configSpec, configs, 1, num_config);
EGLConfig config = configs[0];
EGLContext context = egl.eglCreateContext(dpy, config,
EGL10.EGL_NO_CONTEXT, null);
// Create a new window surface tied to this view's SurfaceHolder
EGLSurface surface = egl.eglCreateWindowSurface(dpy, config, sHolder, null);
// Make the OpenGL ES context current on this thread
egl.eglMakeCurrent(dpy, surface, surface, context);
// Get the GL10 interface for the context
GL10 gl = (GL10)context.getGL();
init(gl);
// Frame interval in milliseconds; -1 disables frame-rate limiting
int delta = -1;
if (fps > 0) {
delta = 1000/fps;
}
long time = System.currentTimeMillis();
running = true;
while (running) {
int w, h;
synchronized(this) {
w = width;
h = height;
}
if (System.currentTimeMillis()-time < delta) {
try {
// Sleep for the remainder of the frame interval
Thread.sleep(delta - (System.currentTimeMillis()-time));
}
catch (InterruptedException ex) {}
}
drawFrame(gl, w, h);
// Present the rendered frame on screen
egl.eglSwapBuffers(dpy, surface);
// If the EGL context was lost (e.g. the device went to sleep), finish the Activity
if (egl.eglGetError() == EGL11.EGL_CONTEXT_LOST) {
Context c = getContext();
if (c instanceof Activity) {
((Activity)c).finish();
}
}
time = System.currentTimeMillis();
}
// Tear down EGL: release the current context, then destroy the surface,
// context, and display connection
egl.eglMakeCurrent(dpy, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
egl.eglDestroySurface(dpy, surface);
egl.eglDestroyContext(dpy, context);
egl.eglTerminate(dpy);
}
private void drawFrame(GL10 gl, int w, int h) {
// Apply any pending surface resize before drawing
if (resize) {
resize(gl, w, h);
resize = false;
}
drawFrame(gl);
}
protected void resize(GL10 gl, int w, int h) {
// Apply subsequent matrix operations to the projection matrix
gl.glMatrixMode(GL10.GL_PROJECTION);
// Reset the current matrix to the identity matrix
gl.glLoadIdentity();
gl.glViewport(0, 0, w, h);
// 45-degree vertical field of view; near and far clip planes at 1 and 100
GLU.gluPerspective(gl, 45.0f, ((float)w)/h, 1f, 100f);
}
protected void init(GL10 gl) {}
protected abstract void drawFrame(GL10 gl);
}
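// To use GLTutorialBase, subclass it and override init(GL10) for one-time GL
// state and drawFrame(GL10) for per-frame rendering, as GLTutorialCube does
// below.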
// ==== GLTutorialCube.java ====
package com.macrocheng.cubeopengl;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLU;
import android.opengl.GLUtils;
import android.util.Log;
import android.view.MotionEvent;
/**
 * A rotating, textured cube, based on the transparency tutorial at
 * http://www.zeuscmd.com/tutorials/opengles/20-Transparency.php
 * @author bburns
 */
public class GLTutorialCube extends GLTutorialBase {
float lightAmbient[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
float lightDiffuse[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
float[] lightPos = new float[] {0,0,3,1};
float matAmbient[] = new float[] { 1f, 1f, 1f, 1.0f };
float matDiffuse[] = new float[] { 1f, 1f, 1f, 1.0f };
int tex;
Bitmap[] bmp = new Bitmap[6];
int[] tmp_tex = new int[6];
// Cube faces (front, back, left, right, top, bottom), each as 4 vertices
// ordered for GL_TRIANGLE_STRIP
float box[][] = new float[][] {
{
-0.5f, -0.5f, 0.5f,
0.5f, -0.5f, 0.5f,
-0.5f, 0.5f, 0.5f,
0.5f, 0.5f, 0.5f,
},
{
-0.5f, -0.5f, -0.5f,
-0.5f, 0.5f, -0.5f,
0.5f, -0.5f, -0.5f,
0.5f, 0.5f, -0.5f,
},
{
-0.5f, -0.5f, 0.5f,
-0.5f, 0.5f, 0.5f,
-0.5f, -0.5f, -0.5f,
-0.5f, 0.5f, -0.5f,
},
{
0.5f, -0.5f, -0.5f,
0.5f, 0.5f, -0.5f,
0.5f, -0.5f, 0.5f,
0.5f, 0.5f, 0.5f,
},
{
-0.5f, 0.5f, 0.5f,
0.5f, 0.5f, 0.5f,
-0.5f, 0.5f, -0.5f,
0.5f, 0.5f, -0.5f,
},
{
-0.5f, -0.5f, 0.5f,
-0.5f, -0.5f, -0.5f,
0.5f, -0.5f, 0.5f,
0.5f, -0.5f, -0.5f,
},
};
// Per-face texture coordinates matching the strip ordering above
float texCoords[][] = new float[][] {
{
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
},
{
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
},
{
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
},
{
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
},
{
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
},
{
// BOTTOM
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
}
};
FloatBuffer[] cubeBuff = new FloatBuffer[6];
FloatBuffer[] texBuff = new FloatBuffer[6];
public GLTutorialCube(Context c) {
// Animate at 20 frames per second
super(c, 20);
// Build one vertex buffer and one texture-coordinate buffer per face
for (int i = 0; i < box.length; i++) {
cubeBuff[i] = makeFloatBuffer(box[i]);
texBuff[i] = makeFloatBuffer(texCoords[i]);
}
bmp[0] = BitmapFactory.decodeResource(c.getResources(), R.drawable.aa);
bmp[1] = BitmapFactory.decodeResource(c.getResources(), R.drawable.bb);
bmp[2] = BitmapFactory.decodeResource(c.getResources(), R.drawable.cc);
bmp[3] = BitmapFactory.decodeResource(c.getResources(), R.drawable.dd);
bmp[4] = BitmapFactory.decodeResource(c.getResources(), R.drawable.ee);
bmp[5] = BitmapFactory.decodeResource(c.getResources(), R.drawable.ff);
Log.i("GLTutorialCube", "GLTutorialCube");
setFocusable(true);
Log.i("GLTutorialCube", "GLTutorialCube");
}
protected void init(GL10 gl) {
Log.i("GLTutorialCube", "init");
// Lighting: one ambient/diffuse light plus white material on both faces
gl.glEnable(GL10.GL_LIGHTING);
gl.glEnable(GL10.GL_LIGHT0);
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK, GL10.GL_AMBIENT, matAmbient, 0);
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK, GL10.GL_DIFFUSE, matDiffuse, 0);
gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_AMBIENT, lightAmbient, 0);
gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_DIFFUSE, lightDiffuse, 0);
gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_POSITION, lightPos, 0);
gl.glEnable(GL10.GL_BLEND);
// gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f);
gl.glClearDepthf(1.0f);
gl.glEnable(GL10.GL_DEPTH_TEST);
gl.glDepthFunc(GL10.GL_LEQUAL);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
// One texture per cube face
gl.glGenTextures(6, tmp_tex, 0);
for (int i = 0; i < 6; i++) {
gl.glBindTexture(GL10.GL_TEXTURE_2D, tmp_tex[i]);
// Filters are per-texture state, so set them for each bound texture;
// without a non-mipmap MIN filter the textures would be incomplete
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
// GLUtils.texImage2D converts the Bitmap's ARGB data for GL itself
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bmp[i], 0);
}
}
// Current rotation around the x- and y-axes, in degrees
float xrot = 45.0f;
float yrot = 45.0f;
protected void drawFrame(GL10 gl) {
Log.i("GLTutorialCube", "drawFrame");
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
// Camera at (0,0,3) looking at the origin, y-axis up
GLU.gluLookAt(gl, 0, 0, 3, 0, 0, 0, 0, 1, 0);
gl.glRotatef(xrot, 1, 0, 0);
gl.glRotatef(yrot, 0, 1, 0);
// Draw each face as a 4-vertex triangle strip with its own texture
for (int i = 0; i < 6; i++) {
gl.glBindTexture(GL10.GL_TEXTURE_2D, tmp_tex[i]);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, cubeBuff[i]);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texBuff[i]);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
}
// Auto-rotate 2 degrees per frame in the direction selected by flag
switch (flag) {
case 1: xrot += 2.0f; break;
case 2: xrot -= 2.0f; break;
case 3: yrot += 2.0f; break;
case 4: yrot -= 2.0f; break;
}
// Stop the auto-rotation once the angle reaches a multiple of 90 degrees
if ((flag == 1 || flag == 2) && (int)(xrot % 90) == 0) {
flag = 0;
}
if ((flag == 3 || flag == 4) && (int)(yrot % 90) == 0) {
flag = 0;
}
}
// Last touch position, used to compute drag deltas in onTouchEvent()
float _x;
float _y;
// Auto-rotation direction read by drawFrame() (1: +x, 2: -x, 3: +y, 4: -y);
// nothing in this file sets it to a non-zero value, so it is presumably
// driven by key or menu handling elsewhere
int flag;
public boolean onTouchEvent(final MotionEvent event) {
Log.i("GLTutorialCube", "onTouchEvent");
if (event.getAction() == MotionEvent.ACTION_DOWN) {
_x = event.getX();
_y = event.getY();
}
if (event.getAction() == MotionEvent.ACTION_MOVE) {
// Rotate by the drag delta, then record the new position so each move
// event contributes only its own delta instead of compounding
yrot += (_x - event.getX()) / 10;
xrot += (_y - event.getY()) / 10;
_x = event.getX();
_y = event.getY();
}
return true;
}
}
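// ==== CubeActivity.java (illustrative sketch, not part of the original
// sources) ====
// A minimal Activity hosting the cube view. The class name is an assumption;
// any Activity that sets a GLTutorialCube instance as its content view works.
package com.macrocheng.cubeopengl;

import android.app.Activity;
import android.os.Bundle;

public class CubeActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Fill the window with the animated cube view (20 fps, see its constructor)
setContentView(new GLTutorialCube(this));
}
}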