Blending the camera preview with OpenGL ES rendering (AR basics)

This post is a quick walkthrough of blending the camera's live preview with content drawn by OpenGL ES. Put simply, it boils down to overlaying a SurfaceView (the camera preview) and a GLSurfaceView (the GL content) in the same FrameLayout. The activity_main layout only needs a FrameLayout with the id framlayout; both surfaces are created and added in code. On to the code:

package com.scy.cameraopengl;

import java.io.IOException;

import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.app.Activity;
import android.view.Display;
import android.view.Menu;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.Window;
import android.view.SurfaceHolder.Callback;
import android.view.ViewGroup.LayoutParams;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.FrameLayout;

public class MainActivity extends Activity 
{
	FrameLayout frameLayout;
	
	Camera camera;
	SurfaceView surface;
	SurfaceHolder holder;
	GLSurfaceView surfaceView;
	MyRender render;
	
	int width,height;
	boolean isPreview = false;
	@Override
	protected void onCreate(Bundle savedInstanceState) 
	{
		super.onCreate(savedInstanceState);
		requestWindowFeature(Window.FEATURE_NO_TITLE);
		getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
		WindowManager wManager = (WindowManager)getSystemService(WINDOW_SERVICE);
		Display display = wManager.getDefaultDisplay();
		width = display.getWidth();
		height = display.getHeight();		
		setContentView(R.layout.activity_main);
		frameLayout = (FrameLayout)findViewById(R.id.framlayout);
		
		/**
		 * Add the GLSurfaceView first. It is requested with an alpha channel and a
		 * translucent pixel format so the camera preview underneath can show through.
		 * Note: on some devices the GL layer may also need surfaceView.setZOrderMediaOverlay(true)
		 * to be composited above the camera surface.
		 */
		surfaceView = new GLSurfaceView(this);
		surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
		/**
		 * Set the desired PixelFormat of the surface. The default is OPAQUE. When working with a SurfaceView, 
		 * this must be called from the same thread running the SurfaceView's window.
		 */
		surfaceView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
		render = new MyRender();
		surfaceView.setRenderer(render);
		frameLayout.addView(surfaceView, new LayoutParams(LayoutParams.FILL_PARENT,LayoutParams.FILL_PARENT));
		/**
		 * Then add the SurfaceView layer that will hold the camera preview
		 */
		surface = new SurfaceView(this);
		holder = surface.getHolder();
		holder.addCallback(new Callback() 
		{			
			@Override
			public void surfaceDestroyed(SurfaceHolder holder) 
			{
				if (camera!=null) 
				{
					if (isPreview) 
					{
						camera.stopPreview();
					}
					camera.release();
					camera = null;
				}
			}
			
			@Override
			public void surfaceCreated(SurfaceHolder holder) 
			{
				if (!isPreview) 
				{
					// Opening the camera requires android.permission.CAMERA in the manifest
					camera = Camera.open();
				}
				if (!isPreview && camera != null) 
				{
					try {
						Parameters parameters = camera.getParameters();
						// The camera must actually support these exact sizes, otherwise
						// setParameters throws; see the size-selection sketch after this class
						parameters.setPictureSize(width, height);
						parameters.setPreviewSize(width, height);
						parameters.setPreviewFrameRate(6);
						camera.setParameters(parameters);
						camera.setPreviewDisplay(holder);
						camera.startPreview();
						camera.autoFocus(null);
					} catch (IOException e) {
						e.printStackTrace();
					}
					isPreview = true;
				}				
			}
			
			@Override
			public void surfaceChanged(SurfaceHolder holder, int format, int width,
					int height) 
			{				
			}
		});
		// Required for camera preview on pre-3.0 devices; ignored (deprecated) on newer versions
		holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
		frameLayout.addView(surface, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));		
		
	}

	@Override
	protected void onResume() 
	{
		super.onResume();
		// GLSurfaceView needs to be told about the activity lifecycle
		surfaceView.onResume();
	}

	@Override
	protected void onPause() 
	{
		super.onPause();
		surfaceView.onPause();
	}

}
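A practical note on the preview size: passing the raw screen dimensions to setPreviewSize/setPictureSize only works if the camera happens to support those exact sizes; on many devices camera.setParameters will throw a RuntimeException otherwise. Below is a minimal sketch of picking the closest supported size instead (chooseBestPreviewSize is a hypothetical helper, not part of the original code):

	// Hypothetical helper: pick the supported preview size closest to the target dimensions
	private Camera.Size chooseBestPreviewSize(Camera.Parameters parameters, int targetW, int targetH) 
	{
		Camera.Size best = null;
		long bestDiff = Long.MAX_VALUE;
		for (Camera.Size size : parameters.getSupportedPreviewSizes()) 
		{
			long diff = Math.abs((long) size.width * size.height - (long) targetW * targetH);
			if (diff < bestDiff) 
			{
				bestDiff = diff;
				best = size;
			}
		}
		return best;
	}

	// Usage inside surfaceCreated(), before camera.setParameters(parameters):
	// Camera.Size best = chooseBestPreviewSize(parameters, width, height);
	// parameters.setPreviewSize(best.width, best.height);

Next up is the renderer that draws the triangle on top of the preview.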


 

package com.scy.cameraopengl;

import java.nio.FloatBuffer;
import java.nio.IntBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.opengl.GLSurfaceView.Renderer;

public class MyRender implements Renderer 
{	
	// Triangle vertices (x, y, z)
	float[] vertices = new float[]
			{
				0.1f,0.6f,0.0f,
				-0.3f,0.0f,0.0f,
				0.3f,0.1f,0.0f
			};
	// Per-vertex RGBA colors in 16.16 fixed point (65536 == 1.0); alpha is fully
	// opaque so the triangle stays visible on the translucent GL surface
	int[] colors = new int[]
			{
				65536,0,0,65536,
				0,65536,0,65536,
				0,0,65536,65536
			};
	
	// MemUtil wraps the arrays in direct NIO buffers (a sketch of it is shown after this class)
	FloatBuffer vBuffer = MemUtil.makeFloatBuffer(vertices);
	IntBuffer cBuffer = MemUtil.makeIntBuffer(colors);
	
	@Override
	public void onDrawFrame(GL10 gl) 
	{
		gl.glClear(GL10.GL_COLOR_BUFFER_BIT|GL10.GL_DEPTH_BUFFER_BIT);
		gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
		gl.glMatrixMode(GL10.GL_MODELVIEW);
		gl.glLoadIdentity();
		// Push the triangle back so it falls inside the frustum set up in onSurfaceChanged
		gl.glTranslatef(0.0f, 0.0f, -1.0f);
		gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vBuffer);
		gl.glColorPointer(4, GL10.GL_FIXED, 0, cBuffer);
		gl.glDrawArrays(GL10.GL_TRIANGLES, 0, 3);
		gl.glFinish();
		gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
	}

	@Override
	public void onSurfaceChanged(GL10 gl, int width, int height) 
	{
		gl.glViewport(0, 0, width, height);
		gl.glMatrixMode(GL10.GL_PROJECTION);
		gl.glLoadIdentity();
		float ratio = (float)width/height;
		gl.glFrustumf(-ratio, ratio, -1, 1, 1, 9);
	}

	@Override
	public void onSurfaceCreated(GL10 gl, EGLConfig config) 
	{
		gl.glDisable(GL10.GL_DITHER);
		gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);
		// Clear to fully transparent so the camera preview shows through the GL layer
		gl.glClearColor(0, 0, 0, 0);
		gl.glShadeModel(GL10.GL_SMOOTH);
		gl.glEnable(GL10.GL_DEPTH_TEST);
		gl.glDepthFunc(GL10.GL_LEQUAL);
	}

}
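The renderer references a MemUtil helper whose source isn't included in the post. Presumably it just copies the arrays into direct, native-ordered NIO buffers, which is what OpenGL ES expects; here is a minimal sketch of what it might look like:

package com.scy.cameraopengl;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;

// Sketch of the MemUtil helper used by MyRender (the original implementation is not shown)
public class MemUtil 
{
	// Copy a float[] into a direct buffer with native byte order
	public static FloatBuffer makeFloatBuffer(float[] data) 
	{
		ByteBuffer bb = ByteBuffer.allocateDirect(data.length * 4);
		bb.order(ByteOrder.nativeOrder());
		FloatBuffer fb = bb.asFloatBuffer();
		fb.put(data);
		fb.position(0);
		return fb;
	}

	// Same for int[] (used here for the GL_FIXED color data)
	public static IntBuffer makeIntBuffer(int[] data) 
	{
		ByteBuffer bb = ByteBuffer.allocateDirect(data.length * 4);
		bb.order(ByteOrder.nativeOrder());
		IntBuffer ib = bb.asIntBuffer();
		ib.put(data);
		ib.position(0);
		return ib;
	}
}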


When you run it, you see the real scene captured by the camera with an OpenGL-drawn triangle rendered on top of it. Combine this with the device's orientation sensors and you have the skeleton of a simple augmented-reality app; a rough idea of the sensor hookup is sketched below. Give it a try if you are interested.
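For the sensor part, one option (only a sketch; setRotationMatrix is a hypothetical method you would add to MyRender, it is not in the code above) is to listen to the rotation-vector sensor in the activity and let the renderer load the resulting matrix instead of the identity:

// In MainActivity (e.g. at the end of onCreate); requires the android.hardware.Sensor* imports
SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE);
Sensor rotationSensor = sm.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
sm.registerListener(new SensorEventListener() 
{
	@Override
	public void onSensorChanged(SensorEvent event) 
	{
		float[] matrix = new float[16];
		// Convert the rotation vector into a 4x4 rotation matrix
		SensorManager.getRotationMatrixFromVector(matrix, event.values);
		render.setRotationMatrix(matrix);   // hypothetical setter that stores the matrix in MyRender
	}

	@Override
	public void onAccuracyChanged(Sensor sensor, int accuracy) { }
}, rotationSensor, SensorManager.SENSOR_DELAY_GAME);

// In MyRender.onDrawFrame(), the stored matrix would then replace glLoadIdentity():
//   gl.glLoadMatrixf(rotationMatrix, 0);
//   gl.glTranslatef(0.0f, 0.0f, -1.0f);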
