Preface: A recent project required an external USB camera, and the company provided a simple demo for reference. When I actually ran it, though, colored bands kept flickering along both sides of the preview screen. At first I suspected the low-level driver, but a closer look showed the problem was in the code. The demo's design is to draw a full-screen custom camera view and start a thread that converts each camera frame into a bitmap with a 4:3 aspect ratio and draws it onto the view frame by frame, producing a live preview. Because the destination rectangle of the bitmap is smaller than the actual canvas, the uncovered strips on both sides keep flickering.
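To make the mismatch concrete, here is a minimal sketch of the letterbox math the demo uses (the 1920x1080 surface size is hypothetical, not something taken from the demo). Fitting a 4:3 frame into such a surface by height gives a 1440x1080 destination rectangle, so a 240-pixel strip on each side is never written by drawBitmap() and simply shows whatever is left in the surface buffer:

    // Sketch only: mirrors the rect computation in the demo's run() loop.
    // Returns {left, top, right, bottom} of the 4:3 rect centered in the surface.
    static int[] letterbox43(int surfaceW, int surfaceH) {
        if (surfaceW * 3 / 4 <= surfaceH) {
            // Surface is taller than 4:3 -> pad top and bottom.
            int dh = (surfaceH - surfaceW * 3 / 4) / 2;
            return new int[]{0, dh, surfaceW, dh + surfaceW * 3 / 4};
        } else {
            // Surface is wider than 4:3 -> pad left and right (the flickering strips).
            int dw = (surfaceW - surfaceH * 4 / 3) / 2;
            return new int[]{dw, 0, dw + surfaceH * 4 / 3, surfaceH};
        }
    }
    // letterbox43(1920, 1080) -> {240, 0, 1680, 1080}: 240 px on each side stays undrawn.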
Original demo code (Main.java):
package com.camera.simplewebcam;

import android.app.Activity;
import android.os.Bundle;

public class Main extends Activity {
    CameraPreview cp;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        cp = (CameraPreview) findViewById(R.id.cp);
    }
}
CameraPreview.java:

package com.camera.simplewebcam;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, Runnable {
    private static final boolean DEBUG = true;
    private static final String TAG = "WebCam";

    protected Context context;
    private SurfaceHolder holder;
    Thread mainLoop = null;
    private Bitmap bmp = null;

    private boolean cameraExists = false;
    private boolean shouldStop = false;

    // /dev/videox (x=cameraId+cameraBase) is used.
    // In some omap devices, system uses /dev/video[0-3],
    // so users must use /dev/video[4-].
    // In such a case, try cameraId=0 and cameraBase=4
    private int cameraId = 0;
    private int cameraBase = 9;

    // This definition also exists in ImageProc.h.
    // Webcam must support the resolution 1600x1200 with YUYV format.
    static final int IMG_WIDTH = 1600;
    static final int IMG_HEIGHT = 1200;

    // The following variables are used to draw camera images.
    private int winWidth = 0;
    private int winHeight = 0;
    private Rect rect;
    private int dw, dh;
    private float rate;

    // JNI functions
    public native int prepareCamera(int videoid);
    public native int prepareCameraWithBase(int videoid, int camerabase);
    public native void processCamera();
    public native void stopCamera();
    public native void pixeltobmp(Bitmap bitmap);

    static {
        System.loadLibrary("ImageProc");
    }

    public CameraPreview(Context context) {
        super(context);
        this.context = context;
        if (DEBUG) Log.d(TAG, "CameraPreview constructed");
        setFocusable(true);

        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    public CameraPreview(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.context = context;
        if (DEBUG) Log.d(TAG, "CameraPreview constructed");
        setFocusable(true);

        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    @Override
    public void run() {
        while (true && cameraExists) {
            // obtaining display area to draw a large image
            if (winWidth == 0) {
                winWidth = this.getWidth();
                winHeight = this.getHeight();
                if (winWidth * 3 / 4 <= winHeight) {
                    dw = 0;
                    dh = (winHeight - winWidth * 3 / 4) / 2;
                    rate = ((float) winWidth) / IMG_WIDTH;
                    rect = new Rect(dw, dh, dw + winWidth - 1, dh + winWidth * 3 / 4 - 1);
                } else {
                    dw = (winWidth - winHeight * 4 / 3) / 2;
                    dh = 0;
                    rate = ((float) winHeight) / IMG_HEIGHT;
                    rect = new Rect(dw, dh, dw + winHeight * 4 / 3 - 1, dh + winHeight - 1);
                }
            }

            // obtaining a camera image (pixel data are stored in an array in JNI).
            processCamera();
            // camera image to bmp
            pixeltobmp(bmp);

            Canvas canvas = getHolder().lockCanvas();
            if (canvas != null) {
                // draw camera bmp on canvas
                canvas.drawBitmap(bmp, null, rect, null);
                getHolder().unlockCanvasAndPost(canvas);
            }

            if (shouldStop) {
                shouldStop = false;
                break;
            }
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        if (DEBUG) Log.d(TAG, "surfaceCreated");
        if (bmp == null) {
            bmp = Bitmap.createBitmap(IMG_WIDTH, IMG_HEIGHT, Bitmap.Config.ARGB_8888);
        }
        // /dev/videox (x=cameraId + cameraBase) is used
        int ret = prepareCameraWithBase(cameraId, cameraBase);
        if (ret != -1) cameraExists = true;

        mainLoop = new Thread(this);
        mainLoop.start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        if (DEBUG) Log.d(TAG, "surfaceChanged");
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (DEBUG) Log.d(TAG, "surfaceDestroyed");
        if (cameraExists) {
            shouldStop = true;
            while (shouldStop) {
                try {
                    Thread.sleep(100); // wait for thread stopping
                } catch (Exception e) {
                }
            }
        }
        stopCamera();
    }
}
Modified code (Main.java):
package com.camera.simplewebcam;

import android.app.Activity;
import android.os.Bundle;
import android.view.Display;
import android.view.Window;
import android.view.WindowManager;
import android.widget.LinearLayout;
import android.widget.LinearLayout.LayoutParams;

public class Main extends Activity {
    CameraPreview cp;
    private int winWidth;
    private int winHeight;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.main);

        WindowManager m = getWindowManager();
        Display d = m.getDefaultDisplay(); // get the screen width and height
        winWidth = d.getWidth();
        winHeight = d.getHeight();

        cp = (CameraPreview) findViewById(R.id.cp);
        LinearLayout.LayoutParams p = new LinearLayout.LayoutParams(
                LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
        p.height = winHeight;        // height = full screen height
        p.width = winHeight * 4 / 3; // width = 4/3 of the screen height, keeping the 4:3 ratio
        cp.setLayoutParams(p);
    }
}
CameraPreview.java:

package com.camera.simplewebcam;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, Runnable {
    private static final boolean DEBUG = true;
    private static final String TAG = "WebCam";

    protected Context context;
    private SurfaceHolder holder;
    Thread mainLoop = null;
    private Bitmap bmp = null;

    private boolean cameraExists = false;
    private boolean shouldStop = false;

    // /dev/videox (x=cameraId+cameraBase) is used.
    // In some omap devices, system uses /dev/video[0-3],
    // so users must use /dev/video[4-].
    // In such a case, try cameraId=0 and cameraBase=4
    private int cameraId = 0;
    private int cameraBase = 9;

    // This definition also exists in ImageProc.h.
    // Webcam must support the resolution 1600x1200 with YUYV format.
    static final int IMG_WIDTH = 1600;
    static final int IMG_HEIGHT = 1200;

    // The following variables are used to draw camera images.
    private int winWidth = 0;
    private int winHeight = 0;
    private Rect rect;
    private int dw, dh;
    private float rate;

    // JNI functions
    public native int prepareCamera(int videoid);
    public native int prepareCameraWithBase(int videoid, int camerabase);
    public native void processCamera();
    public native void stopCamera();
    public native void pixeltobmp(Bitmap bitmap);

    static {
        System.loadLibrary("ImageProc");
    }

    public CameraPreview(Context context) {
        super(context);
        this.context = context;
        if (DEBUG) Log.d(TAG, "CameraPreview constructed");
        setFocusable(true);

        if (bmp == null) {
            bmp = Bitmap.createBitmap(IMG_WIDTH, IMG_HEIGHT, Bitmap.Config.ARGB_8888);
        }
        // /dev/videox (x=cameraId + cameraBase) is used
        int ret = prepareCameraWithBase(cameraId, cameraBase);
        if (ret != -1) cameraExists = true;

        mainLoop = new Thread(this);
        mainLoop.start();

        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    public CameraPreview(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.context = context;
        if (DEBUG) Log.d(TAG, "CameraPreview constructed");
        setFocusable(true);

        if (bmp == null) {
            bmp = Bitmap.createBitmap(IMG_WIDTH, IMG_HEIGHT, Bitmap.Config.ARGB_8888);
        }
        // /dev/videox (x=cameraId + cameraBase) is used
        int ret = prepareCameraWithBase(cameraId, cameraBase);
        if (ret != -1) cameraExists = true;

        mainLoop = new Thread(this);
        mainLoop.start();

        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    @Override
    public void run() {
        while (true && cameraExists) {
            // Original letterbox computation from the demo, disabled in favor of
            // the full-view destination rect below:
            /*
            if (winWidth == 0) {
                winWidth = this.getWidth();
                winHeight = this.getHeight();
                if (winWidth * 3 / 4 <= winHeight) {
                    dw = 0;
                    dh = (winHeight - winWidth * 3 / 4) / 2;
                    rate = ((float) winWidth) / IMG_WIDTH;
                    rect = new Rect(dw, dh, dw + winWidth - 1, dh + winWidth * 3 / 4 - 1);
                } else {
                    dw = (winWidth - winHeight * 4 / 3) / 2;
                    dh = 0;
                    rate = ((float) winHeight) / IMG_HEIGHT;
                    rect = new Rect(dw, dh, dw + winHeight * 4 / 3 - 1, dh + winHeight - 1);
                }
            }
            */
            if (winWidth == 0) {
                winWidth = this.getWidth();
                winHeight = this.getHeight();
                // Make the destination rect exactly the size of the view,
                // so no uncovered area is left to flicker.
                rect = new Rect(0, 0, winWidth, winHeight);
            }

            // obtaining a camera image (pixel data are stored in an array in JNI).
            processCamera();
            // camera image to bmp
            pixeltobmp(bmp);

            Canvas canvas = getHolder().lockCanvas();
            if (canvas != null) {
                // draw camera bmp on canvas and post it
                canvas.drawBitmap(bmp, null, rect, null);
                getHolder().unlockCanvasAndPost(canvas);
            }

            if (shouldStop) {
                shouldStop = false;
                break;
            }
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        if (DEBUG) Log.d(TAG, "surfaceCreated");
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        if (DEBUG) Log.d(TAG, "surfaceChanged");
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (DEBUG) Log.d(TAG, "surfaceDestroyed");
        if (cameraExists) {
            shouldStop = true;
            while (shouldStop) {
                try {
                    Thread.sleep(100); // wait for thread stopping
                } catch (Exception e) {
                }
            }
        }
        stopCamera();
    }

    public void takePhoto() {
        if (cameraExists) {
            cameraExists = false;
        }
    }
}
As for the underlying reason why filling the surface with data smaller than the View causes this effect, if anyone knows the details, please share them. My current understanding is that the documentation for SurfaceHolder.lockCanvas() states that the surface content is not preserved between unlockCanvasAndPost() and lockCanvas(), so every pixel of the surface should be redrawn on every frame; any region the bitmap does not cover is left holding undefined buffer data, which presumably shows up as the flickering strips.
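If that is indeed the cause, an alternative fix that keeps the original 4:3 letterbox layout would be to paint the whole canvas before drawing the bitmap, so every pixel is written each frame. A rough, untested sketch against the demo's original run() loop (it assumes the same bmp and rect fields and needs an extra import of android.graphics.Color):

    Canvas canvas = getHolder().lockCanvas();
    if (canvas != null) {
        // Clear the entire surface first so the strips outside `rect`
        // hold a defined color instead of stale buffer contents.
        canvas.drawColor(Color.BLACK);
        // Then draw the camera frame into the letterboxed rect as before.
        canvas.drawBitmap(bmp, null, rect, null);
        getHolder().unlockCanvasAndPost(canvas);
    }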