Android QR code scanning (from a recent project)

A recent project needed QR code scanning, so I pulled part of the ZXing code from its project hosting on code.google.com into my own project. Here is how it was done.

After importing the Google code into the project, the directory structure looks like this:

[Screenshot ZXing01: project structure after importing the ZXing code]

Much of the sample code in the source is not needed. The classes actually used are CaptureActivity, AutoFocusCallback, CameraConfigurationManager, FlashlightManager, PlanarYUVLuminanceSource, PreviewCallback, CaptureActivityHandler, Pattern, DecodeHandler, DecodeThread, FinishListener, InactivityTimer, Intents, ViewfinderResultPointCallback, and ViewfinderView; between them they handle the barcode formats to decode, drive the camera, and so on.
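Two practical notes before the code. First, the manifest must declare android.permission.CAMERA (plus android.permission.VIBRATE if you keep the vibration feedback). Second, if the project only ever scans QR codes, the decode formats can be narrowed instead of passing null (null means "try every format") to CaptureActivityHandler. A small helper along these lines is one way to do that; this is only a sketch, the class name DecodeFormats is my own, and whether the list is honored depends on the DecodeThread code you kept from ZXing:

import java.util.Vector;

import com.google.zxing.BarcodeFormat;

// Hypothetical helper: builds the format list handed to CaptureActivityHandler.
// Passing null instead keeps the default behaviour of trying every supported format.
final class DecodeFormats {

  private DecodeFormats() {
  }

  static Vector<BarcodeFormat> qrOnly() {
    Vector<BarcodeFormat> formats = new Vector<BarcodeFormat>();
    formats.add(BarcodeFormat.QR_CODE);   // restrict decoding to QR codes only
    return formats;
  }
}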

The two classes that need the most attention are shown below:

import java.io.IOException;
import java.util.Vector;

import android.app.Activity;
import android.content.res.AssetFileDescriptor;
import android.graphics.Bitmap;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.os.Bundle;
import android.os.Handler;
import android.os.Vibrator;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.widget.TextView;

import com.google.zxing.BarcodeFormat;
import com.google.zxing.Result;

// CameraManager, CaptureActivityHandler, ViewfinderView and InactivityTimer are the
// classes extracted from ZXing; they are assumed to live in the same package.
public class CaptureActivity extends Activity implements Callback {

  private static final float BEEP_VOLUME = 0.10f;
  private static final long VIBRATE_DURATION = 200L;

  private CaptureActivityHandler handler;
  private ViewfinderView viewfinderView;
  private boolean hasSurface;
  private Vector<BarcodeFormat> decodeFormats;
  private String characterSet;
  private TextView txtResult;
  private InactivityTimer inactivityTimer;
  private MediaPlayer mediaPlayer;
  private boolean playBeep;
  private boolean vibrate;

  /** Called when the activity is first created. */
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);
    // Initialize the CameraManager singleton.
    CameraManager.init(getApplication());
    viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
    txtResult = (TextView) findViewById(R.id.txtResult);
    hasSurface = false;
    inactivityTimer = new InactivityTimer(this);
  }

  @Override
  protected void onResume() {
    super.onResume();
    SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
    SurfaceHolder surfaceHolder = surfaceView.getHolder();
    if (hasSurface) {
      initCamera(surfaceHolder);
    } else {
      surfaceHolder.addCallback(this);
      surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }
    decodeFormats = null;
    characterSet = null;
    playBeep = true;
    AudioManager audioService = (AudioManager) getSystemService(AUDIO_SERVICE);
    if (audioService.getRingerMode() != AudioManager.RINGER_MODE_NORMAL) {
      playBeep = false;
    }
    initBeepSound();
    vibrate = true;
  }

  @Override
  protected void onPause() {
    super.onPause();
    if (handler != null) {
      handler.quitSynchronously();
      handler = null;
    }
    CameraManager.get().closeDriver();
  }

  @Override
  protected void onDestroy() {
    inactivityTimer.shutdown();
    super.onDestroy();
  }

  private void initCamera(SurfaceHolder surfaceHolder) {
    try {
      CameraManager.get().openDriver(surfaceHolder);
    } catch (IOException ioe) {
      return;
    } catch (RuntimeException e) {
      return;
    }
    if (handler == null) {
      handler = new CaptureActivityHandler(this, decodeFormats, characterSet);
    }
  }

  @Override
  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
  }

  @Override
  public void surfaceCreated(SurfaceHolder holder) {
    if (!hasSurface) {
      hasSurface = true;
      initCamera(holder);
    }
  }

  @Override
  public void surfaceDestroyed(SurfaceHolder holder) {
    hasSurface = false;
  }

  public ViewfinderView getViewfinderView() {
    return viewfinderView;
  }

  public Handler getHandler() {
    return handler;
  }

  public void drawViewfinder() {
    viewfinderView.drawViewfinder();
  }

  /**
   * Handle the scan result here.
   */
  public void handleDecode(Result obj, Bitmap barcode) {
    inactivityTimer.onActivity();
    viewfinderView.drawResultBitmap(barcode);
    playBeepSoundAndVibrate();
    txtResult.setText(obj.getBarcodeFormat().toString() + ":" + obj.getText());
  }

  private void initBeepSound() {
    if (playBeep && mediaPlayer == null) {
      // The volume on STREAM_SYSTEM is not adjustable, and users found it too loud,
      // so we now play on the music stream.
      setVolumeControlStream(AudioManager.STREAM_MUSIC);
      mediaPlayer = new MediaPlayer();
      mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
      mediaPlayer.setOnCompletionListener(beepListener);

      AssetFileDescriptor file = getResources().openRawResourceFd(R.raw.beep);
      try {
        mediaPlayer.setDataSource(file.getFileDescriptor(), file.getStartOffset(), file.getLength());
        file.close();
        mediaPlayer.setVolume(BEEP_VOLUME, BEEP_VOLUME);
        mediaPlayer.prepare();
      } catch (IOException e) {
        mediaPlayer = null;
      }
    }
  }

  private void playBeepSoundAndVibrate() {
    if (playBeep && mediaPlayer != null) {
      mediaPlayer.start();
    }
    if (vibrate) {
      Vibrator vibrator = (Vibrator) getSystemService(VIBRATOR_SERVICE);
      vibrator.vibrate(VIBRATE_DURATION);
    }
  }

  /**
   * When the beep has finished playing, rewind to queue up another one.
   */
  private final OnCompletionListener beepListener = new OnCompletionListener() {
    public void onCompletion(MediaPlayer mediaPlayer) {
      mediaPlayer.seekTo(0);
    }
  };
}

The method to focus on when reusing this class is handleDecode(): it is where the scan result is processed after this Activity has been launched.
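If the result should go back to the Activity that launched the scanner rather than just being shown in txtResult, handleDecode() can be adapted roughly as follows. This is only a sketch: the extra keys scan_result and scan_format are my own names, not part of ZXing.

  public void handleDecode(Result obj, Bitmap barcode) {
    inactivityTimer.onActivity();
    playBeepSoundAndVibrate();
    // Hand the decoded text back to the caller (requires: import android.content.Intent;).
    Intent data = new Intent();
    data.putExtra("scan_result", obj.getText());                      // hypothetical extra key
    data.putExtra("scan_format", obj.getBarcodeFormat().toString());  // hypothetical extra key
    setResult(RESULT_OK, data);
    finish();
  }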

import java.io.IOException;

import android.content.Context;
import android.graphics.PixelFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Build;
import android.os.Handler;
import android.util.Log;
import android.view.SurfaceHolder;

// CameraConfigurationManager, PreviewCallback, AutoFocusCallback, FlashlightManager and
// PlanarYUVLuminanceSource are the other classes extracted from ZXing, assumed to be in
// the same package.
final class CameraManager {

  private static final String TAG = CameraManager.class.getSimpleName();

  private static final int MIN_FRAME_WIDTH = 240;
  private static final int MIN_FRAME_HEIGHT = 240;
  private static final int MAX_FRAME_WIDTH = 480;
  private static final int MAX_FRAME_HEIGHT = 360;

  private static CameraManager cameraManager;

  static final int SDK_INT; // Later we can use Build.VERSION.SDK_INT
  static {
    int sdkInt;
    try {
      sdkInt = Integer.parseInt(Build.VERSION.SDK);
    } catch (NumberFormatException nfe) {
      // Just to be safe
      sdkInt = 10000;
    }
    SDK_INT = sdkInt;
  }

  private final Context context;
  private final CameraConfigurationManager configManager;
  private Camera camera;
  private Rect framingRect;
  private Rect framingRectInPreview;
  private boolean initialized;
  private boolean previewing;
  private final boolean useOneShotPreviewCallback;
  private final PreviewCallback previewCallback;
  private final AutoFocusCallback autoFocusCallback;

  public static void init(Context context) {
    if (cameraManager == null) {
      cameraManager = new CameraManager(context);
    }
  }

  public static CameraManager get() {
    return cameraManager;
  }

  private CameraManager(Context context) {
    this.context = context;
    this.configManager = new CameraConfigurationManager(context);
    // setOneShotPreviewCallback() is only reliable after Cupcake (API 3);
    // fall back to setPreviewCallback() on older devices.
    useOneShotPreviewCallback = SDK_INT > 3; // 3 = Build.VERSION_CODES.CUPCAKE
    previewCallback = new PreviewCallback(configManager, useOneShotPreviewCallback);
    autoFocusCallback = new AutoFocusCallback();
  }

  /**
   * Opens the camera driver and initializes the hardware parameters.
   *
   * @param holder The surface object which the camera will draw preview frames into.
   * @throws IOException Indicates the camera driver failed to open.
   */
  public void openDriver(SurfaceHolder holder) throws IOException {
    if (camera == null) {
      camera = Camera.open();
      if (camera == null) {
        throw new IOException();
      }
      camera.setPreviewDisplay(holder);

      if (!initialized) {
        initialized = true;
        configManager.initFromCameraParameters(camera);
      }
      configManager.setDesiredCameraParameters(camera);

      // SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
      // Whether to turn on the front light:
      // if (prefs.getBoolean(PreferencesActivity.KEY_FRONT_LIGHT, false)) {
      //   FlashlightManager.enableFlashlight();
      // }
      FlashlightManager.enableFlashlight();
    }
  }

  /**
   * Closes the camera driver if still in use.
   */
  public void closeDriver() {
    if (camera != null) {
      FlashlightManager.disableFlashlight();
      camera.release();
      camera = null;
    }
  }

  /**
   * Asks the camera hardware to begin drawing preview frames to the screen.
   */
  public void startPreview() {
    if (camera != null && !previewing) {
      camera.startPreview();
      previewing = true;
    }
  }

  /**
   * Tells the camera to stop drawing preview frames.
   */
  public void stopPreview() {
    if (camera != null && previewing) {
      if (!useOneShotPreviewCallback) {
        camera.setPreviewCallback(null);
      }
      camera.stopPreview();
      previewCallback.setHandler(null, 0);
      autoFocusCallback.setHandler(null, 0);
      previewing = false;
    }
  }

  /**
   * A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
   * in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
   * respectively.
   *
   * @param handler The handler to send the message to.
   * @param message The what field of the message to be sent.
   */
  public void requestPreviewFrame(Handler handler, int message) {
    if (camera != null && previewing) {
      previewCallback.setHandler(handler, message);
      if (useOneShotPreviewCallback) {
        camera.setOneShotPreviewCallback(previewCallback);
      } else {
        camera.setPreviewCallback(previewCallback);
      }
    }
  }

  /**
   * Asks the camera hardware to perform an autofocus.
   *
   * @param handler The Handler to notify when the autofocus completes.
   * @param message The message to deliver.
   */
  public void requestAutoFocus(Handler handler, int message) {
    if (camera != null && previewing) {
      autoFocusCallback.setHandler(handler, message);
      //Log.d(TAG, "Requesting auto-focus callback");
      camera.autoFocus(autoFocusCallback);
    }
  }

  /**
   * Calculates the framing rect which the UI should draw to show the user where to place the
   * barcode. This target helps with alignment as well as forces the user to hold the device
   * far enough away to ensure the image will be in focus.
   *
   * @return The rectangle to draw on screen in window coordinates.
   */
  public Rect getFramingRect() {
    Point screenResolution = configManager.getScreenResolution();
    if (framingRect == null) {
      if (camera == null) {
        return null;
      }
      int width = screenResolution.x * 3 / 4;
      if (width < MIN_FRAME_WIDTH) {
        width = MIN_FRAME_WIDTH;
      } else if (width > MAX_FRAME_WIDTH) {
        width = MAX_FRAME_WIDTH;
      }
      int height = screenResolution.y * 3 / 4;
      if (height < MIN_FRAME_HEIGHT) {
        height = MIN_FRAME_HEIGHT;
      } else if (height > MAX_FRAME_HEIGHT) {
        height = MAX_FRAME_HEIGHT;
      }
      int leftOffset = (screenResolution.x - width) / 2;
      int topOffset = (screenResolution.y - height) / 2;
      framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
      Log.d(TAG, "Calculated framing rect: " + framingRect);
    }
    return framingRect;
  }

  /**
   * Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
   * not UI / screen.
   */
  public Rect getFramingRectInPreview() {
    if (framingRectInPreview == null) {
      Rect rect = new Rect(getFramingRect());
      Point cameraResolution = configManager.getCameraResolution();
      Point screenResolution = configManager.getScreenResolution();
      rect.left = rect.left * cameraResolution.x / screenResolution.x;
      rect.right = rect.right * cameraResolution.x / screenResolution.x;
      rect.top = rect.top * cameraResolution.y / screenResolution.y;
      rect.bottom = rect.bottom * cameraResolution.y / screenResolution.y;
      framingRectInPreview = rect;
    }
    return framingRectInPreview;
  }

  /**
   * Converts the result points from still resolution coordinates to screen coordinates.
   *
   * @param points The points returned by the Reader subclass through Result.getResultPoints().
   * @return An array of Points scaled to the size of the framing rect and offset appropriately
   *         so they can be drawn in screen coordinates.
   */
  /*
  public Point[] convertResultPoints(ResultPoint[] points) {
    Rect frame = getFramingRectInPreview();
    int count = points.length;
    Point[] output = new Point[count];
    for (int x = 0; x < count; x++) {
      output[x] = new Point();
      output[x].x = frame.left + (int) (points[x].getX() + 0.5f);
      output[x].y = frame.top + (int) (points[x].getY() + 0.5f);
    }
    return output;
  }
   */

  /**
   * A factory method to build the appropriate LuminanceSource object based on the format
   * of the preview buffers, as described by Camera.Parameters.
   *
   * @param data A preview frame.
   * @param width The width of the image.
   * @param height The height of the image.
   * @return A PlanarYUVLuminanceSource instance.
   */
  public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
    Rect rect = getFramingRectInPreview();
    int previewFormat = configManager.getPreviewFormat();
    String previewFormatString = configManager.getPreviewFormatString();
    switch (previewFormat) {
      // This is the standard Android format which all devices are REQUIRED to support.
      // In theory, it's the only one we should ever care about.
      case PixelFormat.YCbCr_420_SP:
      // This format has never been seen in the wild, but is compatible as we only care
      // about the Y channel, so allow it.
      case PixelFormat.YCbCr_422_SP:
        return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
            rect.width(), rect.height());
      default:
        // The Samsung Moment incorrectly uses this variant instead of the 'sp' version.
        // Fortunately, it too has all the Y data up front, so we can read it.
        if ("yuv420p".equals(previewFormatString)) {
          return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
              rect.width(), rect.height());
        }
    }
    throw new IllegalArgumentException("Unsupported picture format: " +
        previewFormat + '/' + previewFormatString);
  }
}

This class mainly takes care of the scanning area (the framing rect), which is how the code adapts to different screen sizes and device models.
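For reference, the framing rect computed above is what ViewfinderView draws its scan box from. The following stand-in view is a simplified sketch (not the real ViewfinderView from ZXing, and the class name SimpleViewfinderView is my own) showing how getFramingRect() is typically consumed: everything outside the rect is dimmed.

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.View;

// Simplified stand-in for the real ViewfinderView: it only dims the area
// outside the framing rect supplied by CameraManager.
public final class SimpleViewfinderView extends View {

  private final Paint paint = new Paint();

  public SimpleViewfinderView(Context context, AttributeSet attrs) {
    super(context, attrs);
  }

  @Override
  protected void onDraw(Canvas canvas) {
    Rect frame = CameraManager.get().getFramingRect();
    if (frame == null) {
      return; // camera not opened yet
    }
    int width = canvas.getWidth();
    int height = canvas.getHeight();
    paint.setColor(0x60000000); // semi-transparent mask outside the scan box
    // Shade the four regions surrounding the framing rect.
    canvas.drawRect(0, 0, width, frame.top, paint);
    canvas.drawRect(0, frame.top, frame.left, frame.bottom + 1, paint);
    canvas.drawRect(frame.right + 1, frame.top, width, frame.bottom + 1, paint);
    canvas.drawRect(0, frame.bottom + 1, width, height, paint);
  }
}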

After the whole ZXing portion has been extracted, the project structure looks like this:
 

[Screenshot ZXing02: project structure after extracting the ZXing core]

 

The main task is to strip out the many bundled ZXing features you do not need and pull only the core code into your own project.
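Once the trimmed-down module is in place, the scanner can be started from any Activity in the host project. A minimal sketch, assuming the handleDecode() variant shown earlier that returns the text via setResult() with the hypothetical scan_result extra:

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.widget.Toast;

public class DemoActivity extends Activity {

  private static final int REQUEST_SCAN = 1;  // arbitrary request code

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Launch the extracted scanner right away for brevity;
    // in a real app this would usually sit behind a button click.
    startActivityForResult(new Intent(this, CaptureActivity.class), REQUEST_SCAN);
  }

  @Override
  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SCAN && resultCode == RESULT_OK && data != null) {
      // "scan_result" matches the hypothetical extra key used in handleDecode().
      String text = data.getStringExtra("scan_result");
      Toast.makeText(this, text, Toast.LENGTH_LONG).show();
    }
  }
}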
