Android Studio version: 3.2
Operating system (running AS): Windows 10
Android version: 5.0.2
Camera: USB camera (640*480, pretty low-end)
Open-source projects: saki4510t/UVCCamera, saki4510t/OpenCVwithUVC
Previous post: [UVCcamera / beginner] Building in Android Studio & the pitfalls I hit
Next post: [UVCcamera / beginner] Simple face recognition with OpenCV in AS
Goal: a simple grayscale image
Full code: o0olele/UVCcamera-OpenCV
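Everything below boils down to one round trip per preview frame: copy the frame into an Android Bitmap, hand it to OpenCV, convert it to grayscale, and hand it back for display. A minimal sketch of that conversion, assuming OpenCV has already been initialised (the helper class name is mine, not part of either project):

import android.graphics.Bitmap;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public final class GrayHelper {
    // Convert an RGB bitmap into a grayscale bitmap of the same size using OpenCV.
    public static Bitmap toGray(final Bitmap src) {
        final Mat rgbMat = new Mat();
        final Mat grayMat = new Mat();
        Utils.bitmapToMat(src, rgbMat);                             // Bitmap -> Mat
        Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);  // to single-channel gray
        final Bitmap dst = Bitmap.createBitmap(src.getWidth(), src.getHeight(), Bitmap.Config.RGB_565);
        Utils.matToBitmap(grayMat, dst);                            // Mat -> Bitmap
        rgbMat.release();
        grayMat.release();
        return dst;
    }
}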
1. Create a new project (here named camera)
2. Copy the third-party library files (included with the sample source) into the matching locations — the three AARs (openCVLibrary, libuvccamera, usbCameraCommon) go into app/libs, which the flatDir repository in step 4 points at
3. Modify the project-level build.gradle
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.2.0'
//change to the Gradle plugin version you are using
}
}
allprojects {
repositories {
maven { url 'http://raw.github.com/saki4510t/libcommon/master/repository/' }
google()
jcenter()
}
configurations.all {
resolutionStrategy.eachDependency { DependencyResolveDetails details ->
def requested = details.requested
if (requested.group == 'com.android.support') {
if (!requested.name.startsWith("multidex")) {
details.useVersion '27.1.1'
//change to the support library version you are using
//this forces all third-party libraries onto the same support lib version
}
}
}
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}
4. Modify the app-level build.gradle
apply plugin: 'com.android.application'
android {
compileSdkVersion 27
defaultConfig {
applicationId "com.example.o0orick.camera"
minSdkVersion 21
targetSdkVersion 27
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
//added: look up the local AARs in app/libs
repositories {
flatDir {
dirs 'libs'
}
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'com.android.support:appcompat-v7:27.1.1'
implementation 'com.android.support.constraint:constraint-layout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.2'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
//added: the third-party libraries used by this sample
implementation(name: 'openCVLibrary', ext: 'aar')
implementation(name: 'libuvccamera', ext: 'aar')
implementation(name: 'usbCameraCommon', ext: 'aar')
implementation("com.serenegiant:common:1.5.20") {
exclude module: 'support-v4'
}
}
5. Modify the layout: add a camera_view (the preview view required by the UVC library — it must be a view that implements CameraViewInterface, since the code below casts it to that type), an ImageView (to display the OpenCV-processed frame), and a button (a plain Button or an ImageButton both work). The IDs referenced in the code are camera_view, imageView and imageButton.
6. Source code
(partially commented — I'm still a beginner, so the comments reflect my own understanding)
package com.example.o0orick.camera;
import android.animation.Animator;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.usb.UsbDevice;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.util.Log;
import android.view.Surface;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.CompoundButton;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.serenegiant.common.BaseActivity;
import com.serenegiant.usb.CameraDialog;
import com.serenegiant.usb.IFrameCallback;
import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.USBMonitor.OnDeviceConnectListener;
import com.serenegiant.usb.USBMonitor.UsbControlBlock;
import com.serenegiant.usb.UVCCamera;
import com.serenegiant.usbcameracommon.UVCCameraHandler;
import com.serenegiant.utils.ViewAnimationHelper;
import com.serenegiant.widget.CameraViewInterface;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.imgproc.Imgproc;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
public final class MainActivity extends BaseActivity implements CameraDialog.CameraDialogParent {
private static final boolean DEBUG = true; // TODO set false on release
private static final String TAG = "MainActivity";
/**
* lock guarding camera / USB monitor operations
*/
private final Object mSync = new Object();
/**
* set true if you want to record movie using MediaSurfaceEncoder
* (writing frame data into Surface camera from MediaCodec
* by almost same way as USBCameratest2)
* set false if you want to record movie using MediaVideoEncoder
*/
private static final boolean USE_SURFACE_ENCODER = false;
/**
* preview resolution(width)
* if your camera does not support specific resolution and mode,
* {@link UVCCamera#setPreviewSize(int, int, int)} throw exception
*/
private static final int PREVIEW_WIDTH = 640; // 640
/**
* preview resolution(height)
* if your camera does not support specific resolution and mode,
* {@link UVCCamera#setPreviewSize(int, int, int)} throw exception
*/
private static final int PREVIEW_HEIGHT = 480; //480
/**
* preview mode
* if your camera does not support specific resolution and mode,
* {@link UVCCamera#setPreviewSize(int, int, int)} throw exception
* 0:YUYV, other:MJPEG
*/
private static final int PREVIEW_MODE = 0; // YUV
protected static final int SETTINGS_HIDE_DELAY_MS = 2500;
/**
* for accessing USB
*/
private USBMonitor mUSBMonitor;
/**
* Handler to execute camera related methods sequentially on private thread
*/
private UVCCameraHandler mCameraHandler;
/**
* for camera preview display
*/
private CameraViewInterface mUVCCameraView;
/**
* for open&start / stop&close camera preview
*/
private ImageButton mCameraButton;
private ImageView mImageView;
private boolean isScaling = false;
private boolean isInCapturing = false;
private int[][] capture_solution = {{640,480}, {800,600},{1024,768}, {1280,1024}};
private int mCaptureWidth = capture_solution[0][0];
private int mCaptureHeight = capture_solution[0][1];
/**
* an abstract OpenCV helper class that mediates between OpenCV and the app:
* it declares a callback that runs once the OpenCV manager has connected, so OpenCV can be initialised at the right point
*/
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.v(TAG, "onCreate:");
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
mCameraButton = findViewById(R.id.imageButton);
mCameraButton.setOnClickListener(mOnClickListener);
mImageView = (ImageView)findViewById(R.id.imageView);
mCaptureWidth = capture_solution[0][0];
mCaptureHeight = capture_solution[0][1];
bitmap = Bitmap.createBitmap(mCaptureWidth, mCaptureHeight, Bitmap.Config.RGB_565);
final View view = findViewById(R.id.camera_view);
mUVCCameraView = (CameraViewInterface)view;
mUVCCameraView.setAspectRatio(PREVIEW_WIDTH / (float)PREVIEW_HEIGHT);
synchronized (mSync) {
mUSBMonitor = new USBMonitor(this, mOnDeviceConnectListener);
mCameraHandler = UVCCameraHandler.createHandler(this, mUVCCameraView,
USE_SURFACE_ENCODER ? 0 : 1, PREVIEW_WIDTH, PREVIEW_HEIGHT, PREVIEW_MODE);
}
}
@Override
protected void onStart() {
super.onStart();
Log.v(TAG, "onStart:");
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
synchronized (mSync) {
mUSBMonitor.register();
}
if (mUVCCameraView != null) {
mUVCCameraView.onResume();
}
}
@Override
protected void onStop() {
Log.v(TAG, "onStop:");
synchronized (mSync) {
mCameraHandler.close(); // #close include #stopRecording and #stopPreview
mUSBMonitor.unregister();
}
if (mUVCCameraView != null)
mUVCCameraView.onPause();
super.onStop();
}
@Override
public void onDestroy() {
Log.v(TAG, "onDestroy:");
synchronized (mSync) {
if (mCameraHandler != null) {
mCameraHandler.setPreviewCallback(null); //zhf
mCameraHandler.release();
mCameraHandler = null;
}
if (mUSBMonitor != null) {
mUSBMonitor.destroy();
mUSBMonitor = null;
}
}
super.onDestroy();
}
/**
* event handler when click camera / capture button
*/
private final OnClickListener mOnClickListener = new OnClickListener() {
@Override
public void onClick(final View view) {
synchronized (mSync) {
if ((mCameraHandler != null) && !mCameraHandler.isOpened()) {
CameraDialog.showDialog(MainActivity.this);
} else {
mCameraHandler.close();
}
}
}
};
private void startPreview() {
synchronized (mSync) {
if (mCameraHandler != null) {
final SurfaceTexture st = mUVCCameraView.getSurfaceTexture();
/**
* since the surface view is driven on another thread, the frame data
* is delivered back to us through this callback via the handler's message mechanism
*/
mCameraHandler.setPreviewCallback(mIFrameCallback);
mCameraHandler.startPreview(new Surface(st));
}
}
updateItems();
}
private final OnDeviceConnectListener mOnDeviceConnectListener = new OnDeviceConnectListener() {
@Override
public void onAttach(final UsbDevice device) {
Toast.makeText(MainActivity.this, "USB_DEVICE_ATTACHED", Toast.LENGTH_SHORT).show();
}
@Override
public void onConnect(final UsbDevice device, final UsbControlBlock ctrlBlock, final boolean createNew) {
if (DEBUG) Log.v(TAG, "onConnect:");
synchronized (mSync) {
if (mCameraHandler != null) {
mCameraHandler.open(ctrlBlock);
startPreview();
updateItems();
}
}
}
@Override
public void onDisconnect(final UsbDevice device, final UsbControlBlock ctrlBlock) {
if (DEBUG) Log.v(TAG, "onDisconnect:");
synchronized (mSync) {
if (mCameraHandler != null) {
queueEvent(new Runnable() {
@Override
public void run() {
try{
// maybe throw java.lang.IllegalStateException: already released
mCameraHandler.setPreviewCallback(null); //zhf
}
catch(Exception e){
e.printStackTrace();
}
mCameraHandler.close();
}
}, 0);
}
}
}
@Override
public void onDettach(final UsbDevice device) {
Toast.makeText(MainActivity.this, "USB_DEVICE_DETACHED", Toast.LENGTH_SHORT).show();
}
@Override
public void onCancel(final UsbDevice device) {
}
};
/**
* to access from CameraDialog
* @return
*/
@Override
public USBMonitor getUSBMonitor() {
synchronized (mSync) {
return mUSBMonitor;
}
}
@Override
public void onDialogResult(boolean canceled) {
if (DEBUG) Log.v(TAG, "onDialogResult:canceled=" + canceled);
}
//================================================================================
private boolean isActive() {
return mCameraHandler != null && mCameraHandler.isOpened();
}
private boolean checkSupportFlag(final int flag) {
return mCameraHandler != null && mCameraHandler.checkSupportFlag(flag);
}
private int getValue(final int flag) {
return mCameraHandler != null ? mCameraHandler.getValue(flag) : 0;
}
private int setValue(final int flag, final int value) {
return mCameraHandler != null ? mCameraHandler.setValue(flag, value) : 0;
}
private int resetValue(final int flag) {
return mCameraHandler != null ? mCameraHandler.resetValue(flag) : 0;
}
/**
* wrap the UI-update code in a Runnable and, whenever the UI needs refreshing,
* hand that Runnable to Activity.runOnUiThread(Runnable); the two-argument overload used here comes from the library's BaseActivity and adds a delay in milliseconds
*/
private void updateItems() {
runOnUiThread(mUpdateItemsOnUITask, 100);
}
private final Runnable mUpdateItemsOnUITask = new Runnable() {
@Override
public void run() {
if (isFinishing()) return;
final int visible_active = isActive() ? View.VISIBLE : View.INVISIBLE;
mImageView.setVisibility(visible_active);
}
};
// if you need the frame data as a byte array on the Java side, you can use this callback method with UVCCamera#setFrameCallback
// if you need to create a Bitmap in IFrameCallback, please refer to the following snippet.
private Bitmap bitmap = null;//Bitmap.createBitmap(640, 480, Bitmap.Config.RGB_565);
private final Bitmap srcBitmap = Bitmap.createBitmap(PREVIEW_WIDTH, PREVIEW_HEIGHT, Bitmap.Config.RGB_565);
private String WarnText;
private final IFrameCallback mIFrameCallback = new IFrameCallback() {
@Override
public void onFrame(final ByteBuffer frame) {
frame.clear();
if(!isActive() || isInCapturing){
return;
}
if(bitmap == null){
Toast.makeText(MainActivity.this, "Error: bitmap is null", Toast.LENGTH_SHORT).show();
return;
}
/**
* the OpenCV processing happens here
* srcBitmap: source frame
* bitmap: processed result
*/
synchronized (bitmap) {
srcBitmap.copyPixelsFromBuffer(frame);
WarnText = "";
if(bitmap.getWidth() != mCaptureWidth || bitmap.getHeight() != mCaptureHeight){
bitmap = Bitmap.createBitmap(mCaptureWidth, mCaptureHeight, Bitmap.Config.RGB_565);
}
Mat rgbMat = new Mat();
Mat grayMat = new Mat();
bitmap = Bitmap.createBitmap(srcBitmap.getWidth(), srcBitmap.getHeight(), Bitmap.Config.RGB_565);
Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.
Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat
Utils.matToBitmap(grayMat, bitmap); //convert mat to bitmap
Log.i(TAG, "procSrc2Gray success...");
}
mImageView.post(mUpdateImageTask);
}
};
private final Runnable mUpdateImageTask = new Runnable() {
@Override
public void run() {
synchronized (bitmap) {
mImageView.setImageBitmap(bitmap);
}
}
};
}
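A note on the frame callback above: synchronized (bitmap) takes a lock on an object that is reassigned inside the block, so the lock acquired in mUpdateImageTask may end up guarding a different Bitmap instance than the one currently being written. For this simple grayscale demo it works in practice, but if you extend the processing, a fixed lock object is safer. A rough sketch of that change (the mBitmapLock field is my own naming, not from the original project):

private final Object mBitmapLock = new Object();   // one fixed lock for all access to bitmap

// in onFrame(...), instead of synchronized (bitmap):
synchronized (mBitmapLock) {
    srcBitmap.copyPixelsFromBuffer(frame);
    // same grayscale processing as above, writing into bitmap
}

// in mUpdateImageTask.run(), instead of synchronized (bitmap):
synchronized (mBitmapLock) {
    mImageView.setImageBitmap(bitmap);
}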