This article describes how to call OpenCV from Android Studio through JNI without OpenCV Manager, by linking OpenCV statically so that the build produces a single standalone .so file.
You may first read the previous article, http://blog.csdn.net/cheng20150809/article/details/51348420, on basic JNI programming in Android Studio.
1. Create a new project with an empty Activity and add a JNI folder via Add->Folder->JNI Folder, then add a cpp file (on Windows, add one more empty cpp file as well; with only a single cpp file the NDK build reports an error). As before, we use JNI dynamic registration to avoid the long mangled function names. The code is as follows:
//
// Created by user on 16-5-4.
//
#include <jni.h>
#include <assert.h>
#include <string.h>
#include <stdint.h>
#include <android/log.h>
#include <android/bitmap.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#ifndef LOG
#define LOG_TAG "imgprocess"
#define ALOGD(...) \
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__);
#define ALOGE(...) \
__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__);
#define ALOGV(...) \
__android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__);
#endif // LOG
#ifndef NELEM
# define NELEM(x) ((int) (sizeof(x) / sizeof((x)[0])))
#endif
// Without `using namespace cv` here, every OpenCV type below would need the cv:: qualifier
using namespace cv;
// Takes a bitmap object and returns a new bitmap containing the edge information.
// Note the first two parameters, JNIEnv* env and jobject thiz:
// JNIEnv is a pointer to the current JNI environment, through which the built-in JNI functions are accessed;
// jobject is the Java object on which this native method was called, through which its members can be accessed.
jobject getEdge(JNIEnv* env, jobject thiz, jobject bitmap){
AndroidBitmapInfo bitmapInfo;
uint32_t* storedBitmapPixels = NULL;
int pixelsCount;
int ret = -1;
// Read the basic bitmap info
if ((ret = AndroidBitmap_getInfo(env, bitmap, &bitmapInfo)) < 0) {
return NULL;
}
ALOGD("width:%d height:%d stride:%d", bitmapInfo.width, bitmapInfo.height, bitmapInfo.stride);
// Only RGBA_8888 bitmaps are handled here
if (bitmapInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
return NULL;
}
// Extract the pixel values
void* bitmapPixels = NULL;
if ((ret = AndroidBitmap_lockPixels(env, bitmap, &bitmapPixels)) < 0) {
return NULL;
}
// Build an OpenCV Mat from the pixels
Mat srcMat(Size(bitmapInfo.width, bitmapInfo.height), CV_8UC4);
pixelsCount = bitmapInfo.height * bitmapInfo.width;
memcpy(srcMat.data, bitmapPixels, sizeof(uint32_t) * pixelsCount);
AndroidBitmap_unlockPixels(env, bitmap);
// Run edge detection to obtain desMat
Mat desMat;
Canny(srcMat, desMat, 30.0, 90.0, 3, true);
// Create a new bitmap object through the Java-level Bitmap class
jclass bitmapCls = env->FindClass("android/graphics/Bitmap");
jmethodID createBitmapFunction = env->GetStaticMethodID(bitmapCls, "createBitmap", "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
jstring configName = env->NewStringUTF("ARGB_8888");
jclass bitmapConfigClass = env->FindClass("android/graphics/Bitmap$Config");
jmethodID valueOfBitmapConfigFunction = env->GetStaticMethodID(bitmapConfigClass, "valueOf", "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;");
jobject bitmapConfig = env->CallStaticObjectMethod(bitmapConfigClass, valueOfBitmapConfigFunction, configName);
jobject newBitmap = env->CallStaticObjectMethod(bitmapCls, createBitmapFunction, desMat.cols, desMat.rows, bitmapConfig);
// Get the data pointer of the new bitmap
bitmapPixels = NULL;
if ((ret = AndroidBitmap_lockPixels(env, newBitmap, &bitmapPixels)) < 0) {
return NULL;
}
// Write the Mat data into the bitmap
uint32_t* newBitmapPixels = (uint32_t*)bitmapPixels;
pixelsCount = desMat.cols * desMat.rows;
for (int i = 0; i < pixelsCount; i++) {
// desMat is the single-channel edge map: copy its value into R, G and B, and set alpha to opaque
memset(&(newBitmapPixels[i]), desMat.data[i], 3);
((uint8_t*)&(newBitmapPixels[i]))[3] = 0xFF;
}
AndroidBitmap_unlockPixels(env, newBitmap);
return newBitmap;
}
// Fully qualified name of the class that declares the native methods
static const char *classPathName = "test/hc/cvtest/MainActivity";
//
static JNINativeMethod gMethods[] = {
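// { Java method name, JNI signature, native function pointer }.
// The signature must match the Java-side declaration; as written it assumes the method
// is declared as `public native Object getEdge(Object bitmap)`.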
{"getEdge", "(Ljava/lang/Object;)Ljava/lang/Object;", (void*)getEdge},
};
int registerNativeMethods(JNIEnv* env, const char* className,
JNINativeMethod* gMethods, int numMethods)
{
jclass clazz;
clazz = env->FindClass(className);
if (clazz == NULL) {
ALOGE("Native registration unable to find class '%s'", className);
return JNI_FALSE;
}
if (env->RegisterNatives(clazz, gMethods, numMethods) < 0) {
ALOGE("RegisterNatives failed for '%s'", className);
return JNI_FALSE;
}
return JNI_TRUE;
}
int register_jni_methods(JNIEnv* env)
{
return registerNativeMethods(env, classPathName, gMethods, NELEM(gMethods));
}
jint JNI_OnLoad(JavaVM* vm, void*)
{
JNIEnv* env = NULL;
jint result = -1;
if (vm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
ALOGE("ERROR: GetEnv failed\n");
goto bail;
}
assert(env != NULL);
if (register_jni_methods(env) != JNI_TRUE) {
ALOGE("ERROR: native registration failed\n");
goto bail;
}
ALOGE("SUCCESS: native registration successed\n");
result = JNI_VERSION_1_4;
bail:
return result;
}
2. Since we need OpenCV's .mk files, we do not use Android Studio's default NDK build but our own .mk files instead. Add Android.mk and Application.mk to the jni directory.
Android.mk
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
#opencv
OPENCVROOT:=/local/tools/OpenCV-2.4.10-android-sdk/OpenCV-2.4.10-android-sdk
OPENCV_CAMERA_MODULES:=off
OPENCV_INSTALL_MODULES:=on
#OPENCV_LIB_TYPE:=SHARED
# static linking
OPENCV_LIB_TYPE:=STATIC
include ${OPENCVROOT}/sdk/native/jni/OpenCV.mk
LOCAL_SRC_FILES := ImgProcess.cpp
LOCAL_LDLIBS += -llog
LOCAL_LDLIBS += -ljnigraphics
LOCAL_MODULE := imgprocess
include $(BUILD_SHARED_LIBRARY)
Application.mk
# Target CPU ABIs (arm64-v8a also exists, but requires a 64-bit NDK and a newer OpenCV)
APP_ABI := armeabi armeabi-v7a x86
APP_PLATFORM := android-16
APP_STL := gnustl_static
3. Configure the app's build.gradle: inside defaultConfig, add a task that compiles the JNI sources with ndk-build. The generated .so files will be placed under src/main/jniLibs.
android {
compileSdkVersion 23
buildToolsVersion "23.0.3"
defaultConfig {
applicationId "test.hc.cvtest"
minSdkVersion 15
targetSdkVersion 23
versionCode 1
versionName "1.0"
// add begin for ndk
sourceSets.main.jni.srcDirs = []
task ndkBuild(type: Exec, description: 'Compile JNI source via NDK') {
commandLine "$ndkDir/ndk-build",
'NDK_PROJECT_PATH=build/intermediates/ndk',
'NDK_LIBS_OUT=src/main/jniLibs',
'APP_BUILD_SCRIPT=src/main/jni/Android.mk',
'NDK_APPLICATION_MK=src/main/jni/Application.mk'
}
tasks.withType(JavaCompile) {
compileTask -> compileTask.dependsOn ndkBuild
}
// add end for ndk
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
}
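Because the ndkBuild task is made a dependency of every JavaCompile task, any Gradle build (e.g. assembleDebug) runs ndk-build first; the resulting libimgprocess.so for each ABI ends up under src/main/jniLibs/<abi>/, from which the Android plugin packages it into the APK.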
4. Add the following to gradle.properties:
android.useDeprecatedNdk=true
ndkDir=/local/tools/android-ndk-r10
and configure the NDK path in local.properties as well:
ndk.dir=/local/tools/android-ndk-r10
5. Finally, on the Java side, load the native library and declare the native method in the class that matches classPathName (here MainActivity), as sketched below.
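A minimal sketch of that Java side, assuming the package and class match the classPathName registered in JNI_OnLoad (test/hc/cvtest/MainActivity), the library name matches LOCAL_MODULE (imgprocess), and the native method is declared with Object parameters to fit the registered signature; the layout and resource ids (activity_main, test, imageView) are hypothetical placeholders:

package test.hc.cvtest;

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.widget.ImageView;

public class MainActivity extends Activity {

    static {
        // Single statically linked library produced by ndk-build (LOCAL_MODULE := imgprocess)
        System.loadLibrary("imgprocess");
    }

    // Declared with Object to match the registered signature "(Ljava/lang/Object;)Ljava/lang/Object;"
    public native Object getEdge(Object bitmap);

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Decode a drawable as ARGB_8888, run edge detection in native code and show the result
        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inPreferredConfig = Bitmap.Config.ARGB_8888;
        Bitmap src = BitmapFactory.decodeResource(getResources(), R.drawable.test, opts);
        Bitmap edge = (Bitmap) getEdge(src);
        ((ImageView) findViewById(R.id.imageView)).setImageBitmap(edge);
    }
}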