Windows 10 环境 Android Studio 集成 FFmpeg 5.1(NDK r24)

配置信息

NDK r24 24.0.8215888
CMake 3.18.1
AGP 7.2.1
Gradle 7.3.3

项目地址:

https://github.com/Dragonxwl/FFmpegDemo/tree/master

项目TAG信息 V1.0

{
FFmpeg 5.1 编译arm64-v8a so文件集成Demo
}

1.开发环境 Windows 10 专业版

2.android studio 环境与版本信息

2.1 android studio 版本信息 (2022.6.1 当前官网直接下载的版本)
image.png
2.2 Gradle信息
image.png
2.3 NDK信息(2022.6.1 当前AS SDKmanger最高版本)

24.0.8215888

2.4 CMake信息(2022.6.1 当前AS SDKmanger最高版本)

3.18.1(3.23.2也成功了,自己下载放入{SDKmanger路径}/cmake --eg:C:\Users\xiangwl\AppData\Local\Android\Sdk\cmake)


image.png

image.png

3.提前准备编译好的ffmpeg so文件(Ubuntu 环境、ffmpeg5.1、NDK r24 -- 24.0.8215888) 没有可以去github项目中拿一下

有需要可以看我的文章尝试自己编译,有问题欢迎评论区
window 10 下安装 Ubuntu 22.04 使用 NDK r24 编译FFmpeg 5.1

4.第一步:创建新的Native C++项目工程

4.1 创建工程过程按下图操作即可
image.png

image.png

image.png
4.2 工程创建好以后,打开build.gralde文件,添加ndkVersion(我选择最新的NDK版本)点击Sync,gradle对应默认NDK版本可能与自己想要的不一致
image.png

5.第二步:添加提前准备好的so文件

5.1 ANDROID_ABI:arm64-v8a(因为我使用的是华为手机,暂时仅添加arm64-v8a,armeabi-v7a、x86、x86_64后续再添加) 详细解释与官网说明
image.png
5.2 配置build.gradle文件
externalNativeBuild {}中添加
ndk{
    abiFilters "arm64-v8a"
}
// Module-level build.gradle — wires the prebuilt FFmpeg 5.1 .so files into the
// app via CMake. Key additions over the template project: ndkVersion and the
// arm64-v8a abiFilters entry.
plugins {
id 'com.android.application'
id 'org.jetbrains.kotlin.android'
// NOTE(review): kotlin-android-extensions (synthetic view access used by
// MainActivity) is deprecated; viewBinding below is the modern replacement —
// confirm before removing either one.
id 'kotlin-android-extensions'
}

android {
compileSdkVersion 32
// Must match an NDK version actually installed through SDK Manager; this
// overrides whatever default NDK the Android Gradle Plugin would pick.
ndkVersion '24.0.8215888'
defaultConfig {
applicationId "com.xwl.ffmepgdemo"
minSdkVersion 15
targetSdkVersion 32
versionCode 1
versionName "1.0"

testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
// -frtti/-fexceptions enable C++ RTTI and exception support;
// -Wno-deprecated-declarations silences FFmpeg 5.x deprecation warnings.
cppFlags '-frtti -fexceptions -Wno-deprecated-declarations'
}
// Build only arm64-v8a — the only ABI the prebuilt FFmpeg .so files cover.
ndk{
abiFilters "arm64-v8a"
}
}
}

buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
// Points the build at the CMake script below; the version here should agree
// with cmake_minimum_required in CMakeLists.txt.
externalNativeBuild {
cmake {
path file('src/main/cpp/CMakeLists.txt')
version '3.18.1'
}
}
buildFeatures {
viewBinding true
}
}

dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'androidx.core:core-ktx:1.8.0'
implementation 'androidx.appcompat:appcompat:1.4.2'
implementation 'com.google.android.material:material:1.6.1'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
}
5.3 第三步:配置CMakeLists.txt文件
# CMake build script for native-lib: compiles the JNI bridge (native-lib.cpp)
# and links it against the prebuilt FFmpeg 5.1 shared libraries shipped in
# app/libs/<ABI>/.
# 3.18.1 matches the `version '3.18.1'` declared in build.gradle (the original
# 3.4.1 was inconsistent with the toolchain actually required by the project).
cmake_minimum_required(VERSION 3.18.1)

# Android system log library, so native code can call __android_log_*.
find_library( log-lib
              log )

# Directory holding the prebuilt FFmpeg .so files (per ABI) and their headers.
set(distribution_DIR ${CMAKE_SOURCE_DIR}/../../../libs)
message(STATUS "CMAKE_SOURCE_DIR = ${CMAKE_SOURCE_DIR}")

# Our own JNI library (libnative-lib.so), built from native-lib.cpp.
add_library( native-lib
             SHARED
             native-lib.cpp)

# All prebuilt FFmpeg components. Declaring them in one loop replaces the
# original eight copy-pasted add_library/set_target_properties pairs — adding
# or removing a component is now a one-line change.
set(ffmpeg_libs
    avcodec
    avdevice
    avfilter
    avformat
    avutil
    postproc
    swresample
    swscale)

foreach(ffmpeg_lib IN LISTS ffmpeg_libs)
    # IMPORTED: the .so is prebuilt elsewhere; IMPORTED_LOCATION tells CMake
    # where to find it for the ABI being built.
    add_library(${ffmpeg_lib} SHARED IMPORTED)
    set_target_properties(${ffmpeg_lib}
                          PROPERTIES IMPORTED_LOCATION
                          ${distribution_DIR}/${ANDROID_ABI}/lib${ffmpeg_lib}.so)
endforeach()

# FFmpeg public headers — anchored to distribution_DIR instead of repeating a
# second hand-written relative path (same directory as before).
include_directories(${distribution_DIR}/include)

# native-lib depends on every FFmpeg component plus the Android log library.
target_link_libraries( native-lib
                       ${ffmpeg_libs}
                       ${log-lib} )

配置好CMakeLists.txt后点击 Build -> Refresh Linked C++ Projects


image.png
5.4第四步:JNI部分与代码调用

打开native-lib.cpp
Java_com_xwl_ffmpegdemo_MainActivity_XXX 规则
package="com.xwl.ffmpegdemo"
package="包名1.包名2.包名3"
类名->MainActivity,建议建立一个工具类
前面是固定的 Java_包名1_包名2_包名3_类名_方法名(注意前缀是 Java_,区分大小写)

#include 
#include 

extern "C"
{


#include 
#include 
#include 
#include 

JNIEXPORT jstring JNICALL
Java_com_xwl_ffmpegdemo_MainActivity_stringFromJNI(
JNIEnv *env,
jobject /* this */) {
std::string hello = "Hello from C++";
return env->NewStringUTF(hello.c_str());
}

JNIEXPORT jstring JNICALL
Java_com_xwl_ffmpegdemo_MainActivity_avcodecinfo(JNIEnv *env, jobject instance) {

char info[40000] = {0};

void *item = nullptr;
for (;;) {
const AVCodec *cur = av_codec_iterate(&item);
if (!cur)
break;
if (av_codec_is_decoder(cur) != 0) {
sprintf(info, "%sdecode:", info);
} else {
sprintf(info, "%sencode:", info);
}
switch (cur->type) {
case AVMEDIA_TYPE_VIDEO:
sprintf(info, "%s(video):", info);
break;
case AVMEDIA_TYPE_AUDIO:
sprintf(info, "%s(audio):", info);
break;
default:
sprintf(info, "%s(other):", info);
break;
}
sprintf(info, "%s[%10s]\n", info, cur->name);
}

return env->NewStringUTF(info);
}

JNIEXPORT jstring JNICALL
Java_com_xwl_ffmpegdemo_MainActivity_urlprotocolinfo(JNIEnv *env, jobject instance) {
char info[40000] = {0};
struct URLProtocol *pup = nullptr;
struct URLProtocol **p_temp = &pup;
avio_enum_protocols((void **) p_temp, 0);
while ((*p_temp) != nullptr) {
sprintf(info, "%sInput: %s\n", info, avio_enum_protocols((void **) p_temp, 0));
}
pup = nullptr;
avio_enum_protocols((void **) p_temp, 1);
while ((*p_temp) != nullptr) {
sprintf(info, "%sInput: %s\n", info, avio_enum_protocols((void **) p_temp, 1));
}

return env->NewStringUTF(info);
}

JNIEXPORT jstring JNICALL
Java_com_xwl_ffmpegdemo_MainActivity_avformatinfo(JNIEnv *env, jobject instance) {
char info[40000] = {0};
void *inputItem = nullptr;
for (;;) {
const AVInputFormat *inputCur = av_demuxer_iterate(&inputItem);
if (!inputCur)
break;
sprintf(info, "%sInput: %s\n", info, inputCur->name);
}

void *outputItem = nullptr;
for (;;) {
const AVOutputFormat *outputCur = av_muxer_iterate(&outputItem);
if (!outputCur)
break;
sprintf(info, "%sOutput: %s\n", info, outputCur->name);
}
return env->NewStringUTF(info);
}

JNIEXPORT jstring JNICALL
Java_com_xwl_ffmpegdemo_MainActivity_avfilterinfo(JNIEnv *env, jobject instance) {
char info[40000] = {0};
void *item = nullptr;
for (;;) {
const AVFilter *cur = av_filter_iterate(&item);
if (!cur)
break;
sprintf(info, "%s%s\n", info, cur->name);
}
return env->NewStringUTF(info);
}


}

MainActivity中调用

package com.xwl.ffmpegdemo

import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity
import kotlinx.android.synthetic.main.activity_main.*

class MainActivity : AppCompatActivity() {

    companion object {
        init {
            // Load the JNI bridge before any of the external funs is invoked.
            System.loadLibrary("native-lib")
        }
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        // Each button fills the info TextView with one native FFmpeg query.
        btn_protocol.setOnClickListener { showResult(urlprotocolinfo()) }
        btn_codec.setOnClickListener { showResult(avcodecinfo()) }
        btn_filter.setOnClickListener { showResult(avfilterinfo()) }
        btn_format.setOnClickListener { showResult(avformatinfo()) }
    }

    // Renders a native query result into tv_info.
    private fun showResult(result: String?) {
        tv_info!!.text = result
    }

    external fun stringFromJNI(): String?
    private external fun urlprotocolinfo(): String?
    private external fun avformatinfo(): String?
    private external fun avcodecinfo(): String?
    private external fun avfilterinfo(): String?

}

你可能感兴趣的:(window10环境 android studio集成ffmpeg5.1 NDK r24)