工程结构如下:
将 FFmpeg 的 include 头文件目录放入 src/main/cpp 目录下;创建 jniLibs 目录,放入 FFmpeg 编译好的 arm 和 x86 动态库;CMakeLists.txt 放入工程根目录。
# Step 1: make the bundled FFmpeg headers visible to the compiler.
include_directories(src/main/cpp/include)

# Step 2: register each prebuilt FFmpeg shared library as an IMPORTED target
# whose IMPORTED_LOCATION points at the matching .so under
# src/main/jniLibs/<ABI>/ (target "avcodec-57" -> libavcodec-57.so, etc.).
# BUG FIX: CMake comments start with '#', not '//' — the original '//' lines
# were syntax errors.
set(ffmpeg_libs
    avcodec-57
    avdevice-57
    avfilter-6
    avformat-57
    avutil-55
    postproc-54
    swresample-2
    swscale-4)
foreach(ffmpeg_lib ${ffmpeg_libs})
    add_library(${ffmpeg_lib} SHARED IMPORTED)
    set_target_properties(${ffmpeg_lib}
        PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/lib${ffmpeg_lib}.so)
endforeach()

# Step 3: link native-lib against the FFmpeg targets, OpenSL ES, and the
# NDK log library.
target_link_libraries( # Specifies the target library.
    native-lib
    ${ffmpeg_libs}
    OpenSLES
    # Links the target library to the log library
    # included in the NDK.
    ${log-lib})
android {
compileSdkVersion 29
buildToolsVersion "29.0.2"
defaultConfig {
minSdkVersion 16
targetSdkVersion 29
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
consumerProguardFiles 'consumer-rules.pro'
// Added section: native (CMake) build flags -----------------------
externalNativeBuild {
cmake {
// Enable RTTI and C++ exceptions for the native code.
cppFlags "-frtti -fexceptions"
// Build only the ABIs we ship FFmpeg .so files for.
abiFilters "armeabi","x86"
}
}
// Pick up the prebuilt FFmpeg libraries from src/main/jniLibs.
sourceSets {
main {
jniLibs.srcDirs = ['src/main/jniLibs']
}
}
// -----------------------------------------------------------------
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
// Added section: point Gradle at the CMake build script ------------
externalNativeBuild {
cmake {
path "CMakeLists.txt"
}
}
// -----------------------------------------------------------------
}
abiFilters "armeabi","x86":一般手机的指令架构为 arm 或者 x86,所以只保留这两个 ABI 即可。
/**
 * Java entry point for the native player: loads the JNI bridge library and
 * the prebuilt FFmpeg shared libraries it links against.
 */
public class WLPlayer {
    static {
        // BUG FIX: dependencies must be loaded before the library that links
        // them — on older Android (pre-API 18) the dynamic linker does not
        // resolve bundled .so dependencies automatically, so loading
        // native-lib first could fail with UnsatisfiedLinkError.
        // Order follows the FFmpeg dependency graph (avutil has no deps;
        // native-lib depends on all of them, so it goes last).
        System.loadLibrary("avutil-55");
        System.loadLibrary("swresample-2");
        System.loadLibrary("swscale-4");
        System.loadLibrary("postproc-54");
        System.loadLibrary("avcodec-57");
        System.loadLibrary("avformat-57");
        System.loadLibrary("avfilter-6");
        System.loadLibrary("avdevice-57");
        System.loadLibrary("native-lib");
    }
}
在解码音频前我们还需要做几个准备
首先编写一个WlCallJava的C++类实现和java之间的互相调用(C++需要获取JAVA虚拟机对象,和Java的一些相关信息才能回调Java)
WlCalljava 头文件
#ifndef WY_MUSIC_WLCALLJAVA_H
#define WY_MUSIC_WLCALLJAVA_H

// NOTE(review): the original had a bare "#include" whose <...> target was
// lost during extraction; <jni.h> is what these declarations need — confirm
// against the repository.
#include <jni.h>
#include "Android.h"  // project logging helpers (LOG_DEBUG, LOGD, LOGE)

#define MAIN_THRBAD 0  // callback invoked from the main (already-attached) thread
#define CHILD_THRBAD 1 // callback invoked from a worker thread (must attach first)

// Bridge for C++ -> Java callbacks: caches the JavaVM, a JNIEnv, a global
// reference to the Java player object, and the jmethodID to invoke.
class WlCallJava {
public:
    JavaVM *javaVm = NULL;    // process-wide VM, used to attach child threads
    JNIEnv *jniEnv = NULL;    // JNIEnv of the thread that constructed us (thread-local!)
    jobject jobj;             // global ref to the Java object that receives callbacks
    jmethodID jmid_parpared;  // Java method "onCallParpared()V" (sic: "parpared")
public:
    WlCallJava(JavaVM *javaVm, JNIEnv *env, jobject *obj);
    ~WlCallJava();
    void onCallParpared(int Type); // fire the prepared-callback on the given thread type
};

#endif //WY_MUSIC_WLCALLJAVA_H
WlCallJava.cpp 实现 ---------------------------
// Constructor: cache the JavaVM, the calling thread's JNIEnv, a global
// reference to the Java object, and the jmethodID of onCallParpared().
WlCallJava::WlCallJava(JavaVM *javaVm, JNIEnv *env, jobject *obj) {
    this->javaVm = javaVm;
    this->jniEnv = env;
    // A global ref is required: the local ref in *obj dies when the JNI call returns.
    this->jobj = env->NewGlobalRef(*obj);
    jclass jls = jniEnv->GetObjectClass(jobj);
    if (!jls) {
        if (LOG_DEBUG) {
            LOGE("get jclass error");
        }
        // BUG FIX: the early return used to sit inside the LOG_DEBUG check,
        // so with logging disabled we fell through and called GetMethodID on
        // a null jclass.
        return;
    }
    jmid_parpared = env->GetMethodID(jls, "onCallParpared", "()V");
}
// Invoke the Java-side onCallParpared() callback.
// Type == MAIN_THRBAD : we are on the thread that owns the cached jniEnv.
// Type == CHILD_THRBAD: attach the current worker thread to the VM first,
// because a JNIEnv is only valid on the thread it was obtained for.
void WlCallJava::onCallParpared(int Type) {
    if (Type == MAIN_THRBAD) {
        jniEnv->CallVoidMethod(jobj, jmid_parpared);
    } else if (Type == CHILD_THRBAD) {
        JNIEnv *jniEnv;  // intentionally shadows the member: per-thread env
        if (javaVm->AttachCurrentThread(&jniEnv, 0) != JNI_OK) {
            if (LOG_DEBUG) {
                LOGD("get child thread jnienv error");
            }
            // BUG FIX: return used to be inside the LOG_DEBUG block, so with
            // logging off we would call through an invalid JNIEnv after a
            // failed attach.
            return;
        }
        jniEnv->CallVoidMethod(jobj, jmid_parpared);
        javaVm->DetachCurrentThread();
    }
}
使用队列和线程锁、信号量,编写WlQueue.cpp实现生产者消费者
#ifndef WY_MUSIC_WLQUEUE_H
#define WY_MUSIC_WLQUEUE_H

extern "C" {
// NOTE(review): the original bare "#include" lost its <...> target during
// extraction; <libavcodec/avcodec.h> provides AVPacket/av_packet_* — confirm
// against the repository.
#include <libavcodec/avcodec.h>
};
#include "WlPlaystatus.h"
#include "Android.h"
#include <queue>
#include <pthread.h>

// Thread-safe FIFO of demuxed AVPackets: the demux thread produces,
// the audio decode thread consumes (classic producer/consumer).
class WlQueue {
public:
    std::queue<AVPacket *> queuePacket;  // pending packets (guarded by mutexPacket)
    pthread_mutex_t mutexPacket;         // protects queuePacket
    pthread_cond_t condPacket;           // condition variable (not a semaphore): signals "queue non-empty"
    WlPlaystatus *playstatus = NULL;     // shared play/exit state, owned elsewhere
public:
    WlQueue(WlPlaystatus *playstatus);
    ~WlQueue();
    int putAvPacket(AVPacket *packet);   // enqueue a demuxed packet (queue takes ownership)
    int getAvPacket(AVPacket *packet);   // blocking dequeue: refs into *packet
    int getQueueSize();                  // current number of queued packets
};

#endif //WY_MUSIC_WLQUEUE_H
#include "WlQueue.h"
// 初始化
// Constructor: store the shared play-state and set up the mutex and
// condition variable that guard/signal the packet queue.
WlQueue::WlQueue(WlPlaystatus *playstatus) {
this->playstatus=playstatus;
pthread_mutex_init(&mutexPacket,NULL); // default (non-recursive) mutex
pthread_cond_init(&condPacket,NULL); // signaled by putAvPacket when a packet arrives
}
// Destructor: release any packets still queued, then tear down the
// synchronization primitives.
WlQueue::~WlQueue() {
    // BUG FIX (leak): packets left in the queue were never released.
    pthread_mutex_lock(&mutexPacket);
    while (!queuePacket.empty()) {
        AVPacket *avPacket = queuePacket.front();
        queuePacket.pop();
        av_packet_free(&avPacket); // frees buffers and the struct itself
    }
    pthread_mutex_unlock(&mutexPacket);
    pthread_mutex_destroy(&mutexPacket);
    pthread_cond_destroy(&condPacket);
}
// Enqueue one demuxed packet and wake any consumer blocked in getAvPacket().
int WlQueue::putAvPacket(AVPacket *packet) {
    pthread_mutex_lock(&mutexPacket);
    queuePacket.push(packet);
    if (LOG_DEBUG) {
        int depth = (int) queuePacket.size();
        LOGD("放入一个AvPacket到队列,个数为: %d", depth);
    }
    // Signal while still holding the lock, then release it.
    pthread_cond_signal(&condPacket);
    pthread_mutex_unlock(&mutexPacket);
    return 0;
}
// Blocking dequeue: wait until a packet is available (or an exit is
// requested), copy its buffer references into *packet, and release the
// queued original.
// Returns 0 on success, -1 if nothing was retrieved (exit requested or
// av_packet_ref failed).
int WlQueue::getAvPacket(AVPacket *packet) {
    int ret = -1;
    pthread_mutex_lock(&mutexPacket);
    while (playstatus != NULL && !playstatus->exit) {
        if (queuePacket.size() > 0) {
            AVPacket *avPacket = queuePacket.front();
            if (av_packet_ref(packet, avPacket) == 0) {
                queuePacket.pop();
                // BUG FIX: the original freed the packet even when
                // av_packet_ref failed, leaving a dangling pointer at the
                // front of the queue. Free only on success.
                // (av_packet_free also frees the struct; the old extra
                // av_free(avPacket) was a no-op on the nulled pointer.)
                av_packet_free(&avPacket);
                ret = 0;
            }
            if (LOG_DEBUG) {
                LOGD("从队列取出一个AvPacket,还剩下: %d", queuePacket.size());
            }
            break;
        } else {
            // Queue empty: sleep until putAvPacket() signals.
            pthread_cond_wait(&condPacket, &mutexPacket);
        }
    }
    pthread_mutex_unlock(&mutexPacket);
    return ret;
}
// Snapshot of the number of queued packets, taken under the lock.
int WlQueue::getQueueSize() {
    pthread_mutex_lock(&mutexPacket);
    const int count = (int) queuePacket.size();
    pthread_mutex_unlock(&mutexPacket);
    return count;
}
开始编写WlFFmpeg.cpp解码音频
#include "WlCallJava.h"
#include <pthread.h>
#include "WlAudio.h"
extern "C" {
// NOTE(review): the original bare "#include" lost its <...> target during
// extraction; <libavformat/avformat.h> provides AVFormatContext — confirm
// against the repository.
#include <libavformat/avformat.h>
};

// Owns the demux/decode pipeline for one media URL: opens the input on a
// worker thread, finds the audio stream, opens its decoder, and feeds
// packets to WlAudio's queue.
class WlFFmpeg {
public:
    WlCallJava *callJava = NULL;         // C++ -> Java callback bridge (not owned)
    const char *url = NULL;              // media source address (not owned)
    pthread_t decodeThread;              // worker thread running decodeFFmpegThread()
    AVFormatContext *pFormatCtx = NULL;  // demuxer context for the opened input
    WlAudio *audio = NULL;               // audio decoder/player, created on demand
    WlPlaystatus *playstatus = NULL;     // shared play/exit flags (not owned)
public:
    WlFFmpeg(WlPlaystatus *playstatus, WlCallJava *callJava1, const char *url);
    ~WlFFmpeg();
    void parpared();            // (sic) spawn the prepare/decode thread
    void decodeFFmpegThread();  // thread body: open input, pick audio stream, open codec
    void start();               // demux loop: read packets into the audio queue
};
#endif //WY_MUSIC_WLFFMPEG_H
#include "WlFFmpeg.h"
// Constructor: just record the collaborators; all real work happens later
// in parpared()/decodeFFmpegThread().
WlFFmpeg::WlFFmpeg(WlPlaystatus *playstatus, WlCallJava *callJava1, const char *url) {
    this->playstatus = playstatus;
    this->callJava = callJava1;
    this->url = url;  // address of the media to play
}
//线程回调
void *decodeFFmpeg(void *data){
WlFFmpeg* wlFFmpeg= (WlFFmpeg *)(data);
wlFFmpeg->decodeFFmpegThread();
pthread_exit(&wlFFmpeg->decodeThread);
}
// Kick off preparation on a background thread
// (decodeFFmpeg trampoline -> decodeFFmpegThread).
void WlFFmpeg::parpared() {
pthread_create(&decodeThread,NULL,decodeFFmpeg,this);
}
// Thread body: open the input, locate the first audio stream, open its
// decoder, then notify the Java layer (from this child thread) that
// playback can start.
void WlFFmpeg::decodeFFmpegThread() {
    LOGD("解码开始了------------");
    av_register_all();        // register demuxers/decoders (no-op in FFmpeg >= 4.0)
    avformat_network_init();  // required for network URLs (http/rtmp/...)
    pFormatCtx = avformat_alloc_context();
    if (avformat_open_input(&pFormatCtx, url, NULL, NULL) != 0) {
        if (LOG_DEBUG) {
            LOGD("can not open url :%s", url);
        }
        return;
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        if (LOG_DEBUG) {
            LOGD("can not find stream from url :%s", url);
        }
        return;
    }
    // Pick the first audio stream and hand its parameters to the audio object.
    for (int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            if (audio == NULL) {
                audio = new WlAudio(playstatus);
                audio->streamIndex = i;
                audio->codecpar = pFormatCtx->streams[i]->codecpar;
            }
        }
    }
    // BUG FIX: the original dereferenced audio->codecpar unconditionally and
    // crashed on inputs that contain no audio stream.
    if (audio == NULL) {
        if (LOG_DEBUG) {
            LOGD("can not find audio stream ");
        }
        return;
    }
    AVCodec *dec = avcodec_find_decoder(audio->codecpar->codec_id);
    if (!dec) {
        if (LOG_DEBUG) {
            LOGD("can not find decoder ");
        }
        return;
    }
    audio->avCodecContext = avcodec_alloc_context3(dec);
    if (!audio->avCodecContext) {
        if (LOG_DEBUG) {
            LOGD("can not alloc new decoderContext ");
        }
        return;
    }
    // Copy the demuxer's stream parameters into the decoder context.
    if (avcodec_parameters_to_context(audio->avCodecContext, audio->codecpar) < 0) {
        if (LOG_DEBUG) {
            LOGD("can not fill decoderContext");
        }
        return;
    }
    // Open the decoder.
    if (avcodec_open2(audio->avCodecContext, dec, 0) != 0) {
        if (LOG_DEBUG) {
            LOGD("can not open streams ");
        }
        return;
    }
    // Success: notify the Java layer from this child thread.
    callJava->onCallParpared(CHILD_THRBAD);
}
// Demux loop: read packets from the container and push audio-stream packets
// into the WlAudio queue until EOF or an external exit request, then drain
// whatever is left so no packets leak.
void WlFFmpeg::start() {
if(audio==NULL){
if(LOG_DEBUG){
LOGD("audio is null");
}
return;
}
// Start the OpenSL ES playback thread; it consumes from audio->queue.
audio->play();
int count=0;
// Read packets until the container is exhausted or exit is requested.
while(playstatus!=NULL&&!playstatus->exit){
AVPacket *avPacket=av_packet_alloc();
if(av_read_frame(pFormatCtx,avPacket)==0){
if(avPacket->stream_index==audio->streamIndex){
count++;
if(LOG_DEBUG){
LOGD("解码第 %d 帧",count);
}
// Hand ownership of the packet to the queue.
audio->queue->putAvPacket(avPacket);
} else{
// Not the audio stream: discard the packet.
// NOTE(review): the av_free after av_packet_free is redundant —
// av_packet_free already frees the struct and nulls the pointer,
// so av_free(NULL) is a no-op.
av_packet_free(&avPacket);
av_free(avPacket);
avPacket=NULL;
}
} else{
// av_read_frame failed (normally EOF): release the unused packet...
av_packet_free(&avPacket);
av_free(avPacket);
avPacket=NULL;
// ...then wait for the consumer to drain the queue before flagging exit.
// NOTE(review): this is a busy-wait that spins a CPU core until the
// queue empties; consider sleeping (e.g. av_usleep) or waiting on a
// condition variable instead.
while(playstatus!=NULL&&!playstatus->exit){
if(audio->queue->getQueueSize()>0){
continue;
}else{
playstatus->exit=true;
break;
}
}
break;
}
}
// Drain anything still queued so packets are not leaked at shutdown.
while(audio->queue->getQueueSize()>0){
AVPacket *avPacket=av_packet_alloc();
audio->queue->getAvPacket(avPacket);
av_packet_free(&avPacket);
av_free(avPacket);
avPacket=NULL;
}
if(LOG_DEBUG){
LOGD("解码完成");
}
}
· 引入 OpenSL ES 动态库
#include "WlQueue.h"
#include "WlPlaystatus.h"
// NOTE(review): the four bare "#include" lines below lost their <...>
// targets during extraction; these headers match the declarations used
// (SL* types, AVCodec*, SwrContext) — confirm against the repository.
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libswresample/swresample.h>
};

// Decodes audio packets from its queue, resamples them to 44.1 kHz stereo
// 16-bit PCM, and plays the PCM through OpenSL ES.
class WlAudio {
public:
    int streamIndex = -1;                  // index of the audio stream in the container
    AVCodecParameters *codecpar;           // stream parameters (owned by AVFormatContext)
    AVCodecContext *avCodecContext = NULL; // opened decoder context
    WlQueue *queue = NULL;                 // packet queue fed by the demux thread (owned)
    WlPlaystatus *playstatus = NULL;       // shared play/exit flags (not owned)
    int ret = -1;                          // scratch return code for codec calls
    pthread_t thread_play;                 // thread running initOpenSLES()
    AVPacket *avPacket = NULL;             // packet currently being decoded
    AVFrame *avFrame = NULL;               // decoded (pre-resample) audio frame
    uint8_t *buffer = NULL;                // resample output: 1 s of 44.1 kHz stereo 16-bit
    int data_size = 0;                     // bytes valid in buffer after resampleAudio()
    // OpenSL ES engine
    SLObjectItf engineObject = NULL;
    SLEngineItf engineEngine = NULL;
    // Output mix (with optional environmental reverb)
    SLObjectItf outputMixObject = NULL;
    SLEnvironmentalReverbSettings reverbSettings = SL_I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR;
    SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
    // PCM player
    SLObjectItf pcmPlayerObject = NULL;
    SLPlayItf pcmPlayerPlay = NULL;
    // Buffer queue used to hand PCM chunks to the player
    SLAndroidSimpleBufferQueueItf pcmBufferQueue = NULL;
public:
    WlAudio(WlPlaystatus *playstatus);
    ~WlAudio();
    void play();          // spawn the OpenSL ES playback thread
    int resampleAudio();  // decode+resample one frame; returns byte count in buffer
    void initOpenSLES();  // build the OpenSL ES engine/player and start playback
};
#include "WlAudio.h"
// Constructor: share the play state, create the packet queue, and allocate
// the resample output buffer (1 second of 44100 Hz * 2 channels * 2 bytes).
WlAudio::WlAudio(WlPlaystatus *playstatus) {
this->playstatus=playstatus;
queue=new WlQueue(playstatus);
buffer= (uint8_t *)av_malloc(44100 * 2 * 2);
}
// Destructor. BUG FIX (leak): the original released nothing; free the
// resample buffer and the packet queue allocated in the constructor.
WlAudio::~WlAudio() {
    if (buffer != NULL) {
        av_free(buffer);
        buffer = NULL;
    }
    if (queue != NULL) {
        delete queue;
        queue = NULL;
    }
}
// NOTE(review): debug-only PCM dump target. Opening it at static-init time
// truncated /mnt/sdcard/1/mymusic.pcm on every app start (and needs storage
// permission) even though the matching fwrite() below is commented out —
// disabled to match. Re-enable together with the fwrite when debugging.
//FILE *outFile=fopen("/mnt/sdcard/1/mymusic.pcm","w");
void *decodePlay(void *data){
WlAudio *wlAudio= (WlAudio *)(data);
wlAudio->initOpenSLES();
pthread_exit(&wlAudio->thread_play);
}
// Start playback asynchronously: decodePlay() -> initOpenSLES() on a new thread.
void WlAudio::play() {
    pthread_create(&thread_play, NULL, decodePlay, this);
}
// Decode one packet from the queue, resample the resulting frame to
// 44.1 kHz stereo 16-bit PCM in this->buffer, and return the number of
// valid bytes (data_size). Returns the previous data_size (initially 0)
// if playback is exiting before a frame is produced.
// NOTE(review): one avcodec_receive_frame per send assumes one frame per
// packet; a SwrContext is also created and destroyed per frame, which is
// wasteful — both are acceptable for a tutorial but worth fixing.
int WlAudio::resampleAudio() {
while(playstatus!=NULL&&!playstatus->exit){
avPacket=av_packet_alloc();
if(queue->getAvPacket(avPacket)!=0){
// Nothing retrieved (exit requested): release and retry.
av_packet_free(&avPacket);
av_free(avPacket);
avPacket=NULL;
continue;
}
ret=avcodec_send_packet(avCodecContext,avPacket);
if(ret!=0){
av_packet_free(&avPacket);
av_free(avPacket);
avPacket=NULL;
continue;
}
avFrame=av_frame_alloc();
ret=avcodec_receive_frame(avCodecContext,avFrame);
if(ret==0){
// Repair frames that carry only one of channels / channel_layout.
if(avFrame->channels>0&&avFrame->channel_layout==0){
avFrame->channel_layout=av_get_default_channel_layout(avFrame->channels);
}
else if(avFrame->channels==0&&avFrame->channel_layout>0){
avFrame->channels=av_get_channel_layout_nb_channels(avFrame->channel_layout);
}
// Create the resampler: source = frame's layout/format/rate,
// destination = stereo S16 at the same sample rate.
SwrContext *swrContext=NULL;
swrContext=swr_alloc_set_opts(NULL, AV_CH_LAYOUT_STEREO,
AV_SAMPLE_FMT_S16,
avFrame->sample_rate,
avFrame->channel_layout,
(AVSampleFormat)(avFrame->format),
avFrame->sample_rate,
NULL,
NULL);
if(!swrContext||swr_init(swrContext)<0){
// Resampler setup failed: release everything and try the next packet.
av_packet_free(&avPacket);
av_free(avPacket);
avPacket=NULL;
av_frame_free(&avFrame);
av_free(avFrame);
avFrame=NULL;
if(swrContext!=NULL){
swr_free(&swrContext);
swrContext=NULL;
}
continue;
}
// Convert into buffer; nb = samples produced per channel.
int nb=swr_convert(swrContext, &buffer, avFrame->nb_samples,
(const uint8_t **)(avFrame->data), avFrame->nb_samples);
int out_channels=av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);
data_size=nb*out_channels*av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);
//fwrite(buffer,1,data_size,outFile);
if(LOG_DEBUG){
LOGD("data size is %d",data_size);
}
// Release per-frame resources and hand the PCM to the caller.
// NOTE(review): av_free after av_packet_free/av_frame_free is redundant —
// those calls already free the struct and null the pointer.
av_packet_free(&avPacket);
av_free(avPacket);
avPacket=NULL;
av_frame_free(&avFrame);
av_free(avFrame);
avFrame=NULL;
swr_free(&swrContext);
swrContext=NULL;
break;
}else{
// No frame available from this packet: clean up and keep reading.
av_packet_free(&avPacket);
av_free(avPacket);
avPacket=NULL;
av_frame_free(&avFrame);
av_free(avFrame);
avFrame=NULL;
continue;
}
}
return data_size;
}
// OpenSL ES buffer-queue callback: decode/resample the next frame and, if
// any PCM was produced, enqueue it so the player keeps draining.
void pcmBufferCallBack(SLAndroidSimpleBufferQueueItf bf, void *context) {
    WlAudio *wlAudio = static_cast<WlAudio *>(context);
    if (wlAudio == NULL) {
        return;
    }
    int buffersize = wlAudio->resampleAudio();
    if (buffersize > 0) {
        (*wlAudio->pcmBufferQueue)->Enqueue(wlAudio->pcmBufferQueue, wlAudio->buffer, buffersize);
    }
}
// Build the OpenSL ES chain (engine -> output mix -> PCM player), register
// the buffer-queue callback, start the player, and prime the queue with the
// first buffer by invoking the callback once by hand.
void WlAudio::initOpenSLES() {
    SLresult result;
    // Step 1: create and realize the engine, then fetch its interface.
    result = slCreateEngine(&engineObject, 0, 0, 0, 0, 0);
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
    // Step 2: create the output mix; environmental reverb is optional
    // (mreq = FALSE), so failure to get it is tolerated below.
    const SLInterfaceID mids[1] = {SL_IID_ENVIRONMENTALREVERB};
    const SLboolean mreq[1] = {SL_BOOLEAN_FALSE};
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, mids, mreq);
    (void) result;
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    (void) result;
    result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB, &outputMixEnvironmentalReverb);
    if (SL_RESULT_SUCCESS == result) {
        result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &reverbSettings
        );
        (void) result;
    }
    SLDataLocator_OutputMix outputMix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&outputMix, 0};
    // Step 3: source format — a 2-deep Android simple buffer queue carrying
    // 44.1 kHz, 16-bit, interleaved stereo, little-endian PCM.
    SLDataLocator_AndroidSimpleBufferQueue android_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
    SLDataFormat_PCM pcm = {
            SL_DATAFORMAT_PCM,               // PCM data
            2,                               // channel count
            SL_SAMPLINGRATE_44_1,            // 44100 Hz (value is in milliHz)
            SL_PCMSAMPLEFORMAT_FIXED_16,     // bits per sample
            SL_PCMSAMPLEFORMAT_FIXED_16,     // container size, same as sample bits
            SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,  // stereo (front L/R)
            SL_BYTEORDER_LITTLEENDIAN        // byte order
    };
    SLDataSource slDataSource = {&android_queue, &pcm};
    const SLInterfaceID ids[1] = {SL_IID_BUFFERQUEUE};
    // BUG FIX: the buffer-queue interface is mandatory for this player —
    // requesting it with SL_BOOLEAN_FALSE allowed player creation to
    // "succeed" without the interface we depend on below.
    const SLboolean req[1] = {SL_BOOLEAN_TRUE};
    (*engineEngine)->CreateAudioPlayer(engineEngine, &pcmPlayerObject, &slDataSource, &audioSnk, 1, ids, req);
    // Realize the player.
    (*pcmPlayerObject)->Realize(pcmPlayerObject, SL_BOOLEAN_FALSE);
    // Fetch the play interface.
    (*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_PLAY, &pcmPlayerPlay);
    // Fetch the buffer-queue interface and register the refill callback.
    (*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_BUFFERQUEUE, &pcmBufferQueue);
    (*pcmBufferQueue)->RegisterCallback(pcmBufferQueue, pcmBufferCallBack, this);
    // Start playing, then prime the queue with the first buffer.
    (*pcmPlayerPlay)->SetPlayState(pcmPlayerPlay, SL_PLAYSTATE_PLAYING);
    pcmBufferCallBack(pcmBufferQueue, this);
}
2、视频演示
演示视频播放地址