//-------------------------- Android log
#include <jni.h>
#include <android/log.h>
#define LOG_TAG "zbv"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
//-------------------------- Android log
//------------------------- NDK OpenSL ES
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
//------------------------- NDK OpenSL ES
//memcpy
#include <string.h>
//basic unix/linux system data types
#include <sys/types.h>
//-------------------------- FFmpeg
//container demuxing
#include <libavformat/avformat.h>
//codecs
#include <libavcodec/avcodec.h>
//resampling
#include <libswresample/swresample.h>
//option setting for the resample parameters
#include <libavutil/opt.h>
//-------------------------- FFmpeg
#ifdef __cplusplus
extern "C" {
#endif
/**
 * Create the OpenSL ES engine and output mix.
 * @return 0 (JNI_FALSE) on failure, 1 (JNI_TRUE) on success.
 */
int createOpenslEngine();
/**
 * Create the OpenSL ES buffer-queue audio player.
 * NOTE(review): the definition below takes (int sampleRate, int channels);
 * empty parentheses in C mean "unspecified arguments", so this compiles,
 * but the prototype should be updated to match the definition.
 * @return 0 (JNI_FALSE) on failure, 1 (JNI_TRUE) on success.
 */
int createBufferQueue();
/**
 * BufferQueue callback: invoked each time a previously enqueued
 * buffer finishes playing.
 */
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context);
/**
 * Release all FFmpeg and OpenSL ES resources.
 */
void releaseResource();
// Target sample format after resampling: signed 16-bit interleaved PCM.
static const enum AVSampleFormat dst_sample_fmt=AV_SAMPLE_FMT_S16;
static AVCodecContext* audio_codec_ctx;
static AVFormatContext* fmt_ctx=NULL;//must be NULL-initialized, otherwise avformat_open_input crashes
static uint8_t **dst_data=NULL; // resampled-output sample buffers
static AVPacket* packet;
static AVFrame* frame;
static struct SwrContext* swr_ctx; // resampler context
static int channels; // channel count of the source audio
static int dst_nb_sample; // current per-channel sample count needed in dst_data
static int max_dst_nb_sample; // allocated capacity of dst_data (samples per channel)
static int stream_index=-1; // index of the selected audio stream
//engine interface
static SLObjectItf engineObject=NULL;
static SLEngineItf engineEngine;
//output mix
static SLObjectItf outputMixObject = NULL;
static SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
//stone-corridor reverb preset
static SLEnvironmentalReverbSettings reverbSettings = SL_I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR;
// buffer queue player interfaces
static SLObjectItf bqPlayerObject = NULL;
static SLPlayItf bqPlayerPlay;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
/**
 * JNI entry point: opens the audio file at sourcePath with FFmpeg, sets up a
 * resampler to interleaved S16 PCM, creates the OpenSL ES engine and
 * buffer-queue player, and primes playback by invoking the callback once.
 *
 * Fixes over the original: every error path now releases the JNI string and
 * the FFmpeg/OpenSL resources (the original leaked them on several branches),
 * and the packet malloc is checked.
 */
JNIEXPORT void Java_com_example_simpleTestFFmpeg_opensles_FFmpegAndOpenslActivity_decodeMusicPlayByOpensl(JNIEnv* env,jclass clazz,jstring sourcePath){
    int ret;
    AVStream* stream;
    AVCodec* codec;
    /* In C the JNIEnv is used as (*env)->fn(env,...); C++ would use env->fn(...). */
    const char* original_path=(*env)->GetStringUTFChars(env,sourcePath,NULL);
    if(!original_path){
        LOGE("GetStringUTFChars failed");
        return;
    }
    LOGE("原始文件path=%s",original_path);
    /* Open the input and read the container header; the AVInputFormat*
     * argument is auto-detected when NULL. */
    if(avformat_open_input(&fmt_ctx,original_path,NULL,NULL)<0){
        LOGD("无法打开源文件");
        goto fail;
    }
    /* Read some packets to discover stream information. */
    if(avformat_find_stream_info(fmt_ctx,NULL)<0){
        LOGD("无法读取源文件流信息");
        goto fail;
    }
    /* Pick the best audio stream (-1 = auto-select). */
    ret=av_find_best_stream(fmt_ctx,AVMEDIA_TYPE_AUDIO,-1,-1,NULL,0);
    if(ret<0){
        LOGD("没有找到%s类型的输入流",av_get_media_type_string(AVMEDIA_TYPE_AUDIO));
        goto fail;
    }
    stream_index=ret;
    stream=fmt_ctx->streams[stream_index];
    /* Find and open the decoder matching the stream's codec id. */
    codec=avcodec_find_decoder(stream->codecpar->codec_id);
    if(!codec){
        LOGD("失败去找到%s类型的解码器",av_get_media_type_string(AVMEDIA_TYPE_AUDIO));
        goto fail;
    }
    audio_codec_ctx=avcodec_alloc_context3(codec);
    if(!audio_codec_ctx){
        LOGD("给AVCodecContext内存分配失败");
        goto fail;
    }
    ret=avcodec_parameters_to_context(audio_codec_ctx,stream->codecpar);
    if(ret<0){
        LOGD("给AVCodecContext拷贝参数失败");
        goto fail;
    }
    ret=avcodec_open2(audio_codec_ctx,codec,NULL);
    if(ret<0){
        LOGD("打开%s类型的解码器失败",av_get_media_type_string(AVMEDIA_TYPE_AUDIO));
        goto fail;
    }
    /* The path string is no longer needed; release it as soon as possible. */
    (*env)->ReleaseStringUTFChars(env,sourcePath,original_path);
    original_path=NULL;
    /* Configure the resampler: same layout and rate as the source, S16 out. */
    swr_ctx=swr_alloc();
    if(!swr_ctx){
        LOGD("分配resample context失败");
        goto fail;
    }
    av_opt_set_int(swr_ctx,"in_channel_layout",audio_codec_ctx->channel_layout,0);
    av_opt_set_int(swr_ctx,"in_sample_rate",audio_codec_ctx->sample_rate,0);
    av_opt_set_sample_fmt(swr_ctx,"in_sample_fmt",audio_codec_ctx->sample_fmt,0);
    channels=av_get_channel_layout_nb_channels(audio_codec_ctx->channel_layout);
    LOGD("channels=%d,sampleRate=%d,sampleFmt=%s",channels,audio_codec_ctx->sample_rate,av_get_sample_fmt_name(audio_codec_ctx->sample_fmt));
    av_opt_set_int(swr_ctx,"out_channel_layout",audio_codec_ctx->channel_layout,0);
    av_opt_set_int(swr_ctx,"out_sample_rate",audio_codec_ctx->sample_rate,0);
    av_opt_set_sample_fmt(swr_ctx,"out_sample_fmt",dst_sample_fmt,0);
    ret=swr_init(swr_ctx);
    if(ret<0){
        LOGD("初始化resample context失败");
        goto fail;
    }
    /* Initial output buffer sized for one codec frame; in == out rate here,
     * so the rescale is an identity kept for symmetry with the callback. */
    max_dst_nb_sample=dst_nb_sample=av_rescale_rnd(audio_codec_ctx->frame_size,audio_codec_ctx->sample_rate,audio_codec_ctx->sample_rate,AV_ROUND_UP);
    ret=av_samples_alloc_array_and_samples(&dst_data,NULL,channels,dst_nb_sample,dst_sample_fmt,0);
    if(ret<0){
        LOGD("分配dst_data失败");
        goto fail;
    }
    frame=av_frame_alloc();
    if(!frame){
        LOGD("无法分配frame");
        goto fail;
    }
    packet=malloc(sizeof *packet);
    if(!packet){ /* fix: the original never checked this malloc */
        LOGD("分配AVPacket失败");
        goto fail;
    }
    av_init_packet(packet);
    packet->data=NULL;
    packet->size=0;
    /* Bring up OpenSL ES: engine + output mix, then the buffer-queue player. */
    if(createOpenslEngine()==JNI_FALSE){
        LOGE("创建opensles引擎失败");
        goto fail;
    }
    if(createBufferQueue(audio_codec_ctx->sample_rate,channels)==JNI_FALSE){
        LOGE("创建buffer queue播放器失败");
        goto fail;
    }
    LOGD("start av_read_frame");
    /* Prime playback by invoking the callback once by hand; each Enqueue()
     * afterwards triggers the next invocation. */
    bqPlayerCallback(bqPlayerBufferQueue,NULL);
    return;
fail:
    /* Single cleanup path: releaseResource() tolerates NULL/partial state. */
    releaseResource();
    if(original_path){
        (*env)->ReleaseStringUTFChars(env,sourcePath,original_path);
    }
}
void releaseResource(){
avcodec_free_context(&audio_codec_ctx);
avformat_close_input(&fmt_ctx);
if(dst_data){
av_freep(&dst_data[0]);
}
av_freep(&dst_data);
av_frame_free(&frame);
swr_free(&swr_ctx);
// destroy buffer queue audio player object, and invalidate all associated interfaces
if (bqPlayerObject != NULL) {
(*bqPlayerObject)->Destroy(bqPlayerObject);
bqPlayerObject = NULL;
bqPlayerPlay = NULL;
bqPlayerBufferQueue = NULL;
}
if(NULL!=outputMixObject){
(*outputMixObject)->Destroy(outputMixObject);
outputMixObject=NULL;
outputMixEnvironmentalReverb=NULL;
}
if (engineObject != NULL) {
(*engineObject)->Destroy(engineObject);
engineObject = NULL;
engineEngine = NULL;
}
}
/*JNI_FALSE=0 JNI_SUCCESS=1*/
/**
 * Build the global OpenSL ES objects: a thread-safe engine, its SLEngineItf,
 * and an output mix that optionally exposes environmental reverb. Failure to
 * obtain or configure the reverb interface is non-fatal.
 *
 * @return JNI_TRUE (1) on success, JNI_FALSE (0) on failure.
 */
int createOpenslEngine(){
    // Request a thread-safe engine instance.
    const SLEngineOption opts[1]={{(SLuint32) SL_ENGINEOPTION_THREADSAFE,(SLuint32) SL_BOOLEAN_TRUE}};
    SLresult res=slCreateEngine(&engineObject,1,opts,0,NULL,NULL);
    if(SL_RESULT_SUCCESS!=res){
        LOGD("opensl es引擎创建初始化失败");
        return JNI_FALSE;
    }
    // Realize() moves an object from unrealized to usable; FALSE = synchronous.
    res=(*engineObject)->Realize(engineObject,SL_BOOLEAN_FALSE);
    if(SL_RESULT_SUCCESS!=res){
        LOGD("引擎Object实例化失败");
        return JNI_FALSE;
    }
    // Expose the engine interface used to create further objects.
    res=(*engineObject)->GetInterface(engineObject,SL_IID_ENGINE,&engineEngine);
    if(SL_RESULT_SUCCESS!=res){
        LOGD("引擎接口获取失败");
        return JNI_FALSE;
    }
    // Create the output mix, requesting the environmental-reverb interface.
    const SLInterfaceID wantIds[1]={SL_IID_ENVIRONMENTALREVERB};
    const SLboolean wantReq[1]={SL_BOOLEAN_TRUE};
    res=(*engineEngine)->CreateOutputMix(engineEngine,&outputMixObject,1,wantIds,wantReq);
    if(SL_RESULT_SUCCESS!=res){
        LOGD("创建输出混音器失败");
        return JNI_FALSE;
    }
    res=(*outputMixObject)->Realize(outputMixObject,SL_BOOLEAN_FALSE);
    if(SL_RESULT_SUCCESS!=res){
        LOGD("输出混音器outout mix实例化失败");
        return JNI_FALSE;
    }
    // Reverb is best-effort: playback works fine without it.
    res=(*outputMixObject)->GetInterface(outputMixObject,SL_IID_ENVIRONMENTALREVERB,&outputMixEnvironmentalReverb);
    if(SL_RESULT_SUCCESS==res){
        if(SL_RESULT_SUCCESS!=(*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(outputMixEnvironmentalReverb,&reverbSettings)){
            LOGD("混响属性设置失败");
        }
    }else{
        LOGD("获取环境混响接口失败");
    }
    return JNI_TRUE;
}
/**
创建pcm播放格式:采样率、通道数、单个样本的比特率(s16le)
*/
int createBufferQueue(int sampleRate,int channels){
SLresult result;
// configure audio source
SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
int numChannels=2;
SLuint32 samplesPerSec=SL_SAMPLINGRATE_44_1;//注意是毫秒赫兹
SLuint32 bitsPerSample=SL_PCMSAMPLEFORMAT_FIXED_16;
SLuint32 containerSize=SL_PCMSAMPLEFORMAT_FIXED_16;
//引文channels=2,native-audio-jni.c中的例子是单声道的所以取SL_SPEAKER_FRONT_CENTER
SLuint32 channelMask=SL_SPEAKER_FRONT_LEFT|SL_SPEAKER_FRONT_RIGHT;
SLuint32 endianness=SL_BYTEORDER_LITTLEENDIAN;
numChannels=channels;
if(channels==1){
channelMask=SL_SPEAKER_FRONT_CENTER;
}else{
//2以及更多
channelMask=SL_SPEAKER_FRONT_LEFT|SL_SPEAKER_FRONT_RIGHT;
}
samplesPerSec=(SLuint32)(sampleRate*1000);
SLDataFormat_PCM format_pcm={SL_DATAFORMAT_PCM,(SLuint32)numChannels,samplesPerSec,bitsPerSample,containerSize,channelMask,endianness};
SLDataSource audioSrc = {&loc_bufq, &format_pcm};
// configure audio sink
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL};
// create audio player
const SLInterfaceID ids[1] = {SL_IID_BUFFERQUEUE};
const SLboolean req[1] = {SL_BOOLEAN_TRUE};
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
1, ids, req);
if(result!=SL_RESULT_SUCCESS){
LOGD("创建audioplayer失败");
return JNI_FALSE;
}
result=(*bqPlayerObject)->Realize(bqPlayerObject,SL_BOOLEAN_FALSE);
if(result!=SL_RESULT_SUCCESS){
LOGD("实例化audioplayer失败");
return JNI_FALSE;
}
LOGD("---createBufferQueueAudioPlayer---");
// get the play interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
if(result!=SL_RESULT_SUCCESS){
LOGD("获取play接口对象失败");
return JNI_FALSE;
}
// get the buffer queue interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
&bqPlayerBufferQueue);
if(result!=SL_RESULT_SUCCESS){
LOGD("获取BUFFERQUEUE接口对象失败");
return JNI_FALSE;
}
// register callback on the buffer queue
result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
if(result!=SL_RESULT_SUCCESS){
LOGD("获取play接口对象失败");
return JNI_FALSE;
}
// set the player's state to playing
result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
if(result!=SL_RESULT_SUCCESS){
LOGD("设置为可播放状态失败");
return JNI_FALSE;
}
return JNI_TRUE;
}
/**
 * BufferQueue callback: runs each time an enqueued buffer finishes playing.
 * Reads packets until one audio packet has been decoded, resampled to S16
 * PCM and enqueued; on end of file (or read error) everything is released.
 *
 * Fix over the original: a packet from a non-audio stream used to hit the
 * unconditional break without enqueuing anything, so the callback chain
 * stopped and playback stalled. Such packets are now skipped.
 */
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context){
    int read_ret,ret;
    /* av_read_frame: 0 if OK, < 0 on error or end of file. */
    while((read_ret=av_read_frame(fmt_ctx,packet))>=0){
        if(packet->stream_index!=stream_index){
            av_packet_unref(packet);
            continue; /* not our audio stream; keep reading */
        }
        /* Send the compressed packet to the decoder. */
        ret=avcodec_send_packet(audio_codec_ctx,packet);
        if(ret<0){
            LOGD("音频解码AVPacket出错了");
            av_packet_unref(packet);
            releaseResource();
            return;
        }
        /* Drain every frame the decoder produces for this packet. */
        while(ret>=0){
            ret=avcodec_receive_frame(audio_codec_ctx,frame);
            if(ret==AVERROR(EAGAIN) || ret==AVERROR_EOF){
                break;
            }else if(ret<0){
                LOGD("音频解码AVFrame出错了");
                av_packet_unref(packet);
                releaseResource();
                return;
            }
            /* Grow the output buffer if the resampler needs more room
             * (internal delay can push it past the initial size). */
            dst_nb_sample=av_rescale_rnd(swr_get_delay(swr_ctx,frame->sample_rate)+frame->nb_samples,frame->sample_rate,frame->sample_rate,AV_ROUND_UP);
            if(dst_nb_sample>max_dst_nb_sample){
                av_freep(&dst_data[0]);
                ret=av_samples_alloc(dst_data,NULL,channels,dst_nb_sample,dst_sample_fmt,1);
                if(ret<0){
                    LOGD("重新分配dst_data失败");
                    break;
                }
                max_dst_nb_sample=dst_nb_sample;
            }
            /* Convert to interleaved S16; returns samples per channel. */
            ret=swr_convert(swr_ctx,dst_data,dst_nb_sample,(const uint8_t**)frame->data,frame->nb_samples);
            if(ret<0){
                LOGD("swr_convert转换错误");
                av_packet_unref(packet);
                releaseResource();
                return;
            }
            /* Byte size of the converted samples. */
            int dst_buffer_size=av_samples_get_buffer_size(NULL,channels,ret,dst_sample_fmt,1);
            if(dst_buffer_size<0){
                LOGD("获取样本buffer大小失败");
                av_packet_unref(packet);
                releaseResource();
                return;
            }
            /* MP3 frames carry 1152 samples, AAC 1024/2048. */
            LOGD("WRITE TO AUDIOTRACK %d",dst_buffer_size);
            SLresult result=(*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue,dst_data[0],dst_buffer_size);
            if(result!=SL_RESULT_SUCCESS){
                LOGD("入队失败");
            }
        }
        av_packet_unref(packet);/* was av_free_packet() in old FFmpeg */
        /* One audio packet per callback; the Enqueue above triggers the
         * next invocation when its buffer finishes playing. */
        break;
    }
    if(read_ret<0){
        LOGE("解封装解码全部完成!!!");
        releaseResource();
    }
}
#ifdef __cplusplus
}
#endif
package com.example.simpleTestFFmpeg.opensles;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import com.example.simpleTestFFmpeg.R;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class FFmpegAndOpenslActivity extends AppCompatActivity {
    static {
        // FFmpeg shared libraries must be loaded before the JNI wrapper,
        // in dependency order.
        System.loadLibrary("avutil-56");
        System.loadLibrary("swresample-3");
        System.loadLibrary("avcodec-58");
        System.loadLibrary("avformat-58");
        System.loadLibrary("swscale-5");
        System.loadLibrary("postproc-55");
        System.loadLibrary("avfilter-7");
        System.loadLibrary("avdevice-58");
        System.loadLibrary("ffmpegAndOpensles");
    }

    // Single worker thread: the native decoder uses global state, so at most
    // one decode/playback job may run at a time. Fix: the original created a
    // new ExecutorService per click and never shut it down, leaking threads.
    private ExecutorService executorService;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_ffmpeg_and_opensl);
        executorService = Executors.newSingleThreadExecutor();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        executorService.shutdown();
    }

    // onClick handler wired to a button in the layout; decodes and plays the
    // hard-coded MP3 from the public Music directory off the UI thread.
    public void decodeAndPlay(View view) {
        executorService.execute(new Runnable() {
            @Override
            public void run() {
                decodeMusicPlayByOpensl(Environment.getExternalStoragePublicDirectory
                        (Environment.DIRECTORY_MUSIC).getAbsolutePath() + "/lky_bhs_mp3.mp3");
            }
        });
    }

    private native void decodeMusicPlayByOpensl(String sourcePath);
}
# Android.mk for FFmpeg
#
# Lei Xiaohua (雷霄骅)
# [email protected]
# http://blog.csdn.net/leixiaohua1020
#
LOCAL_PATH := $(call my-dir)
# ---- Prebuilt FFmpeg shared libraries --------------------------------------
# Each block below registers one prebuilt .so from libs/ as an importable
# module, referenced later via LOCAL_SHARED_LIBRARIES.
# FFmpeg library
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libavcodec-58.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avdevice
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libavdevice-58.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libavfilter-7.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libavformat-58.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libavutil-56.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := postproc
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libpostproc-55.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libswresample-3.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libswscale-5.so
include $(PREBUILT_SHARED_LIBRARY)
# ---- The JNI wrapper module -------------------------------------------------
# Builds ffmpegAndOpensles.c against the prebuilt FFmpeg modules above and
# links the NDK's log, zlib, OpenSL ES and android system libraries.
# Program
include $(CLEAR_VARS)
LOCAL_MODULE := ffmpegAndOpensles
LOCAL_SRC_FILES := ffmpegAndOpensles.c
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_LDLIBS := -llog -lz -lOpenSLES -landroid
LOCAL_SHARED_LIBRARIES := avcodec avdevice avfilter avformat avutil postproc swresample swscale
include $(BUILD_SHARED_LIBRARY)
# Baidu cloud drive: https://pan.baidu.com/s/1RxRWpiWn4ZawC3vrJPYr2A (password: pr72)
# CSDN blog download: https://download.csdn.net/download/zb52588/10687951