17. Converting Audio to PCM and Playing Audio Files with AudioTrack from Native Code

1. Introduction to Android AudioTrack

On Android, audio can be played with either MediaPlayer or AudioTrack, but the two differ significantly: MediaPlayer can play sound files in many formats, such as MP3, AAC, WAV, OGG, and MIDI, whereas AudioTrack only plays raw PCM streams.
In fact, the two are essentially the same underneath. When MediaPlayer plays audio, the framework layer still creates an AudioTrack, passes the decoded PCM stream to it, and AudioFlinger then mixes the audio and hands it to the hardware for playback. Playing through AudioTrack directly simply skips MediaPlayer's decoding stage. MediaPlayer's decoding core is based on OpenCORE, which supports common audio, video, and image formats, with codecs extended through the OpenMAX interface. Therefore, to play an MP3 file with AudioTrack you must add your own audio decoder, such as libmad; otherwise AudioTrack can only play PCM data, such as most WAV files.
For real-time audio data, AudioTrack is the only option.

Audio data size per second (bytes) = number of channels × sample rate (Hz) × bytes per sample
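For example, 16-bit (2-byte) stereo audio sampled at 44100 Hz takes 2 × 44100 × 2 = 176400 bytes, roughly 172 KB, per second.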

2. Implementation

native-lib.cpp

2.1 Common code for audio-to-PCM conversion and native AudioTrack playback
#include <jni.h>
#include <string>
#include <android/log.h>

extern "C" {

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswresample/swresample.h"
#include "libswscale/swscale.h"
#include "libavutil/channel_layout.h"
#include "libavutil/samplefmt.h"

};

#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"MusicPlayer",FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"MusicPlayer",FORMAT,##__VA_ARGS__);

2.2 Converting audio to PCM
extern "C"
JNIEXPORT void JNICALL
Java_com_fmtech_ffmpegmusic_MusicPlayer_audioToPcm(JNIEnv *env, jobject instance, jstring inputPath_, jstring outputPath_) {
    const char *inputPath = env->GetStringUTFChars(inputPath_, 0);
    const char *outputPath = env->GetStringUTFChars(outputPath_, 0);

    av_register_all();

    AVFormatContext *pFormatCtx = avformat_alloc_context();

    if(avformat_open_input(&pFormatCtx, inputPath, NULL, NULL) != 0){
        LOGE("Open input failed.");
        return;
    }

    if(avformat_find_stream_info(pFormatCtx, NULL) < 0){
        LOGE("Find stream info failed.");
        return;
    }

    int audio_stream_idx = -1;
    int i = 0;
    for(i = 0; i < pFormatCtx->nb_streams; i++){
        if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO){
            audio_stream_idx = i;
            break;
        }
    }

    if(audio_stream_idx == -1){
        LOGE("No audio stream found.");
        return;
    }

    AVCodecContext *pCodecCtx = pFormatCtx->streams[audio_stream_idx]->codec;
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);

    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0){
        LOGE("avcodec_open2 failed.");
        return;
    }

    AVPacket *packet = (AVPacket*)av_malloc(sizeof(AVPacket));
    AVFrame *frame = av_frame_alloc();

    SwrContext *swrContext = swr_alloc();

    int got_frame;

    //Output buffer: 44100 samples per channel * 2 channels * 2 bytes per sample
    uint8_t *out_buffer = (uint8_t*)av_malloc(44100 * 2 * 2);

    uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;

    //Output sample format: 16-bit signed PCM
    enum AVSampleFormat out_sample_format = AV_SAMPLE_FMT_S16;

    //Output sample rate: keep the same as the input so no resampling is needed
    int out_sample_rate = pCodecCtx->sample_rate;

    swr_alloc_set_opts(swrContext, out_ch_layout, out_sample_format, out_sample_rate,
                        pCodecCtx->channel_layout, pCodecCtx->sample_fmt, pCodecCtx->sample_rate, 0, NULL);

    swr_init(swrContext);
    int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);
    LOGI("-------Out channel nb:%d",out_channel_nb);

    FILE *pcm_file = fopen(outputPath, "wb");
    while(av_read_frame(pFormatCtx, packet) >= 0){
        if(packet->stream_index == audio_stream_idx){
            avcodec_decode_audio4(pCodecCtx, frame, &got_frame, packet);
            if(got_frame){
                //The third argument is the output capacity in samples per channel (matches the 44100-sample buffer allocated above)
                swr_convert(swrContext, &out_buffer, 44100, (const uint8_t **) frame->data, frame->nb_samples);
                int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb, frame->nb_samples, out_sample_format, 1);
                fwrite(out_buffer, 1, out_buffer_size, pcm_file);
            }
        }
        //Release the buffer that av_read_frame allocated for this packet
        av_free_packet(packet);
    }
    LOGI("-------Decode audio success.");

    fclose(pcm_file);
    av_frame_free(&frame);
    av_free(out_buffer);
    swr_free(&swrContext);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    env->ReleaseStringUTFChars(inputPath_, inputPath);
    env->ReleaseStringUTFChars(outputPath_, outputPath);
}
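The resulting PCM file is raw data with no header, so a player has to be told its format explicitly. Assuming the settings above (16-bit little-endian, stereo) and a 44.1 kHz source, it can be spot-checked on a desktop with, for example, ffplay -f s16le -ar 44100 -ac 2 output.pcm.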
2.3 Playing an audio file with AudioTrack from native code
extern "C"
JNIEXPORT void JNICALL
Java_com_fmtech_ffmpegmusic_MusicPlayer_playMusic(JNIEnv *env, jobject instance,
                                                  jstring inputPath_) {
    const char *inputPath = env->GetStringUTFChars(inputPath_, 0);

    av_register_all();

    AVFormatContext *pFormatCtx = avformat_alloc_context();

    if(avformat_open_input(&pFormatCtx, inputPath, NULL, NULL) != 0){
        LOGE("Open input failed.");
        return;
    }

    if(avformat_find_stream_info(pFormatCtx, NULL) < 0){
        LOGE("Find stream info failed.");
        return;
    }

    int audio_stream_idx = -1;
    int i = 0;
    for(i = 0; i < pFormatCtx->nb_streams; i++){
        if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO){
            audio_stream_idx = i;
            break;
        }
    }

    if(audio_stream_idx == -1){
        LOGE("No audio stream found.");
        return;
    }

    AVCodecContext *pCodecCtx = pFormatCtx->streams[audio_stream_idx]->codec;
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);

    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0){
        LOGE("avcodec_open2 failed.");
        return;
    }

    AVPacket *packet = (AVPacket*)av_malloc(sizeof(AVPacket));
    AVFrame *frame = av_frame_alloc();

    SwrContext *swrContext = swr_alloc();

    int got_frame;

    //Output buffer: 44100 samples per channel * 2 channels * 2 bytes per sample
    uint8_t *out_buffer = (uint8_t*)av_malloc(44100 * 2 * 2);

    uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;

    //Output sample format: 16-bit signed PCM
    enum AVSampleFormat out_sample_format = AV_SAMPLE_FMT_S16;

    //Output sample rate: keep the same as the input so no resampling is needed
    int out_sample_rate = pCodecCtx->sample_rate;

    swr_alloc_set_opts(swrContext, out_ch_layout, out_sample_format, out_sample_rate,
                       pCodecCtx->channel_layout, pCodecCtx->sample_fmt, pCodecCtx->sample_rate, 0, NULL);

    swr_init(swrContext);
    int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);
    LOGI("-------Out channel nb:%d",out_channel_nb);

    //Call back into the methods defined in MusicPlayer.java
    jclass clazzMusicPlayer = env->GetObjectClass(instance);
    jmethodID initAudioTrack = env->GetMethodID(clazzMusicPlayer, "initAudioTrack", "(II)V");
    jmethodID playTrack = env->GetMethodID(clazzMusicPlayer, "playTrack", "([BI)V");
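    // JNI signatures: "(II)V" takes (int, int) and returns void; "([BI)V" takes (byte[], int) and returns void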
    //Pass the actual output sample rate rather than assuming 44100 Hz
    env->CallVoidMethod(instance, initAudioTrack, out_sample_rate, out_channel_nb);

    int frameCount=0;
    while(av_read_frame(pFormatCtx, packet) >= 0){
        if(packet->stream_index == audio_stream_idx){
            avcodec_decode_audio4(pCodecCtx, frame, &got_frame, packet);
            if(got_frame){
                LOGI("Decode %d frame.", frameCount++);
                //The third argument is the output capacity in samples per channel (matches the 44100-sample buffer allocated above)
                swr_convert(swrContext, &out_buffer, 44100, (const uint8_t **) frame->data, frame->nb_samples);
                int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb, frame->nb_samples, out_sample_format, 1);

                jbyteArray audio_sample_array = env->NewByteArray(out_buffer_size);
                env->SetByteArrayRegion(audio_sample_array, 0, out_buffer_size, (const jbyte *) out_buffer);
                env->CallVoidMethod(instance, playTrack, audio_sample_array, out_buffer_size);
                env->DeleteLocalRef(audio_sample_array);
            }
        }
        //Release the buffer that av_read_frame allocated for this packet
        av_free_packet(packet);
    }
    LOGI("-------Play audio finish.");

    av_frame_free(&frame);
    av_free(out_buffer);
    swr_free(&swrContext);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    env->ReleaseStringUTFChars(inputPath_, inputPath);
}
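Note that playMusic decodes and writes the whole file before returning, so the call blocks until playback finishes; it must therefore be invoked from a background thread rather than the UI thread (see the usage sketch after MusicPlayer.java below).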

MusicPlayer.java

public class MusicPlayer {

    static{
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avformat-56");
        System.loadLibrary("avutil-54");
        System.loadLibrary("postproc-53");
        System.loadLibrary("swresample-1");
        System.loadLibrary("swscale-3");
        System.loadLibrary("native-lib");
    }

    private AudioTrack mAudioTrack;

    public void initAudioTrack(int sampleRateInHz, int nb_channels){

        int channelConfig;
        if(nb_channels == 1){
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;   // mono
        }else if(nb_channels == 2){
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO; // stereo
        }else{
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
        }

        // Determine the minimum buffer size from the sample rate, sample format (16-bit), and channel configuration
        int bufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);

        // Constructor: AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat, int bufferSizeInBytes, int mode)
        // AudioFormat.ENCODING_PCM_16BIT: whether audio samples are 8-bit or 16-bit; 16-bit is used here.
        // AudioTrack.MODE_STREAM: streaming mode, where data is written while playing; the alternative is MODE_STATIC.
        mAudioTrack = new AudioTrack(
                    AudioManager.STREAM_MUSIC, // stream type
                    sampleRateInHz,            // sample rate of the audio data
                    channelConfig,
                   AudioFormat.ENCODING_PCM_16BIT,
                   bufferSize, AudioTrack.MODE_STREAM);

        mAudioTrack.play();// Important: start the audio output
    }

    public synchronized void playTrack(byte[] buffer, int length){
        if(null != mAudioTrack && mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING){
            mAudioTrack.write(buffer, 0, length);
        }
    }

    public native void audioToPcm(String inputPath, String outputPath);

    public native void playMusic(String inputPath);

}
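For reference, here is a minimal usage sketch. The file paths are placeholders and the surrounding Activity is assumed; because both native calls block until the whole file has been processed, they are run on a background thread.

// Hypothetical caller, e.g. inside an Activity; the paths below are placeholders.
final MusicPlayer player = new MusicPlayer();
new Thread(new Runnable() {
    @Override
    public void run() {
        player.audioToPcm("/sdcard/test.mp3", "/sdcard/test.pcm"); // decode MP3 to a raw PCM file
        player.playMusic("/sdcard/test.mp3");                      // decode and play through AudioTrack
    }
}).start();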

[Related source code]
