Last time we ended up with a PCM-encoded audio stream; this time we will play that PCM stream using the AudioTrack class Android provides. The approach is simple: the native layer calls back into Java.
First, define the corresponding methods in Java:
package com.aruba.ffmpegapplication;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import androidx.appcompat.app.AppCompatActivity;
import java.io.File;
public class PcmPlayActivity extends AppCompatActivity {
static {
System.loadLibrary("native-lib");
}
private AudioTrack audioTrack;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_pcm_play);
}
/**
* Callback for the native layer: creates and starts the AudioTrack
*
* @param sampleRateInHz sample rate of the decoded PCM data in Hz
* @param channelCount   number of output channels
*/
private void create(int sampleRateInHz, int channelCount) {
int channelConfig = AudioFormat.CHANNEL_OUT_MONO; //mono
if (channelCount == 2) {
channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
}
int buffSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);//compute the minimum buffer size
//note: this AudioTrack(int streamType, ...) constructor is marked @Deprecated in the SDK
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, buffSize, AudioTrack.MODE_STREAM);
audioTrack.play();
}
/**
* Callback for the native layer: writes one chunk of converted PCM data to the AudioTrack
*/
private void play(byte[] bytes, int size) {
if (audioTrack != null && audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING)
audioTrack.write(bytes, 0, size);
}
public void click(View view) {
switch (view.getId()) {
case R.id.btn_audiotrack:
final File input1 = new File(Environment.getExternalStorageDirectory(), "input.mp3");
new Thread() {
@Override
public void run() {
playByAudio(input1.getAbsolutePath());
}
}.start();
break;
}
}
private native void playByAudio(String inputFilePath);
}
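One thing the Activity above never does is stop and release the AudioTrack. A minimal cleanup sketch (added here for illustration, not taken from the project's code) could look like this:

@Override
protected void onDestroy() {
    super.onDestroy();
    //stop playback and free the resources held by the AudioTrack
    if (audioTrack != null) {
        audioTrack.stop();
        audioTrack.release();
        audioTrack = null;
    }
}

On API 23 and above, the deprecated AudioTrack(int streamType, ...) constructor noted in create() can also be replaced with AudioTrack.Builder.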
In the native layer, replace the previous article's write-to-file code with calls to these Java methods:
#include <jni.h>
#include <android/log.h>
extern "C" {
//codecs
#include "libavcodec/avcodec.h"
//container/demuxing
#include "libavformat/avformat.h"
//audio resampling
#include "libswresample/swresample.h"
//pixel processing
#include "libswscale/swscale.h"
}
#define LOG_TAG "aruba"
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_ffmpegapplication_PcmPlayActivity_playByAudio(JNIEnv *env, jobject instance,
jstring inputFilePath_) {
const char *inputFilePath = env->GetStringUTFChars(inputFilePath_, 0);
//register all FFmpeg components
av_register_all();
//open the input file
AVFormatContext *formatContext = avformat_alloc_context();
if (avformat_open_input(&formatContext, inputFilePath, NULL, NULL) != 0) {
LOGE("打开失败");
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
return;
}
//fill the AVFormatContext with the stream information
if (avformat_find_stream_info(formatContext, NULL) < 0) {
LOGE("获取文件信息失败");
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
return;
}
//find the audio stream and get its codec context
AVCodecContext *codecContext = NULL;
int audio_stream_idx = -1;
for (int i = 0; i < formatContext->nb_streams; ++i) {
if (formatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {//audio stream
codecContext = formatContext->streams[i]->codec;
audio_stream_idx = i;
break;
}
}
if (codecContext == NULL) {
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
return;
}
//find the decoder matching the codec context's id
AVCodec *codec = avcodec_find_decoder(codecContext->codec_id);
//open the decoder
if (avcodec_open2(codecContext, codec, NULL) < 0) {
LOGE("解码失败");
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
return;
}
//read the stream packet by packet
//holds the compressed data
AVPacket *pkt = (AVPacket *) (av_malloc(sizeof(AVPacket)));
av_init_packet(pkt);
//holds the decoded data
AVFrame *picture = av_frame_alloc();
int picture_ptr = 0;
//audio resampler context
SwrContext *swrContext = swr_alloc();
//AV_CH_LAYOUT_STEREO: stereo  AV_SAMPLE_FMT_S16: 16-bit samples  codecContext->sample_rate: sample rate in Hz
swr_alloc_set_opts(swrContext, AV_CH_LAYOUT_STEREO, AV_SAMPLE_FMT_S16,
codecContext->sample_rate,//the output sample rate should match the input sample rate
codecContext->channel_layout, codecContext->sample_fmt,
codecContext->sample_rate, 0, NULL
);
swr_init(swrContext);
//number of channels in the output layout (stereo); used below to compute the real output size
int channel_count = av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);
//maximum bytes of converted data per channel per second = sample rate * bits per sample / 8
int out_size = 44100 * 16 / 8;
uint8_t *out = (uint8_t *) (av_malloc(out_size));
//look up and call the Java-side create() method; the AudioTrack must use the same sample rate the resampler outputs
jclass jclz = env->GetObjectClass(instance);
jmethodID create_method_id = env->GetMethodID(jclz, "create", "(II)V");
env->CallVoidMethod(instance, create_method_id, codecContext->sample_rate,
av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO));
jmethodID play_method_id = env->GetMethodID(jclz, "play", "([BI)V");
while (av_read_frame(formatContext, pkt) == 0) {//each packet of compressed data is read into the AVPacket
if (pkt->stream_index == audio_stream_idx) {
//decode
avcodec_decode_audio4(codecContext, picture, &picture_ptr, pkt);
LOGE("picture_ptr %d", picture_ptr);
if (picture_ptr > 0) {
//convert; the output capacity argument is in samples per channel (out_size bytes / 2 channels / 2 bytes per sample)
swr_convert(swrContext, &out, out_size / 4,
(const uint8_t **) (picture->data), picture->nb_samples);
//actual number of bytes produced for this frame
int size = av_samples_get_buffer_size(NULL, channel_count, picture->nb_samples,
AV_SAMPLE_FMT_S16, 1);
jbyteArray array = env->NewByteArray(size);
env->SetByteArrayRegion(array, 0, size, (const jbyte *) (out));
env->CallVoidMethod(instance, play_method_id, array, size);
env->DeleteLocalRef(array);
}
}
av_free_packet(pkt);
}
//release resources
av_freep(&out);
av_free(pkt);
swr_free(&swrContext);
av_frame_free(&picture);
avcodec_close(codecContext);
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
}
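In this sample the decode loop is driven from a Java Thread, so the JNIEnv passed into playByAudio stays valid for the CallVoidMethod callbacks above. If the loop were ever moved onto a thread created on the native side, that env could no longer be used there; the usual pattern is to cache the JavaVM and attach the worker thread first. A rough sketch (the javaVM caching and the play_from_native_thread helper are illustrative assumptions, not part of the project):

#include <jni.h>
#include <cstdint>

JavaVM *javaVM = NULL; //cache this once, e.g. via env->GetJavaVM(&javaVM) inside playByAudio

//instance must be a global reference (env->NewGlobalRef) before it is used on another thread
void play_from_native_thread(jobject instance, uint8_t *pcm, int size) {
    JNIEnv *threadEnv = NULL;
    //attach this native thread to the VM to obtain a valid JNIEnv
    if (javaVM->AttachCurrentThread(&threadEnv, NULL) != JNI_OK) {
        return;
    }
    jclass clz = threadEnv->GetObjectClass(instance);
    jmethodID play_id = threadEnv->GetMethodID(clz, "play", "([BI)V");
    jbyteArray array = threadEnv->NewByteArray(size);
    threadEnv->SetByteArrayRegion(array, 0, size, (const jbyte *) pcm);
    threadEnv->CallVoidMethod(instance, play_id, array, size);
    threadEnv->DeleteLocalRef(array);
    //detach once this thread no longer needs to call into Java
    javaVM->DetachCurrentThread();
}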
After testing, playback works.
Project repository: https://gitee.com/aruba/FFmpegApplication.git