Types
- MediaPlayer: the most fully wrapped of the native playback APIs; it decodes internally and handles most common formats (a minimal sketch follows this list)
- SoundPool: suited to short sound clips, such as UI sound effects
- AudioTrack: the low-level audio API; it does no decoding, so it can only play raw PCM data (and WAV, which is just PCM with a header)
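For contrast with the low-level AudioTrack path explored in the rest of this article, the same file can be played in a few lines with MediaPlayer, which decodes the file itself. This is only a minimal sketch, assuming the test.wav file placed in the phone's storage as described in the Hands-on section, with permission handling omitted:

import android.media.MediaPlayer;
import android.os.Environment;
import java.io.IOException;

public class MediaPlayerDemo {
    public void playWav() throws IOException {
        // Assumed location; matches the test.wav file used later in this article.
        String path = Environment.getExternalStorageDirectory() + "/test.wav";
        MediaPlayer player = new MediaPlayer();
        player.setDataSource(path);   // MediaPlayer handles decoding internally
        player.prepare();             // synchronous prepare is fine for a local file
        player.setOnCompletionListener(MediaPlayer::release); // free resources when done
        player.start();
    }
}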
AudioTrack
AudioTrack plays raw PCM audio data only: it cannot play video, and compressed audio must be decoded first. The constructor signature (as it appears in the SDK stub) and the meaning of each parameter are shown below:
public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
                  int bufferSizeInBytes, int mode) throws IllegalArgumentException {
    throw new RuntimeException("Stub!"); // signature from android.jar; the body is only the SDK stub
}
- streamType: the stream type; AudioManager defines the available types, e.g. STREAM_MUSIC and STREAM_RING
- sampleRateInHz: the sample rate, i.e. how many samples per second the audio contains
- channelConfig: the channel configuration, mono or stereo
- audioFormat: the sample bit depth; 16-bit (ENCODING_PCM_16BIT) is compatible with all Android devices
- bufferSizeInBytes: the buffer size, obtained via AudioTrack.getMinBufferSize()
- mode: the play mode; MODE_STATIC writes the whole clip once, MODE_STREAM writes data repeatedly while playing (a short sketch of both follows this list)
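Since the last two parameters are the ones that most often cause confusion, here is a brief sketch of both play modes. It assumes 44.1 kHz, mono, 16-bit PCM (the defaults used by the AudioPlayer class later in this article) and a pcmData array that already fits in memory:

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class PlayModeDemo {
    // MODE_STATIC: the buffer must hold the entire clip; write once, then play.
    public static AudioTrack playStatic(byte[] pcmData) {
        AudioTrack track = new AudioTrack(
                AudioManager.STREAM_MUSIC,
                44100,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                pcmData.length,            // whole clip fits in the track's buffer
                AudioTrack.MODE_STATIC);
        track.write(pcmData, 0, pcmData.length); // single write up front
        track.play();
        return track;
    }

    // MODE_STREAM: the buffer only needs to be getMinBufferSize() large;
    // data is written repeatedly while the track is playing (see AudioPlayer below).
    public static int minStreamBufferSize() {
        return AudioTrack.getMinBufferSize(
                44100,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
    }
}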
Playing WAV Files
WAV simply prepends a small header (44 bytes in the canonical case) to the PCM data to describe its format; the PCM itself is not compressed, so the file is much larger than lossy formats such as MP3. Because the payload is plain PCM, AudioTrack can play WAV files.
Hands-on
To play with AudioTrack, first prepare a WAV audio file, rename it to "test.wav", and copy it to the root of the phone's built-in storage. To play it as a stream, open the file as an InputStream and push its byte data into the playback buffer with AudioTrack.write().
- Prepare to read the WAV file: define the header structure first
public class WavFileHeader {
    public static final int WAV_FILE_HEADER_SIZE = 44;
    public static final int WAV_CHUNKSIZE_EXCLUDE_DATA = 36;
    public static final int WAV_CHUNKSIZE_OFFSET = 4;
    public static final int WAV_SUB_CHUNKSIZE1_OFFSET = 16;
    public static final int WAV_SUB_CHUNKSIZE2_OFFSET = 40;

    public String mChunkID = "RIFF";
    public int mChunkSize = 0;
    public String mFormat = "WAVE";

    public String mSubChunk1ID = "fmt ";
    public int mSubChunk1Size = 16;
    public short mAudioFormat = 1;
    public short mNumChannel = 1;
    public int mSampleRate = 8000;
    public int mByteRate = 0;
    public short mBlockAlign = 0;
    public short mBitsPerSample = 8;

    public String mSubChunk2ID = "data";
    public int mSubChunk2Size = 0;

    public WavFileHeader() {
    }

    public WavFileHeader(int sampleRateInHz, int channels, int bitsPerSample) {
        mSampleRate = sampleRateInHz;
        mBitsPerSample = (short) bitsPerSample;
        mNumChannel = (short) channels;
        mByteRate = mSampleRate * mNumChannel * mBitsPerSample / 8;
        mBlockAlign = (short) (mNumChannel * mBitsPerSample / 8);
    }
}
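To make the two derived fields concrete: for a 44.1 kHz, stereo, 16-bit file the constructor above yields mByteRate = 44100 × 2 × 16 / 8 = 176400 bytes per second and mBlockAlign = 2 × 16 / 8 = 4 bytes per sample frame. The remaining fields are read straight out of the canonical 44-byte header by the WavFileReader class below.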
- Read the WAV file as a stream with an InputStream
public class WavFileReader {
    private static final String TAG = WavFileReader.class.getSimpleName();

    private DataInputStream mDataInputStream;
    private WavFileHeader mWavFileHeader;

    public boolean openFile(String filepath) throws IOException {
        if (mDataInputStream != null) {
            closeFile();
        }
        mDataInputStream = new DataInputStream(new FileInputStream(filepath));
        return readHeader();
    }

    public void closeFile() throws IOException {
        if (mDataInputStream != null) {
            mDataInputStream.close();
            mDataInputStream = null;
        }
    }

    public WavFileHeader getmWavFileHeader() {
        return mWavFileHeader;
    }

    public int readData(byte[] buffer, int offset, int count) {
        if (mDataInputStream == null || mWavFileHeader == null) {
            return -1;
        }
        try {
            int nbytes = mDataInputStream.read(buffer, offset, count);
            if (nbytes == -1) {
                return 0;
            }
            return nbytes;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return -1;
    }
    private boolean readHeader() {
        if (mDataInputStream == null) {
            return false;
        }
        WavFileHeader header = new WavFileHeader();
        byte[] intValue = new byte[4];
        byte[] shortValue = new byte[2];
        try {
            // Note: this assumes the canonical 44-byte layout; extra chunks (e.g. LIST) are not handled.
            header.mChunkID = "" + (char) mDataInputStream.readByte() + (char) mDataInputStream.readByte()
                    + (char) mDataInputStream.readByte() + (char) mDataInputStream.readByte();
            Log.d(TAG, "Read file chunkID:" + header.mChunkID);

            // readFully() guarantees the whole field is read (plain read() may return fewer bytes).
            mDataInputStream.readFully(intValue);
            header.mChunkSize = byteArrayToInt(intValue);
            Log.d(TAG, "Read file chunkSize:" + header.mChunkSize);

            header.mFormat = "" + (char) mDataInputStream.readByte() + (char) mDataInputStream.readByte()
                    + (char) mDataInputStream.readByte() + (char) mDataInputStream.readByte();
            Log.d(TAG, "Read file format:" + header.mFormat);

            header.mSubChunk1ID = "" + (char) mDataInputStream.readByte() + (char) mDataInputStream.readByte()
                    + (char) mDataInputStream.readByte() + (char) mDataInputStream.readByte();
            Log.d(TAG, "Read fmt chunkID:" + header.mSubChunk1ID);

            mDataInputStream.readFully(intValue);
            header.mSubChunk1Size = byteArrayToInt(intValue);
            Log.d(TAG, "Read fmt chunkSize:" + header.mSubChunk1Size);

            mDataInputStream.readFully(shortValue);
            header.mAudioFormat = byteArrayToShort(shortValue);
            Log.d(TAG, "Read audioFormat:" + header.mAudioFormat);

            mDataInputStream.readFully(shortValue);
            header.mNumChannel = byteArrayToShort(shortValue);
            Log.d(TAG, "Read channel number:" + header.mNumChannel);

            mDataInputStream.readFully(intValue);
            header.mSampleRate = byteArrayToInt(intValue);
            Log.d(TAG, "Read samplerate:" + header.mSampleRate);

            mDataInputStream.readFully(intValue);
            header.mByteRate = byteArrayToInt(intValue);
            Log.d(TAG, "Read byterate:" + header.mByteRate);

            mDataInputStream.readFully(shortValue);
            header.mBlockAlign = byteArrayToShort(shortValue);
            Log.d(TAG, "Read blockalign:" + header.mBlockAlign);

            mDataInputStream.readFully(shortValue);
            header.mBitsPerSample = byteArrayToShort(shortValue);
            Log.d(TAG, "Read bitspersample:" + header.mBitsPerSample);

            header.mSubChunk2ID = "" + (char) mDataInputStream.readByte() + (char) mDataInputStream.readByte()
                    + (char) mDataInputStream.readByte() + (char) mDataInputStream.readByte();
            Log.d(TAG, "Read data chunkID:" + header.mSubChunk2ID);

            mDataInputStream.readFully(intValue);
            header.mSubChunk2Size = byteArrayToInt(intValue);
            Log.d(TAG, "Read data chunkSize:" + header.mSubChunk2Size);

            Log.d(TAG, "Read wav file success !");
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        mWavFileHeader = header;
        return true;
    }
    private static short byteArrayToShort(byte[] b) {
        return ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN).getShort();
    }

    private static int byteArrayToInt(byte[] b) {
        return ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN).getInt();
    }
}
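Before handing data to AudioTrack, it is worth checking that the file really is plain 16-bit PCM, since readHeader() assumes the canonical 44-byte layout and does not skip extra chunks. A small sketch of such a check (the helper name isPlayablePcm and its placement are illustrative, not part of the classes above):

    // Returns true only for uncompressed 16-bit PCM with a canonical RIFF/WAVE header.
    public static boolean isPlayablePcm(WavFileHeader header) {
        return "RIFF".equals(header.mChunkID)
                && "WAVE".equals(header.mFormat)
                && header.mAudioFormat == 1          // 1 = PCM, no compression
                && header.mBitsPerSample == 16;      // matches ENCODING_PCM_16BIT
    }

The AudioPlayer class below then wraps AudioTrack itself.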
public class AudioPlayer {
    private static final String TAG = "AudioPlayer";

    private static final int DEFAULT_STREAM_TYPE = AudioManager.STREAM_MUSIC;
    private static final int DEFAULT_SAMPLE_RATE = 44100;
    private static final int DEFAULT_CHANNEL_CONFIG = AudioFormat.CHANNEL_OUT_MONO;
    private static final int DEFAULT_AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    private static final int DEFAULT_PLAY_MODE = AudioTrack.MODE_STREAM;

    private volatile boolean mIsPlayStarted = false;
    private AudioTrack mAudioTrack;

    public boolean startPlayer() {
        return startPlayer(DEFAULT_STREAM_TYPE, DEFAULT_SAMPLE_RATE, DEFAULT_CHANNEL_CONFIG, DEFAULT_AUDIO_FORMAT);
    }

    public boolean startPlayer(int streamType, int sampleRateInHz, int channelConfig, int audioFormat) {
        if (mIsPlayStarted) {
            Log.e(TAG, "Player already started !");
            return false;
        }
        int bufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
        if (bufferSizeInBytes == AudioTrack.ERROR_BAD_VALUE) {
            Log.e(TAG, "Invalid parameter !");
            return false;
        }
        Log.i(TAG, "getMinBufferSize = " + bufferSizeInBytes + " bytes !");
        mAudioTrack = new AudioTrack(streamType, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes, DEFAULT_PLAY_MODE);
        if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) {
            Log.e(TAG, "AudioTrack initialize fail !");
            return false;
        }
        mIsPlayStarted = true;
        Log.i(TAG, "Start audio player success !");
        return true;
    }

    public void stopPlayer() {
        if (!mIsPlayStarted) {
            return;
        }
        if (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            mAudioTrack.stop();
        }
        mAudioTrack.release();
        mIsPlayStarted = false;
        Log.i(TAG, "Stop audio player success !");
    }
    public boolean play(byte[] audioData, int offsetInBytes, int sizeInBytes) {
        if (!mIsPlayStarted) {
            Log.e(TAG, "Player not started !");
            return false;
        }
        // Start playback once, before writing; in MODE_STREAM write() blocks when the buffer is full,
        // so the track should already be consuming data. Calling play() on every write is unnecessary.
        if (mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
            mAudioTrack.play();
        }
        if (mAudioTrack.write(audioData, offsetInBytes, sizeInBytes) != sizeInBytes) {
            Log.e(TAG, "Could not write all the samples to the audio device !");
        }
        Log.d(TAG, "OK, played " + sizeInBytes + " bytes !");
        return true;
    }
}
- Start the playback thread
private Runnable pcmRunnable = new Runnable() {
    @Override
    public void run() {
        // Assumes wavFileReader.openFile(...) and audioPlayer.startPlayer(...) were called before
        // this runnable is posted, and that SAMPLES_PER_FRAME is a frame-size constant defined elsewhere.
        byte[] buffer = new byte[SAMPLES_PER_FRAME * 2]; // 2 bytes per 16-bit sample
        while (wavFileReader.readData(buffer, 0, buffer.length) > 0) {
            audioPlayer.play(buffer, 0, buffer.length);
        }
        audioPlayer.stopPlayer();
        try {
            wavFileReader.closeFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
};
This does play the WAV file, but it sounds odd, and it is not obvious which parameter is wrong. A likely culprit: if startPlayer() is called with the hard-coded defaults (44.1 kHz, mono, 16-bit) rather than the values read from the WAV header, any mismatch in sample rate or channel count will make playback sound too fast, too slow, or garbled. A setup sketch that feeds the header values into startPlayer() follows; the method name startPlayback, the file location from the Hands-on section, and the channel-count mapping are illustrative, while wavFileReader and audioPlayer are the same fields used by pcmRunnable above.
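    // Fields assumed to live in the same class as pcmRunnable above.
    private WavFileReader wavFileReader;
    private AudioPlayer audioPlayer;

    private void startPlayback() throws IOException {
        wavFileReader = new WavFileReader();
        wavFileReader.openFile(Environment.getExternalStorageDirectory() + "/test.wav");
        WavFileHeader header = wavFileReader.getmWavFileHeader();

        // Derive the AudioTrack parameters from the file instead of the defaults.
        int channelConfig = (header.mNumChannel == 2)
                ? AudioFormat.CHANNEL_OUT_STEREO
                : AudioFormat.CHANNEL_OUT_MONO;
        audioPlayer = new AudioPlayer();
        audioPlayer.startPlayer(AudioManager.STREAM_MUSIC, header.mSampleRate,
                channelConfig, AudioFormat.ENCODING_PCM_16BIT);

        new Thread(pcmRunnable).start();
    }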
Related Links
Android音频开发之AudioTrack
Android 音频PCM数据的采集和播放,读写音频wav文件