package com.txz.test;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Arrays;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Message;
import android.os.PowerManager;
import android.util.Log;
public class AudioRecThread implements
android.media.AudioTrack.OnPlaybackPositionUpdateListener {
final static String TAG = "AudioRecThread";
static public final int MSG_AUDIO_REC_FINISH = 1;
static public final int MSG_AUDIO_PLAY_FINISH = 2;
private boolean audioRecContinue = false;
private boolean audioPlayContinue = false;
private PowerManager.WakeLock wl;
private Thread thread;
private long startPlayMilli = 0;
private long playUpdatePeriodAmass = 0;
private long timeMarkAttendModified = 0;
private int markPos = 0;
private int markMill = 200;
private int updateTimes = 0;
public void onPeriodicNotification(AudioTrack track) {
if (track.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) { // compare the play state, not the init state
playUpdatePeriodAmass += 25;
long c = System.currentTimeMillis();
c -= startPlayMilli;
c = track.getPlaybackHeadPosition(); // overwrites the wall-clock delta; only the head position is logged
c /= 8; // frames -> ms, assuming an 8 kHz track
Log.v(TAG + " onPeriodicNotification", "real ms " + c
+ " period total " + playUpdatePeriodAmass);
}
}
public void onMarkerReached(AudioTrack track) {
timeMarkAttendModified = System.currentTimeMillis();
timeMarkAttendModified -= markMill;
Log.v(TAG + " period currpos", "mark:" + markMill);
}
AudioRecThread(PowerManager.WakeLock wl1) {
Log.v(TAG, "AvcThread created constructor1");
wl = wl1;
}
// Write a 16-bit value little-endian (WAV header fields are little-endian,
// while RandomAccessFile.writeShort/writeInt write big-endian).
void writeShort(RandomAccessFile fo, short in) throws IOException {
byte tmp[] = { 0, 0 };
tmp[0] = (byte) (in & 0xff);
tmp[1] = (byte) ((in >> 8) & 0xff);
fo.write(tmp);
}
// Write a 32-bit value little-endian.
void writeInt(RandomAccessFile fo, int in) throws IOException {
byte tmp[] = { 0, 0, 0, 0 };
tmp[0] = (byte) (in & 0xff);
tmp[1] = (byte) ((in >> 8) & 0xff);
tmp[2] = (byte) ((in >> 16) & 0xff);
tmp[3] = (byte) ((in >> 24) & 0xff);
fo.write(tmp);
}
void startAudioRec(final String audioFname, final int audioSource,
final Handler handler) {
final Runnable audioRecRun = new Runnable() {
public void run() {
int channel = AudioFormat.CHANNEL_IN_MONO;
short channels = 1;
int format = AudioFormat.ENCODING_PCM_16BIT;
int bitsPerSample = 16;
int sampleRate = 16000;
int bufferSizeInBytes = AudioRecord.getMinBufferSize(
sampleRate, channel, format);
AudioRecord audioRecord = new AudioRecord(audioSource,
sampleRate, channel, format, bufferSizeInBytes);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { // constructor never returns null; check the state
audioRecord.release();
return;
}
int bufReadLenInByte = bufferSizeInBytes / 2;
byte[] bufRead = new byte[bufReadLenInByte];
RandomAccessFile fo;
wl.acquire();
try {
fo = new RandomAccessFile(audioFname, "rw");
} catch (FileNotFoundException e) {
return;
} catch (IllegalArgumentException e) {
return;
} catch (SecurityException e) {
return;
}
/*
* typedef struct tWAVEFORMATEX { WORD wFormatTag; WORD
* nChannels; DWORD nSamplesPerSec; DWORD nAvgBytesPerSec; WORD
* nBlockAlign; WORD wBitsPerSample; WORD cbSize; } WAVEFORMATEX
*/
int datalength = 0;
try {
byte[] tmp = { 0, 0, 0, 0 };
fo.writeBytes("RIFF");
fo.writeInt(0); // Riff file length
fo.writeBytes("WAVE"); // 4
fo.writeBytes("fmt "); // 4
writeInt(fo, 16); // 4 , followed by 16 bytes
writeShort(fo, (short) 1);// pcm wave tag
writeShort(fo, (short) channels);
writeInt(fo, sampleRate);
writeInt(fo, bitsPerSample * sampleRate * channels / 8);
writeShort(fo, (short) (channels * bitsPerSample / 8));
writeShort(fo, (short) bitsPerSample);
fo.writeBytes("data"); // 4
fo.writeInt(0); // 4 ,data length
} catch (IOException e) {
}
audioRecContinue = true;
try {
audioRecord.startRecording();
} catch (IllegalStateException e) {
e.printStackTrace();
audioRecContinue = false;
}
while (audioRecContinue) {
// Log.v(TAG+"-ENC","In run AudioRecThread.encodeContinue="+audioRecContinue
// ) ;
try {
int read = audioRecord.read(bufRead, 0,
bufReadLenInByte);
if (read > 0) {
fo.write((byte[]) bufRead, 0, read);
datalength += read;
}
} catch (IOException e) {
}
}
try {
fo.seek(4);
writeInt(fo, 36 + datalength);
fo.seek(40);
writeInt(fo, datalength);
fo.close();
} catch (IOException e) {
}
try {
audioRecord.stop();
} catch (IllegalStateException e) {
}
audioRecord.release();
wl.release();
handler.sendEmptyMessage(MSG_AUDIO_REC_FINISH);
Log.v(TAG, "AvcThread Finish encoding ");
}
};
startLowPriorityNewThread(audioRecRun);
}
public void stopAudioRec() {
audioRecContinue = false;
if (thread != null) {
try {
thread.join();
} catch (InterruptedException e) {
}
}
thread = null;
// Log.v(TAG,"in stopAvcEnc AvcThread.encodeContinue="+encodeContinue )
// ;
}
private short readShort(FileInputStream fi) throws IOException {
byte tmp[] = { 0, 0 };
try {
fi.read(tmp);
} catch (IOException e) {
throw e;
}
// mask each byte to avoid sign extension (little-endian)
return (short) (((tmp[1] & 0xff) << 8) | (tmp[0] & 0xff));
}
private int readInt(FileInputStream fi) throws IOException {
byte tmp[] = { 0, 0, 0, 0 };
try {
fi.read(tmp);
} catch (IOException e) {
throw e;
}
// mask each byte to avoid sign extension (little-endian)
return ((tmp[3] & 0xff) << 24) | ((tmp[2] & 0xff) << 16)
| ((tmp[1] & 0xff) << 8) | (tmp[0] & 0xff);
}
void startAudioPlay(final String audioFname, final Handler handler) {
final Runnable audioPlayRun = new Runnable() {
public void run() {
FileInputStream fi;
try {
fi = new FileInputStream(audioFname);
} catch (FileNotFoundException e) {
e.printStackTrace();
return;
}
short channels = 1;
int blockAlign;
int bitsPerSample;
int sampleRate;
wl.acquire();
int datalength = 0;
try {
byte[] tmp = { 0, 0, 0, 0 };
fi.read(tmp);
boolean check = Arrays.equals(tmp, "RIFF".getBytes());
if (check != true)
return;
fi.read(tmp); // Riff file length
fi.read(tmp);
check = Arrays.equals(tmp, "WAVE".getBytes()); // 4
if (check != true)
return;
fi.read(tmp);
check = Arrays.equals(tmp, "fmt ".getBytes()); // 4
if (check != true)
return;
int headlen = readInt(fi);
if (headlen != 16)
return; // 4 , followed by 16 bytes
short s = readShort(fi);
if (s != 1)
return;// pcm wave tag
channels = readShort(fi);
sampleRate = readInt(fi);
readInt(fi); // bitsPerSample*sampleRate*channels/8) ;
blockAlign = readShort(fi);// ,
// (short)(channels*bitsPerSample/8)
// ) ;
bitsPerSample = readShort(fi);// ,(short)bitsPerSample) ;
fi.read(tmp); // "data"); // 4
fi.read(tmp); // 4 ,data length
} catch (IOException e) {
return;
}
int format;// = AudioFormat.ENCODING_PCM_16BIT ;
int channel;// =AudioFormat.CHANNEL_CONFIGURATION_MONO ;
if (channels == 2)
channel = AudioFormat.CHANNEL_OUT_STEREO;
else if (channels == 1)
channel = AudioFormat.CHANNEL_OUT_MONO;
else
channel = -1;
if (bitsPerSample == 16)
format = AudioFormat.ENCODING_PCM_16BIT;
else if (bitsPerSample == 8)
format = AudioFormat.ENCODING_PCM_8BIT;
else
format = -1;
int bufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRate,
channel, format) * 4;
if (bufferSizeInBytes < channels * bitsPerSample * 2 / 8 * sampleRate) {
// use at least two seconds' worth of audio as the playback buffer
bufferSizeInBytes = channels * bitsPerSample * 2 / 8 * sampleRate;
}
Log.v(TAG + " write", "bufferSizeInBytes=" + bufferSizeInBytes);
AudioTrack audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC, sampleRate, channel, format,
bufferSizeInBytes, AudioTrack.MODE_STREAM);
if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) { // constructor never returns null; check the state
audioTrack.release();
wl.release();
return;
}
int bufReadLenInByte = bufferSizeInBytes / 8;
byte[] bufRead = new byte[bufReadLenInByte];
audioPlayContinue = true;
audioTrack.setPositionNotificationPeriod(200);
markPos = sampleRate * markMill / 1000;
int rr = audioTrack.setNotificationMarkerPosition(markPos);
Log.v(TAG + " Period currpos",
"setNotificationMarkerPosition return " + rr);
audioTrack
.setPlaybackPositionUpdateListener(AudioRecThread.this);
// int rr = audioTrack.setPlaybackHeadPosition(8000*5);
// Log.v(TAG+" Period", "setPlaybackHeadPosition return "+rr) ;
startPlayMilli = System.currentTimeMillis();
Log.v(TAG + " Period currpos", "startPlayMilli= "
+ startPlayMilli);
playUpdatePeriodAmass = 0;
rr = audioTrack.getPlaybackHeadPosition();
Log.v(TAG + " Period", "getPlaybackHeadPosition return " + rr);
while (audioPlayContinue) {
// Log.v(TAG+"-ENC","In run AudioRecThread.encodeContinue="+audioRecContinue
// ) ;
try {
int read = fi.read(bufRead, 0, bufReadLenInByte);
if (read > 0) {
Log.v(TAG + " write", "before write to AudioTrack");
read = audioTrack.write((byte[]) bufRead, 0, read);
Log.v(TAG + " write",
"after write to AudioTrack, write " + read
+ " bytes");
if (datalength == 0) {
try {
Log.v(TAG + " Period currpos",
"startPlay : "
+ System.currentTimeMillis());
audioTrack.play();
bufReadLenInByte /= 10;
} catch (IllegalStateException e) {
}
}
datalength += read;
} else {
break;
}
} catch (IOException e) {
}
long cu = System.currentTimeMillis();
long cu1 = cu - startPlayMilli;
long cu2 = cu - timeMarkAttendModified;
rr = audioTrack.getPlaybackHeadPosition();
Log.v(TAG + " Period currpos", "curr: " + cu1
+ " modified mill:" + cu2 + " pos:" + rr * 1000
/ 8000); // frames -> ms, assuming an 8 kHz track
}
try {
fi.close();
} catch (IOException e) {
}
try {
Log.v(TAG + " Period", "dataLength=" + datalength);
audioTrack.flush();
audioTrack.stop();
} catch (Exception e) {
e.printStackTrace();
}
audioTrack.release();
wl.release();
handler.sendEmptyMessage(MSG_AUDIO_PLAY_FINISH);
Log.v(TAG, "AvcThread Finish encoding ");
}
};
startLowPriorityNewThread(audioPlayRun);
}
public void stopAudioPlay() {
audioPlayContinue = false;
if (thread != null) {
try {
thread.join();
} catch (InterruptedException e) {
}
}
thread = null;
// Log.v(TAG,"in stopAvcEnc AvcThread.encodeContinue="+encodeContinue )
// ;
}
private void startLowPriorityNewThread(Runnable run) {
thread = new Thread(run);
int pr = thread.getPriority();
Log.v(TAG, "Original Thread priority is " + pr);
thread.setPriority(pr - 1);
pr = thread.getPriority();
Log.v(TAG, "new Thread priority is " + pr);
thread.start();
}
}
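For reference, here is a rough usage sketch of the class (my own illustration, not from the original post; it assumes an Activity context and a writable, hypothetical path; the wake lock itself is acquired and released inside the worker thread):
// Hypothetical caller: create the wake lock, record, then play back once recording finishes.
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
PowerManager.WakeLock wl = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "demo:audio");
final AudioRecThread audioRecThread = new AudioRecThread(wl);
final String path = "/sdcard/test.wav"; // hypothetical path
audioRecThread.startAudioRec(path, MediaRecorder.AudioSource.DEFAULT,
        new Handler(getMainLooper()) {
            @Override
            public void handleMessage(Message msg) {
                if (msg.what == AudioRecThread.MSG_AUDIO_REC_FINISH) {
                    // recording thread has exited and the WAV header is patched, so play it back
                    audioRecThread.startAudioPlay(path, new Handler(getMainLooper()));
                }
            }
        });
// ... later, e.g. from a button handler:
// audioRecThread.stopAudioRec();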
The difference between WAV and PCM: AudioTrack only plays raw PCM data; a plain PCM WAV file is simply that PCM data prefixed with a RIFF header (44 bytes in the canonical case), which is what the header-writing and header-parsing code above handles.
If you want to test an individual module, you can pull the relevant code out and test it on its own. Note that RandomAccessFile's seek() always counts from the beginning of the file.
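To make that seek point concrete, here is a minimal sketch (my own illustration, assuming the canonical 44-byte PCM header; it reuses the little-endian writeInt() helper from the class above and a hypothetical path) of how the two size fields are patched after recording:
// The RIFF chunk size lives at byte offset 4 and the "data" chunk size at offset 40;
// seek() counts from the start of the file, never from the current position.
RandomAccessFile fo = new RandomAccessFile("/sdcard/test.wav", "rw"); // hypothetical path
int dataLen = (int) (fo.length() - 44);  // PCM payload after the 44-byte header
fo.seek(4);                              // RIFF chunk size = total file size - 8
writeInt(fo, 36 + dataLen);
fo.seek(40);                             // "data" sub-chunk size
writeInt(fo, dataLen);
fo.close();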
Problems encountered:

Problem 1: java.lang.IllegalStateException: startRecording() called on an uninitialized AudioRecord.
Cause: the AudioRecord is left uninitialized when the RECORD_AUDIO permission is missing, when an unsuitable audio source is chosen, or when the device does not support the requested sample rate. Declare the permission in the manifest:
<uses-permission android:name="android.permission.RECORD_AUDIO" />
// Using MediaRecorder.AudioSource.DEFAULT as the audio source works fine here
audioRecThread.startAudioRec(path,MediaRecorder.AudioSource.DEFAULT, new Handler(getMainLooper()){
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
Toast.makeText(getApplicationContext(), "Done", Toast.LENGTH_SHORT).show();
}
});
// Some devices do not support an 8 kHz sample rate, which makes initialization fail; switching to 16 kHz fixed it
AudioRecord audioRecord = new AudioRecord(audioSource,
sampleRate, channel, format, bufferSizeInBytes);
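A more defensive variant (a sketch of my own, not from the original code) queries getMinBufferSize() for a few candidate rates and verifies getState() before calling startRecording(), so an unsupported rate or a missing permission fails gracefully instead of throwing:
// Sketch: pick the first sample rate the device accepts (assumed candidates),
// and confirm the AudioRecord actually initialized before recording.
int[] candidateRates = { 16000, 44100, 8000 };
AudioRecord rec = null;
for (int rate : candidateRates) {
    int minBuf = AudioRecord.getMinBufferSize(rate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    if (minBuf <= 0) {
        continue; // ERROR or ERROR_BAD_VALUE: this rate/format is not supported
    }
    rec = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, rate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf);
    if (rec.getState() == AudioRecord.STATE_INITIALIZED) {
        break; // usable configuration found
    }
    rec.release(); // missing permission or unusable parameters
    rec = null;
}
if (rec != null) {
    rec.startRecording();
    // ... read() into a buffer, then stop() and release()
}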
Problem 2: a crash in getPlaybackHeadPosition() when a periodic notification arrives after the AudioTrack has been released:
10-12 13:29:46.017: E/AndroidRuntime(5549): java.lang.IllegalStateException: Unable to retrieve AudioTrack pointer for getPosition()
10-12 13:29:46.017: E/AndroidRuntime(5549): at android.media.AudioTrack.native_get_position(Native Method)
10-12 13:29:46.017: E/AndroidRuntime(5549): at android.media.AudioTrack.getPlaybackHeadPosition(AudioTrack.java:642)
10-12 13:29:46.017: E/AndroidRuntime(5549): at com.txz.test.AudioRecThread.onPeriodicNotification(AudioRecThread.java:42)
10-12 13:29:46.017: E/AndroidRuntime(5549): at android.media.AudioTrack$NativeEventHandlerDelegate$1.handleMessage(AudioTrack.java:1273)
10-12 13:29:46.017: E/AndroidRuntime(5549): at android.os.Handler.dispatchMessage(Handler.java:102)
10-12 13:29:46.017: E/AndroidRuntime(5549): at android.os.Looper.loop(Looper.java:136)
10-12 13:29:46.017: E/AndroidRuntime(5549): at android.app.ActivityThread.main(ActivityThread.java:5017)
10-12 13:29:46.017: E/AndroidRuntime(5549): at java.lang.reflect.Method.invokeNative(Native Method)
10-12 13:29:46.017: E/AndroidRuntime(5549): at java.lang.reflect.Method.invoke(Method.java:515)
10-12 13:29:46.017: E/AndroidRuntime(5549): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:779)
10-12 13:29:46.017: E/AndroidRuntime(5549): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:595)
10-12 13:29:46.017: E/AndroidRuntime(5549): at dalvik.system.NativeStart.main(Native Method)
Solution: check the play state inside the callback, so that late notifications arriving after stop()/release() are ignored:
public void onPeriodicNotification(AudioTrack track) {
if (track.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) { // add a play-state check
playUpdatePeriodAmass += 25;
long c = System.currentTimeMillis();
c -= startPlayMilli;
c = track.getPlaybackHeadPosition();
c /= 8;
Log.v(TAG + " onPeriodicNotification", "real ms " + c
+ " period total " + playUpdatePeriodAmass);
}
}
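On top of that check, a further safeguard (my own suggestion, not from the original post) is to detach the listener before releasing the track, so no late notification can reach a released object in the first place:
// Teardown order that avoids notifications racing with release()
audioTrack.setPlaybackPositionUpdateListener(null); // stop delivering marker/period callbacks
audioTrack.stop();
audioTrack.release();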