Android使用AudioRecorder录音,通过G711A编码并实时发送

AudioRecorder:封装了录音的方法:创建录音对象、开始、暂停、停止、取消,使用静态枚举类Status来记录录音的状态。

AudioRecorder类:


import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.util.Log;

import com.tutk.IOTC.AVAPIs;
import com.tutk.IOTC.AVIOCTRLDEFs;
import com.tutk.IOTC.Client;
import com.tutk.IOTC.IOTCAPIs;

import java.util.ArrayList;
import java.util.List;


public class AudioRecorder {
    private static AudioRecorder audioRecorder;
    // 音频源:音频输入-麦克风
    private final static int AUDIO_INPUT = MediaRecorder.AudioSource.MIC;
    // 采样率
    // 44100是目前的标准,但是某些设备仍然支持22050,16000,11025
    // 采样频率一般共分为22.05KHz、44.1KHz、48KHz三个等级
    private final static int AUDIO_SAMPLE_RATE = 8000;
    // 音频通道 单声道
    private final static int AUDIO_CHANNEL = AudioFormat.CHANNEL_IN_MONO;
    // 音频格式:PCM编码
    private final static int AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
    // 缓冲区大小:缓冲区字节大小
    private int bufferSizeInBytes = 0;
    // 录音对象
    private static AudioRecord audioRecord;
    // 录音状态
    private Status status = Status.STATUS_NO_READY;
    // 录音文件集合
    private List filesName = new ArrayList<>();
    //发送音频的线程
    private SendAudioThread sendAudioThread;

    int chIndexForSendAudio = -1;
    int avIndexForSendAudio = -1;

    private AudioRecorder() {
    }

    //单例模式
    public static AudioRecorder getInstance() {
        if (audioRecorder == null) {
            audioRecorder = new AudioRecorder();
        }
        return audioRecorder;
    }

    /**
     * 创建默认的录音对象
     */
    public void createDefaultAudio() {
        // 获得缓冲区字节大小
        bufferSizeInBytes = AudioRecord.getMinBufferSize(AUDIO_SAMPLE_RATE,
                AUDIO_CHANNEL, AUDIO_ENCODING);
        audioRecord = new AudioRecord(AUDIO_INPUT, AUDIO_SAMPLE_RATE, AUDIO_CHANNEL, AUDIO_ENCODING, bufferSizeInBytes);
        status = Status.STATUS_READY;
        Log.d("AudioRecorder", "createDefaultAudio success");
    }

    /**
     * 开始录音
     */
    public void startRecord() {
        createDefaultAudio();

        if (status == Status.STATUS_NO_READY || audioRecord == null) {
            throw new IllegalStateException("录音尚未初始化,请检查是否禁止了录音权限~");
        }
        if (status == Status.STATUS_START) {
            throw new IllegalStateException("正在录音");
        }
        Log.d("AudioRecorder", "===startRecord===" + audioRecord.getState());
        audioRecord.startRecording();

        //将录音状态设置成正在录音状态
        status = Status.STATUS_START;
        //开启线程
        if (sendAudioThread == null) {
            Log.d("fsdfwerf", "thread start");
            sendAudioThread = new SendAudioThread();
            sendAudioThread.start();
        }
    }

    /**
     * 停止录音
     */
    public void stopRecord() {
        Log.d("AudioRecorder", "===stopRecord===");
        if (status == Status.STATUS_NO_READY || status == Status.STATUS_READY) {
            throw new IllegalStateException("录音尚未开始");
        } else {
            try {
                //延迟500ms,否则最后声音不能发送出去
                Thread.sleep(500);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if (sendAudioThread != null) {
                sendAudioThread.stopThread();
                try {
                    sendAudioThread.interrupt();
                    sendAudioThread.join();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                sendAudioThread = null;
            }
        }
    }

    /**
     * 取消录音
     */
    public void canel() {
        filesName.clear();
//        fileName = null;
        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }
        status = Status.STATUS_NO_READY;
    }

    /**
     * 释放资源
     */
    public void release() {
        Log.d("AudioRecorder", "===release===");
        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }
        status = Status.STATUS_NO_READY;
    }

    /**
     * 录音对象的状态
     */
    public enum Status {
        //未开始
        STATUS_NO_READY,
        //预备
        STATUS_READY,
        //录音
        STATUS_START,
        //暂停
        STATUS_PAUSE,
        //停止
        STATUS_STOP
    }

    private class SendAudioThread extends Thread {
        private boolean isRecording = false;

        @Override
        public void run() {
            super.run();
            isRecording = true;
            byte flag;
            byte[] frameInfo;
            int ret = -1;
            chIndexForSendAudio = IOTCAPIs.IOTC_Session_Get_Free_Channel(Client.sid);
            Log.i("fsdfwerf", "IOTC_Session_Get_Free_Channel: " + chIndexForSendAudio);
            ret = AVAPIs.avSendIOCtrl(Client.avSendIndex, 848, AVIOCTRLDEFs.SMsgAVIoctrlAVStream.parseContent(chIndexForSendAudio), 16);
            Log.i("fsdfwerf", "avSendIOCtrl: " + ret);
            avIndexForSendAudio = AVAPIs.avServStart(Client.sid, (byte[]) null, (byte[]) null, 60, 0, chIndexForSendAudio);
            Log.i("fsdfwerf", "avServStart: " + avIndexForSendAudio);
            //录音
            while (isRecording) {
                byte[] audiodata = new byte[bufferSizeInBytes];
                int readsize = 0;
                readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
                if (readsize > 0) {
                    byte[] audioData = new byte[readsize];
                    System.arraycopy(audiodata, 0, audioData, 0, readsize);
                    byte[] outbuf = new byte[audiodata.length / 2];
                    //编码为G711A
                    int encodeSize = G711.encode(audiodata, 0, audiodata.length, outbuf);
                    //发送录音
                    flag = 2;
                    frameInfo = AVIOCTRLDEFs.SFrameInfo.parseContent((short) 143, flag, (byte) 0, (byte) 0, (int) System.currentTimeMillis());
                    ret = AVAPIs.avSendAudioData(avIndexForSendAudio, outbuf, encodeSize, frameInfo, 16);
                }
            }
            //停止录音
            if (audioRecorder != null) {
                audioRecorder.stopRecord();
                audioRecorder.release();
            }

            if (avIndexForSendAudio >= 0) {
                AVAPIs.avServStop(avIndexForSendAudio);
            }

            if (chIndexForSendAudio >= 0) {
                IOTCAPIs.IOTC_Session_Channel_OFF(Client.sid, chIndexForSendAudio);
            }
            avIndexForSendAudio = -1;
            chIndexForSendAudio = -1;
        }

        public void stopThread() {
            if (Client.sid >= 0 && chIndexForSendAudio >= 0) {
                AVAPIs.avServExit(Client.sid, chIndexForSendAudio);
                AVAPIs.avSendIOCtrl(Client.avSendIndex, 849, AVIOCTRLDEFs.SMsgAVIoctrlAVStream.parseContent(chIndexForSendAudio), 16);
            }
            isRecording = false;

        }
    }

}

 

MainActivity中调用:

        audioRecorder = AudioRecorder.getInstance();
        sendRecord = findViewById(R.id.btn_sendRecorder);
        sendRecord.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                switch (event.getAction()){
                    case MotionEvent.ACTION_DOWN:
                        audioRecorder.startRecord();
                        break;
                    case MotionEvent.ACTION_UP:
                        audioRecorder.stopRecord();
                        break;

                }
                return false;
            }
        });

 

参考:

https://www.jb51.net/article/117384.htm

你可能感兴趣的:(Android)