Vue + PC-side recording + canvas audio visualization + Blob file output + recording playback

  1. Add the encapsulated record.js file. The code is as follows:
    /* Recording helper for Vue projects — standard version with canvas visualization */

    window.URL = window.URL || window.webkitURL
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia
    let HZRecorder = function (stream, config) {
      // const that = this;
      config = config || {}
      config.sampleBits = config.sampleBits || 8 // sample size in bits: 8 or 16
      config.sampleRate = config.sampleRate || (44100 / 6) // sample rate (44100 / 6)
      let context = new (window.AudioContext || window.webkitAudioContext)()
      let audioInput = context.createMediaStreamSource(stream)
      let createScript = context.createScriptProcessor || context.createJavaScriptNode
      let recorder = createScript.apply(context, [4096, 1, 1])
      let transcount = 0;
      let pos = 0;
      let buffer48 = new Float32Array(1024 * 3);
      let buffer16 = new Float32Array(buffer48.length / 3);
      let audioData = {
        size: 0, // total length of the recorded data
        buffer: [], // recording buffer
        inputSampleRate: context.sampleRate, // input sample rate
        inputSampleBits: 16, // input sample size in bits: 8 or 16
        outputSampleRate: config.sampleRate, // output sample rate
        outputSampleBits: config.sampleBits, // output sample size in bits: 8 or 16
        input: function (data) {
          this.buffer.push(new Float32Array(data))
          this.size += data.length
        },
        compress: function () { // merge and downsample
          // merge all buffered chunks into a single Float32Array
          let data = new Float32Array(this.size)
          let offset = 0
          for (let i = 0; i < this.buffer.length; i++) {
            data.set(this.buffer[i], offset)
            offset += this.buffer[i].length
          }
          // downsample: keep one sample out of every `compression` samples
          let compression = parseInt(this.inputSampleRate / this.outputSampleRate)
          let length = data.length / compression
          let result = new Float32Array(length)
          let index = 0;
          let j = 0
          while (index < length) {
            result[index] = data[j]
            j += compression
            index++
          }
          return result
        },
        encodeWAV: function () {
          let sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate)
          let sampleBits = Math.min(this.inputSampleBits, this.outputSampleBits)
          let bytes = this.compress()
          let dataLength = bytes.length * (sampleBits / 8)
          let buffer = new ArrayBuffer(44 + dataLength)
          let data = new DataView(buffer)
          let channelCount = 1 // mono (single channel)
          let offset = 0
          let writeString = function (str) {
            for (let i = 0; i < str.length; i++) {
              data.setUint8(offset + i, str.charCodeAt(i))
            }
          }
          // RIFF chunk identifier
          writeString('RIFF');
          offset += 4
          // byte count from the next address to the end of the file, i.e. file size - 8
          data.setUint32(offset, 36 + dataLength, true);
          offset += 4
          // WAV file marker
          writeString('WAVE');
          offset += 4
          // format chunk marker
          writeString('fmt ');
          offset += 4
          // length of the format data, usually 0x10 = 16
          data.setUint32(offset, 16, true);
          offset += 4
          // format category (1 = PCM samples)
          data.setUint16(offset, 1, true);
          offset += 2
          // number of channels
          data.setUint16(offset, channelCount, true);
          offset += 2
          // sample rate: samples per second per channel
          data.setUint32(offset, sampleRate, true);
          offset += 4
          // byte rate (average bytes per second): channels x sample rate x bits per sample / 8
          data.setUint32(offset, channelCount * sampleRate * (sampleBits / 8), true);
          offset += 4
          // block align (bytes per sample frame): channels x bits per sample / 8
          data.setUint16(offset, channelCount * (sampleBits / 8), true);
          offset += 2
          // bits per sample
          data.setUint16(offset, sampleBits, true);
          offset += 2
          // data chunk identifier
          writeString('data');
          offset += 4
          // total size of the sample data, i.e. file size - 44
          data.setUint32(offset, dataLength, true);
          offset += 4
          // write the sample data
          if (sampleBits === 8) {
            for (let i = 0; i < bytes.length; i++, offset++) {
              let s = Math.max(-1, Math.min(1, bytes[i]))
              let val = s < 0 ? s * 0x8000 : s * 0x7FFF
              // map the signed 16-bit value to an unsigned 8-bit value (0-255)
              val = parseInt(255 / (65535 / (val + 32768)))
              data.setUint8(offset, val)
            }
          } else {
            for (let i = 0; i < bytes.length; i++, offset += 2) {
              let s = Math.max(-1, Math.min(1, bytes[i]))
              data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true)
            }
          }
          return new Blob([data], {
            type: 'audio/wav'
          })
        }
      }
      // start recording
      this.start = function () {
        audioInput.connect(recorder)
        recorder.connect(context.destination)
      }
      // stop recording
      this.stop = function () {
        recorder.disconnect()
      }
      // get the recorded audio as a WAV Blob
      this.getBlob = function () {
        this.stop()
        return audioData.encodeWAV()
      }
      // playback: point the given <audio> element at the recorded Blob
      this.play = function (audio) {
        audio.src = window.URL.createObjectURL(this.getBlob())
      }
      // upload the recorded Blob via XMLHttpRequest
      this.upload = function (url, callback) {
        let fd = new FormData()
        fd.append('audioData', this.getBlob())
        let xhr = new XMLHttpRequest()
        /* eslint-disable */
        if (callback) {
          xhr.upload.addEventListener('progress', function (e) {
            callback('uploading', e)
          }, false)
          xhr.addEventListener('load', function (e) {
            callback('ok', e)
          }, false)
          xhr.addEventListener('error', function (e) {
            callback('error', e)
          }, false)
          xhr.addEventListener('abort', function (e) {
            callback('cancel', e)
          }, false)
        }
        /* eslint-enable */
        xhr.open('POST', url)
        xhr.send(fd)
      }
    
      // initialize the canvas drawing surface; note: this is the canvas element rendered in your view
      let canvas = document.getElementById('mycanvas');
      let width = canvas.width;
      let height = canvas.height;
      let g = canvas.getContext("2d");
      g.strokeStyle = "#409EFF";
      g.translate(0.5, height / 2 + 0.5);
      // audio capture callback
      recorder.onaudioprocess = function (e) {
        // push the input samples into the recording buffer
        audioData.input(e.inputBuffer.getChannelData(0))
        // raw input channel data (e.g. 48000 Hz, 32-bit float, mono)
        let input = e.inputBuffer.getChannelData(0);
        // output channel data of the processor node
        let output = e.outputBuffer.getChannelData(0);
        // buffer the samples for the 48 kHz -> 16 kHz conversion below
        for (let n = 0; n < input.length; n++) {
          buffer48[pos++] = input[n];
        }
        transcount++;
        if (transcount == 3) {
          // reset counters
          transcount = 0;
          pos = 0;
          // downsample 48 kHz -> 16 kHz by keeping every 3rd sample
          let offset = 0;
          for (let k = 0; k < buffer48.length; k += 3) {
            buffer16[offset++] = buffer48[k];
          }
          // convert 32-bit float to 16-bit PCM (not used further here; handy for streaming)
          let dataAsInt16Array = new Int16Array(buffer16.length);
          for (let i = 0; i < buffer16.length; i++) {
            let s = Math.max(-1, Math.min(1, buffer16[i]));
            if (s < 0) {
              dataAsInt16Array[i] = 0x8000 * s;
            } else {
              dataAsInt16Array[i] = 0x7fff * s;
            }
          }
        }
        // copy the input to the output buffer; the same array is used for drawing below
        for (let i = 0; i < input.length; i++) {
          output[i] = input[i];
        }
        // draw the buffered samples onto the canvas
        g.clearRect(-0.5, -height / 2 - 0.5, width, height);
        g.beginPath();
        for (let i = 0; i < width; i++) {
          // waveform drawing; replace with whatever visual effect you want
          g.lineTo(i, height / 2 * output[(output.length * i / width) | 0]);
        }
        g.stroke();
      }
    }
    // throw an error (also shown to the user via alert)
    HZRecorder.throwError = function (message) {
      alert(message)
      throw new function () {
        this.toString = function () {
          return message
        }
      }()
    }
    // whether recording is supported
    HZRecorder.canRecording = (navigator.getUserMedia != null)
    // get a recorder instance
    HZRecorder.get = function (callback, config) {
      if (callback) {
        if (navigator.getUserMedia) {
          navigator.getUserMedia({
              audio: true // audio only
            },
            function (stream) {
              let rec = new HZRecorder(stream, config)
              callback(rec)
            },
            function (error) {
              switch (error.code || error.name) {
                case 'PERMISSION_DENIED':
                case 'PermissionDeniedError':
                  HZRecorder.throwError('The user denied the permission request.')
                  break
                case 'NOT_SUPPORTED_ERROR':
                case 'NotSupportedError':
                  HZRecorder.throwError('The browser does not support this hardware device.')
                  break
                case 'MANDATORY_UNSATISFIED_ERROR':
                case 'MandatoryUnsatisfiedError':
                  HZRecorder.throwError('The specified hardware device could not be found.')
                  break
                default:
                  HZRecorder.throwError('Unable to open the microphone. Error: ' + (error.code || error.name))
                  break
              }
            })
        } else {
          HZRecorder.throwError('The current browser does not support recording.');
          return
        }
      }
    }
    export default HZRecorder
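
  Note: navigator.getUserMedia is deprecated; modern browsers expose the promise-based navigator.mediaDevices.getUserMedia instead. A small shim (my own addition, not part of the original record.js) can be placed near the top of the file so the code above keeps working where only the new API exists:

    // optional shim (assumption): map the promise-based API back onto the
    // legacy callback-style navigator.getUserMedia that this file expects
    if (!navigator.getUserMedia && navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
      navigator.getUserMedia = function (constraints, onSuccess, onError) {
        navigator.mediaDevices.getUserMedia(constraints).then(onSuccess).catch(onError)
      }
    }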
  2. Import and use record.js in a Vue component, as sketched below.
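  A minimal usage sketch, assuming record.js sits next to the component; the element id mycanvas matches what record.js queries, while the ref names, sizes and button labels are illustrative:

    <template>
      <div>
        <!-- the id must match the one queried in record.js -->
        <canvas id="mycanvas" v-show="showCanvas" width="300" height="100"></canvas>
        <audio ref="player" controls></audio>
        <button @click="startRecord">Start</button>
        <button @click="stopRecord">Stop and play</button>
      </div>
    </template>

    <script>
    import HZRecorder from './record.js'

    export default {
      data () {
        return {
          recorder: null,
          showCanvas: false
        }
      },
      methods: {
        startRecord () {
          // show the canvas first so document.getElementById('mycanvas') finds it inside record.js
          this.showCanvas = true
          this.$nextTick(() => {
            HZRecorder.get(rec => {
              this.recorder = rec
              rec.start()
            })
          })
        },
        stopRecord () {
          if (!this.recorder) return
          // getBlob() stops the recorder and returns a WAV Blob; play it back via the <audio> element
          let blob = this.recorder.getBlob()
          this.$refs.player.src = window.URL.createObjectURL(blob)
          this.$refs.player.play()
        }
      }
    }
    </script>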
  

3. Test demo: in my demo the canvas area is toggled between hidden and shown, so it has to be clicked visible first. record.js looks the canvas up with document.getElementById('mycanvas'), so the element must already exist in the DOM when HZRecorder.get is called.
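
If you also want to POST the recorded WAV to a server, the upload helper built into record.js can be reused. A minimal sketch, assuming a hypothetical endpoint /api/upload-audio (the server reads the multipart field named audioData):

    // hypothetical endpoint; adjust to your backend
    this.recorder.upload('/api/upload-audio', function (state, e) {
      if (state === 'uploading') {
        // progress event while the request is in flight
      } else if (state === 'ok') {
        console.log('upload finished')
      } else if (state === 'error' || state === 'cancel') {
        console.log('upload failed or was cancelled')
      }
    })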

