Detecting real-time microphone volume with JS

There is a fair amount of code; see the second block (the calling code), which carries the relevant comments.

Key steps:
1. After the microphone is opened successfully, save the media tracks (second code block).
2. When shutting down, iterate over the track array and stop each track (third code block); both steps are sketched below.
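
A minimal, framework-independent sketch of those two steps, using only the standard getUserMedia API (the names openMic, closeMic and savedStream are illustrative, not from the original code):

let savedStream: MediaStream | null = null;

async function openMic() {
  // Step 1: keep a reference to the MediaStream returned on success;
  // it contains the audio track(s) that will have to be stopped later
  savedStream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
}

function closeMic() {
  // Step 2: iterate over the tracks and stop each one to release the microphone
  savedStream?.getTracks().forEach((track) => track.stop());
  savedStream = null;
}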

The wrapper below can be called directly.

Wrapper code:

/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree.
 */
export default class SoundMeter {
  instant: number;
  slow: number;
  clip: number;
  context: AudioContext;
  script: ScriptProcessorNode;
  mic: MediaStreamAudioSourceNode | null = null;

  constructor(context: AudioContext) {
    this.context = context;
    this.instant = 0.0;
    this.slow = 0.0;
    this.clip = 0.0;
    // ScriptProcessorNode is deprecated in favour of AudioWorklet, but it is kept
    // here to match the original WebRTC sample.
    this.script = context.createScriptProcessor(2048, 1, 1);
    this.script.onaudioprocess = (event: AudioProcessingEvent) => {
      const input = event.inputBuffer.getChannelData(0);
      let sum = 0.0;
      let clipcount = 0;
      for (let i = 0; i < input.length; ++i) {
        sum += input[i] * input[i];
        if (Math.abs(input[i]) > 0.99) {
          clipcount += 1;
        }
      }
      // RMS of the current buffer = instantaneous volume
      this.instant = Math.sqrt(sum / input.length);
      // Exponentially smoothed volume
      this.slow = 0.95 * this.slow + 0.05 * this.instant;
      // Fraction of samples in this buffer that clipped
      this.clip = clipcount / input.length;
    };
  }

  connectToSource = (stream: MediaStream, callback?: (err: Error | null) => void) => {
    console.log('SoundMeter connecting');
    try {
      this.mic = this.context.createMediaStreamSource(stream);
      this.mic.connect(this.script);
      // Necessary to make the sample run, but should not be.
      this.script.connect(this.context.destination);
      if (typeof callback !== 'undefined') {
        callback(null);
      }
    } catch (e) {
      console.error(e);
      if (typeof callback !== 'undefined') {
        callback(e as Error);
      }
    }
  };

  stop = () => {
    this.mic && this.mic.disconnect();
    this.script.disconnect();
  };
}
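
A minimal usage sketch of the wrapper on its own, outside React (audioContext and meter are illustrative names; it assumes the code runs in a browser and the user grants microphone access):

const audioContext = new AudioContext();
const meter = new SoundMeter(audioContext);

navigator.mediaDevices
  .getUserMedia({ audio: true, video: false })
  .then((stream) => {
    meter.connectToSource(stream, (err) => {
      if (err) return console.error(err);
      // Read the instantaneous volume roughly every 200 ms
      setInterval(() => console.log(meter.instant.toFixed(2)), 200);
    });
  })
  .catch((err) => console.error('getUserMedia error:', err));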

Calling code

  useEffect(() => {
    const mic = () => {
      const constraints = {
        audio: true,
        video: false,
      };
      // Request access to the microphone
      navigator.mediaDevices
        .getUserMedia(constraints)
        .then(handleSuccess)
        .catch(handleError);
    };

    const handleSuccess = (stream: MediaStream) => {
      // Also kept on window so the shutdown code below can check whether a stream exists
      window.MSStream = stream;
      // On success, the media input produces a MediaStream containing the tracks of the
      // requested media types. Save it here; the tracks are needed later for shutdown.
      setSource(window.MSStream);
      soundMeter.connectToSource(stream, function (e: Error | null) {
        if (e) {
          console.log(e);
          return;
        }
        // The timer must be cleared when the component unmounts, so keep it in a useRef()
        intervalRef.current = setInterval(function () {
          // Update the displayed value, i.e. the current volume
          setVoice(Number(soundMeter.instant.toFixed(2)) * 100);
        }, 200);
      });
    };

    const handleError = (error: Error) => {
      console.log('navigator.getUserMedia error: ', error);
    };

    if (micResult.support === 'success') {
      mic();
    } else {
      // Only stop if the audio source node exists
      soundMeter.mic && soundMeter.stop();
      clearInterval(intervalRef.current);
    }
    return () => {
      clearInterval(intervalRef.current);
    };
    // Start or stop monitoring the microphone volume depending on whether
    // the microphone hardware was acquired successfully
  }, [micResult, soundMeter]);
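
The hook above relies on state and refs declared elsewhere in the component (soundMeter, micResult, source, voice, intervalRef). A sketch of what those declarations might look like inside the component body; the exact types and the shape of micResult are assumptions:

import { useRef, useState } from 'react';

// Inside the function component, above the useEffect
const intervalRef = useRef<ReturnType<typeof setInterval>>();             // timer handle, cleared on unmount
const [source, setSource] = useState<MediaStream | null>(null);           // saved MediaStream whose tracks are stopped later
const [voice, setVoice] = useState(0);                                    // displayed volume value
const [soundMeter] = useState(() => new SoundMeter(new AudioContext()));  // instance of the wrapper above
// micResult.support === 'success' signals that the microphone hardware is available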

Stopping the listener

If you simply switch to another component after the call, the volume listener keeps running and the browser keeps showing the microphone-in-use indicator at the top of the tab, so the microphone has to be released explicitly.

 const init = useCallback(() => {
    // On the first mount there is no stream yet and the code below would throw, so return early
    if (!window.MSStream) return;

    // Take the media track array saved above and stop each track to release the microphone
    source &&
      source.getTracks().forEach(function (track: MediaStreamTrack) {
        track.stop();
      });
  }, [source]);
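
One way to wire init into the component lifecycle (a sketch, not shown in the original post) is to call it from an effect's unmount cleanup, so the tracks are stopped when the user leaves the component:

  useEffect(() => {
    return () => {
      // Release the saved microphone tracks on unmount
      init();
    };
  }, [init]);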
