var constraints = {
    video: true,
    audio: true
};
navigator.mediaDevices.getUserMedia(constraints)
    .then(gotMediaStream)
    .then(gotDevices)
    .catch(handleError);

function gotMediaStream(stream) {
    videoplay.srcObject = stream;
}
Audio and video capture is done mainly with the getUserMedia method. The constraints object configures which tracks to capture: true for video or audio means capture that track, false means skip it. The resulting stream is then attached to the video element in gotMediaStream.
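handleError appears in the chain above but is not shown; a minimal sketch (only the function name comes from the chain, the body is an assumption):

function handleError(err) {
    // getUserMedia rejects with errors such as NotAllowedError or NotFoundError
    console.error('getUserMedia error:', err.name, err.message);
}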
For cross-browser compatibility it is enough to include the official adapter-latest shim.
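For example, by loading the officially hosted build in a script tag:

<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>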
function gotMediaStream(stream) {
    videoplay.srcObject = stream;
    // returning the promise passes the device list on to the next .then()
    return navigator.mediaDevices.enumerateDevices();
}
function gotDevices(deviceInfos) {
    deviceInfos.forEach(function(deviceinfo) {
        var option = document.createElement('option');
        option.text = deviceinfo.label;
        option.value = deviceinfo.deviceId;
        if (deviceinfo.kind === 'audioinput') {
            audioSource.appendChild(option);
        } else if (deviceinfo.kind === 'audiooutput') {
            audioOutput.appendChild(option);
        } else if (deviceinfo.kind === 'videoinput') {
            videoSource.appendChild(option);
        }
    });
}
By returning navigator.mediaDevices.enumerateDevices() from gotMediaStream, the resolved device list arrives in gotDevices, which can then populate the audio and video device selects. (Device labels are only filled in after the user has granted media permission.)
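Building on this, choosing a different entry in a device select can trigger a re-capture with the chosen deviceId. A sketch, assuming the videoSource select and the handlers above:

videoSource.onchange = function() {
    var deviceId = videoSource.value;
    navigator.mediaDevices.getUserMedia({
        video: { deviceId: deviceId ? { exact: deviceId } : undefined },
        audio: true
    })
    .then(gotMediaStream)
    .catch(handleError);
};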
var constraints = {
    video: {
        width: 640,
        height: 480,
        frameRate: 30,
        // facingMode: "environment" = rear camera, "user" = front camera
        facingMode: "user",
        deviceId: deviceId ? { exact: deviceId } : undefined
    }
};
All video constraints are configured inside constraints; see the official API documentation for the full list of options.
var constraints = {
    video: {
        width: 640,
        height: 480,
        frameRate: 30,
        // facingMode: "environment" = rear camera, "user" = front camera
        facingMode: "user",
        deviceId: deviceId ? { exact: deviceId } : undefined
    },
    audio: {
        // noise suppression
        noiseSuppression: true,
        // echo cancellation
        echoCancellation: true
    }
};
Audio constraints are configured in constraints in the same way as video constraints; see the official API documentation for more details.
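Constraints can also be adjusted on a live track without re-capturing, via applyConstraints. A sketch, assuming stream holds the current MediaStream:

var videoTrack = stream.getVideoTracks()[0];
videoTrack.applyConstraints({ frameRate: 15 })
    .then(function() {
        console.log('frame rate lowered to 15');
    })
    .catch(handleError);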
Filter: the page offers a filter select (filtersSelect) with the options None / blur / grayscale / invert / sepia (visual effects).
filtersSelect.onchange = function() {
    videoplay.className = filtersSelect.value;
};
To apply an effect, it is enough to set the className of the video element to the selected value; the classes themselves are plain CSS filters (listed in the full code at the end).
Take snapshot
// grab a still image from the video
var snapshot = document.querySelector("button#snapshot");
var picture = document.querySelector("canvas#picture");
// match the 640x480 capture resolution so the frame is not distorted
picture.width = 640;
picture.height = 480;
snapshot.onclick = function() {
    // carry the selected CSS filter over to the canvas element
    picture.className = filtersSelect.value;
    picture.getContext('2d').drawImage(videoplay,
        0, 0,
        picture.width,
        picture.height);
};
Grabbing an image from the video is done with a canvas: drawImage copies the current video frame onto it.
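To actually save the snapshot as a file, the canvas can be exported with toDataURL. A minimal sketch (the saveImage button is an assumption, not part of the original page):

// hypothetical "save image" button
var saveImage = document.querySelector("button#saveImage");
saveImage.onclick = function() {
    var a = document.createElement('a');
    a.href = picture.toDataURL('image/png'); // encode the canvas as a PNG data URL
    a.download = 'snapshot.png';
    a.click();
};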
// read the actual settings of the video track
var divConstraints = document.querySelector('div#constraints');

function gotMediaStream(stream) {
    var videoTrack = stream.getVideoTracks()[0];
    var videoConstraints = videoTrack.getSettings();
    divConstraints.textContent = JSON.stringify(videoConstraints, null, 2);
    videoplay.srcObject = stream;
    return navigator.mediaDevices.enumerateDevices();
}
Result:
{
    "aspectRatio": 1.3333333333333333,
    "deviceId": "97953df027728ab0acac98c670d59f654a1e7f36f9faf70f2e0fd7a479394fe3",
    "frameRate": 29.969999313354492,
    "groupId": "1b83734781c08e3c51519598002aa1d5acb1bcd73772f5d2db4b976586af3666",
    "height": 480,
    "width": 640,
    "videoKind": "color"
}
To read the video constraints actually in effect, fetch the video track inside gotMediaStream; this information lives on the track, and getSettings() returns the values the browser is currently using.
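The track can also report what the device is capable of, not just what is in use. A sketch using getCapabilities(), which is available in Chrome but not in every browser:

var videoTrack = stream.getVideoTracks()[0];
if (videoTrack.getCapabilities) {
    // supported ranges, e.g. { width: {min, max}, frameRate: {min, max}, ... }
    console.log(videoTrack.getCapabilities());
}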
// video recording
btnRecord.onclick = () => {
    if (btnRecord.textContent === 'Start Record') {
        startRecord();
        btnRecord.textContent = 'Stop Record';
        btnPlay.disabled = true;
        btnDownload.disabled = true;
    } else {
        stopRecord();
        btnRecord.textContent = 'Start Record';
        btnPlay.disabled = false;
        btnDownload.disabled = false;
    }
};
function gotMediaStream(stream) {
    …
    // keep the stream in a global so the recorder can reach it
    window.stream = stream;
    …
    return navigator.mediaDevices.enumerateDevices();
}
// start recording
function startRecord() {
    buffer = [];
    var options = {
        mimeType: 'video/webm;codecs=vp8'
    };
    if (!window.MediaRecorder.isTypeSupported(options.mimeType)) {
        console.error(`${options.mimeType} is not supported`);
        return;
    }
    try {
        mediaRecorder = new window.MediaRecorder(window.stream, options);
    } catch (e) {
        console.error('failed to create MediaRecorder:', e);
        return;
    }
    mediaRecorder.ondataavailable = handleDataAvailable;
    // emit a dataavailable event every 10 ms
    mediaRecorder.start(10);
}
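handleDataAvailable is wired up above but not shown; a minimal sketch that appends every recorded chunk to the buffer:

var buffer;
var mediaRecorder;

function handleDataAvailable(e) {
    // each event carries a Blob chunk of encoded webm data
    if (e && e.data && e.data.size > 0) {
        buffer.push(e.data);
    }
}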
// stop recording
function stopRecord() {
    mediaRecorder.stop();
}
btnPlay.onclick = () => {
    var blob = new Blob(buffer, { type: 'video/webm' });
    recvideo.src = window.URL.createObjectURL(blob);
    // srcObject takes precedence over src, so it has to be cleared
    recvideo.srcObject = null;
    recvideo.controls = true;
    recvideo.play();
};
btnDownload.onclick = () => {
    var blob = new Blob(buffer, { type: 'video/webm' });
    var url = window.URL.createObjectURL(blob);
    var a = document.createElement('a');
    a.href = url;
    a.style.display = 'none';
    // without a download name the browser would navigate instead of saving
    a.download = 'recording.webm';
    document.body.appendChild(a);
    a.click();
};
// getDisplayMedia captures the desktop, getUserMedia captures the camera
function start() {
    if (!navigator.mediaDevices ||
        !navigator.mediaDevices.getDisplayMedia) {
        console.log("getDisplayMedia is not supported!");
        return;
    } else {
        // capture the desktop
        var constraints1 = {
            video: true,
            audio: true
        };
        navigator.mediaDevices.getDisplayMedia(constraints1)
            .then(gotMediaStream)
            .then(gotDevices)
            .catch(handleError);
    }
}
Capturing screen data works exactly the same way as capturing audio/video; just replace getUserMedia with getDisplayMedia.
Note: in Chrome you may need to enable the Experimental Web Platform features flag (chrome://flags) for getDisplayMedia to be available.
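When capture is no longer needed, the tracks should be stopped so the camera or screen-sharing indicator is released. A sketch using the global window.stream set earlier:

if (window.stream) {
    window.stream.getTracks().forEach(function(track) {
        track.stop(); // releases the camera / screen capture
    });
    window.stream = null;
}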
Full code
The full page (title "WebRtc capture video and audio") contains the device selects for audio input, audio output, and video input, the filter select (None / blur / grayscale / invert / sepia), the video element, the snapshot canvas, and the record/play/download buttons. The filter effects themselves are plain CSS classes:

.blur {
    -webkit-filter: blur(3px);
}
.grayscale {
    -webkit-filter: grayscale(1);
}
.invert {
    -webkit-filter: invert(1);
}
.sepia {
    -webkit-filter: sepia(1);
}