文章目录
前言
利用WebRTC和WebSocket技术,可以实现浏览器摄像头监控。这一实现过程主要依赖于WebRTC在浏览器中进行实时音视频通信的能力,以及WebSocket提供的全双工通信机制。
一、WebRTC是什么?
WebRTC(Web Real-Time Communication)是一项实时通讯技术,它允许网络应用或站点在不借助中间媒介的情况下,建立浏览器之间点对点(Peer-to-Peer)的连接,实现视频流、音频流或其他任意数据的传输。
二、WebSocket是什么?
WebSocket允许服务器和客户端之间建立一个持久的连接,通过这个连接双方可以实时地进行双向数据传输。与传统的HTTP协议相比,WebSocket避免了频繁建立和断开连接的开销,从而降低了延迟,提高了数据传输的效率和实时性。
三、通过WebRTC和WebSocket技术实现视频监控
本文仅使用webRTC的相关API获取音视频流通过websocket服务端转发实现流的播放。即通过webRTC的mediaDevices.getUserMedia请求用户媒体设备的摄像头和麦克风并获取音视频流,再通过websocket将实时音视频流转发给接收者并使用MediaSource实时播放,即可达到视频监控的目的。以下是个简单的示例。
1.websocket服务端
通过 Node.js 搭建 WebSocket 服务器转发音视频流:
const WebSocket = require('ws');
const url = require('url');

// Map of client id -> WebSocket connection.
// By convention, id "1" is the media-stream sender and id "2" is the receiver.
const clientMap = new Map();
const wss = new WebSocket.Server({ port: 8081 });

wss.on('connection', (ws, req) => {
  // Each client must identify itself via ?id=<n> in the connection URL.
  const id = url.parse(req.url, true).query.id;
  if (!id) {
    // Reject unidentified clients instead of leaving the socket open forever
    // (the original returned without closing, leaking the connection).
    ws.close(1008, 'Missing "id" query parameter');
    return;
  }
  clientMap.set(id, ws);

  ws.on('message', (message) => {
    if (isJsonString(message)) {
      // JSON control message: route `content` to the client named by `toId`.
      const { toId, content } = JSON.parse(message);
      const toClient = clientMap.get(String(toId));
      if (toClient && toClient.readyState === WebSocket.OPEN) {
        console.log(content);
        toClient.send(content);
      } else {
        console.log("toClient does not exist.");
      }
    } else {
      // Binary media chunk: forward it to the receiver (id "2").
      const toClient = clientMap.get('2');
      if (toClient && toClient.readyState === WebSocket.OPEN) {
        toClient.send(message);
      }
    }
  });

  ws.on('close', () => {
    // Drop the closed connection from the registry so a stale entry does not
    // shadow a client that later reconnects with the same id.
    for (const [clientId, conn] of clientMap) {
      if (conn === ws) {
        clientMap.delete(clientId);
        console.log(`Client ${clientId} disconnected.`);
        break;
      }
    }
  });
});
/**
 * Reports whether `str` parses as JSON.
 * Used to distinguish JSON control messages from binary media chunks.
 * @param {*} str - Candidate value (anything JSON.parse accepts via coercion).
 * @returns {boolean} true when JSON.parse succeeds, false otherwise.
 */
function isJsonString(str) {
  let parseable = false;
  try {
    JSON.parse(str);
    parseable = true;
  } catch (ignored) {
    // Not valid JSON — fall through and report false.
  }
  return parseable;
}
console.log('WebSocket server is running on ws://localhost:8081');
2.获取摄像头麦克风音视频流html
通过webRTC的mediaDevices.getUserMedia获取音视频流:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Record Video</title>
<style>
.container {
width: 7em;
height: 7em;
position: relative;
}
.button {
position: absolute;
width: 100%;
height: 100%;
border-radius: 50%;
border: 4px solid #090909;
background-color: transparent;
background-image: linear-gradient(145deg, #171717, #444245);
box-sizing: border-box;
box-shadow: inset 2px 2px 0 #7d7c7e, inset -2px -2px 0px #1c1c1c;
display: flex;
align-items: center;
justify-content: center;
}
.container input {
display: none;
}
.button::before {
position: absolute;
content: "";
width: 7.25em;
height: 7.25em;
border-radius: inherit;
background-color: transparent;
background-image: linear-gradient(145deg, #262626, #606060);
z-index: -1;
box-shadow: 11px 11px 22px #141414, -11px -11px 22px #525252;
}
.button .icon {
width: 60px;
height: 60px;
display: inline-block;
}
.button .icon svg {
height: 100%;
width: 100%;
fill: #a5a5a5;
}
.container input:checked+.button {
box-shadow: inset -2px -2px 0 #5e5e5e, inset 2px 2px 0 #1c1c1c;
border: 4px solid rgb(0, 215, 0);
animation: animeBorder 0.3s linear alternate-reverse infinite;
}
.container input:checked+.button .icon svg {
fill: rgb(0, 215, 0);
animation: animeFill 0.3s linear alternate-reverse infinite;
}
@keyframes animeFill {
to {
fill: rgba(0, 194, 0, 0.954);
}
}
@keyframes animeBorder {
to {
border-color: rgba(0, 175, 0, 0.878);
}
}
</style>
</head>
<body style="text-align: center; padding: 5% 5%; background-color: #212121;">
<div>
<video id="localVideo" width="100%" height="auto" autoplay controls></video>
<textarea style="margin-top: 30px; text-align: center;" id="bufferData" disabled></textarea>
</div>
<div class="container" style="margin-top: 10%; margin-left: 43.5%;">
<input type="checkbox" id="checkbox" />
<label for="checkbox" class="button" onclick="startRecord()">
<span class="icon">
<svg data-darkreader-inline-fill="" style="--darkreader-inline-fill: #000000;" fill=""
xml:space="preserve" viewBox="0 0 30.143 30.143" xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg" id="Capa_1" version="1.1" width="200px" height="200px">
<g stroke-width="0" id="SVGRepo_bgCarrier"></g>
<g stroke-linejoin="round" stroke-linecap="round" id="SVGRepo_tracerCarrier"></g>
<g id="SVGRepo_iconCarrier">
<g>
<path data-darkreader-inline-fill=""
d="M20.034,2.357v3.824c3.482,1.798,5.869,5.427,5.869,9.619c0,5.98-4.848,10.83-10.828,10.83 c-5.982,0-10.832-4.85-10.832-10.83c0-3.844,2.012-7.215,5.029-9.136V2.689C4.245,4.918,0.731,9.945,0.731,15.801 c0,7.921,6.42,14.342,14.34,14.342c7.924,0,14.342-6.421,14.342-14.342C29.412,9.624,25.501,4.379,20.034,2.357z">
</path>
<path data-darkreader-inline-fill=""
d="M14.795,17.652c1.576,0,1.736-0.931,1.736-2.076V2.08c0-1.148-0.16-2.08-1.736-2.08 c-1.57,0-1.732,0.932-1.732,2.08v13.496C13.062,16.722,13.225,17.652,14.795,17.652z">
</path>
</g>
</g>
</svg>
</span>
</label>
</div>
<script>
// Ask the user for camera + microphone access, preview the live stream in
// the local <video> element, then hand it to the WebSocket uploader.
function startRecord() {
  const constraints = { audio: true, video: true };
  navigator.mediaDevices
    .getUserMedia(constraints)
    .then((stream) => {
      const preview = document.getElementById('localVideo');
      preview.srcObject = stream;
      // Start streaming the captured media to the relay server.
      websocketConnect(stream);
    })
    .catch((err) => {
      // e.g. no camera present, or the user denied the permission prompt.
      console.error('Error accessing media devices.', err);
    });
}
let socket;
// Declared explicitly — the original assigned `reconnectAttempts` without
// ever declaring it, creating an implicit global.
let reconnectAttempts = 0;
const wsUrl = "ws://localhost:8081?id=1";

/**
 * Opens the upload WebSocket and, once connected, starts a MediaRecorder on
 * `stream`, pushing each encoded chunk to the relay server.
 * @param {MediaStream} stream - Live camera/microphone stream to upload.
 */
function websocketConnect(stream) {
  socket = new WebSocket(wsUrl);
  socket.binaryType = 'arraybuffer';
  const bufferData = document.getElementById('bufferData');

  socket.onopen = () => {
    console.log('WebSocket 连接已打开');
    reconnectAttempts = 0; // reset after a successful (re)connection

    const mimeType = 'video/webm; codecs="vp8, opus"';
    // The sender encodes with MediaRecorder, so test MediaRecorder support.
    // (The original checked MediaSource.isTypeSupported, which describes the
    // *playback* API, not the recorder.)
    if ('MediaRecorder' in window && MediaRecorder.isTypeSupported(mimeType)) {
      const mediaRecorder = new MediaRecorder(stream, { mimeType });
      mediaRecorder.ondataavailable = (event) => {
        if (socket.readyState === WebSocket.OPEN) {
          if (event.data.size > 0) {
            bufferData.textContent = event.data.size;
            socket.send(event.data);
          }
        } else {
          // Socket dropped while recording — try to recover.
          handleReconnect();
        }
      };
      // Emit one chunk every 200 ms (5 chunks per second).
      mediaRecorder.start(1000 / 5);
    } else {
      // alert() takes a single argument; the original passed two and the
      // mime type was silently dropped from the message.
      alert(`Unsupported MIME type or codec: ${mimeType}`);
    }
  };

  socket.addEventListener('message', (e) => {
    console.log('收到消息:', e.data);
    // The receiver asks us to restart capture when it (re)connects.
    if (e.data === "restart") {
      startRecord();
    }
  }, false);
  socket.addEventListener('close', (e) => {
    console.log('WebSocket 连接已关闭');
    handleReconnect(); // attempt recovery when the connection closes
  }, false);
  socket.addEventListener('error', (e) => {
    console.error('WebSocket 错误');
    handleReconnect(); // attempt recovery on transport errors
  }, false);
}
// Minimum spacing between reconnection attempts, so a dead server does not
// trigger a tight reconnect loop (close/error events re-enter this function).
const RECONNECT_DELAY_MS = 5000;
let reconnectScheduled = false;

/**
 * Schedules a full capture restart after a short delay.
 *
 * The original implementation only reconnected during the first 100 ms of
 * each minute, and created a bare WebSocket with no event handlers or
 * MediaRecorder attached — so the "reconnected" socket never carried data.
 * Restarting through startRecord() rebuilds the stream, the socket, and the
 * recorder together.
 */
function handleReconnect() {
  console.log('尝试重连了');
  if (reconnectScheduled) {
    return; // an attempt is already pending
  }
  reconnectScheduled = true;
  setTimeout(() => {
    reconnectScheduled = false;
    startRecord();
  }, RECONNECT_DELAY_MS);
}
// Close the WebSocket before the window goes away, so the server sees a clean
// close instead of an aborted connection (which made it throw).
window.onbeforeunload = function () {
  // `socket` stays undefined until startRecord() has run at least once;
  // the original called socket.close() unconditionally and threw then.
  if (socket && socket.readyState === WebSocket.OPEN) {
    socket.close();
  }
}
</script>
</body>
</html>
3.播放摄像头麦克风音视频流html
通过MSE(Media Source Extensions)规范中的MediaSource接口实时播放音视频流(MediaSource属于MSE,并非WebRTC的一部分):
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Play Video</title>
<style>
.container {
width: 7em;
height: 7em;
position: relative;
}
.button {
position: absolute;
width: 100%;
height: 100%;
border-radius: 50%;
border: 4px solid #090909;
background-color: transparent;
background-image: linear-gradient(145deg, #171717, #444245);
box-sizing: border-box;
box-shadow: inset 2px 2px 0 #7d7c7e, inset -2px -2px 0px #1c1c1c;
display: flex;
align-items: center;
justify-content: center;
}
.container input {
display: none;
}
.button::before {
position: absolute;
content: "";
width: 7.25em;
height: 7.25em;
border-radius: inherit;
background-color: transparent;
background-image: linear-gradient(145deg, #262626, #606060);
z-index: -1;
box-shadow: 11px 11px 22px #141414, -11px -11px 22px #525252;
}
.button .icon {
width: 60px;
height: 60px;
display: inline-block;
}
.button .icon svg {
height: 100%;
width: 100%;
fill: #a5a5a5;
}
.container input:checked+.button {
box-shadow: inset -2px -2px 0 #5e5e5e, inset 2px 2px 0 #1c1c1c;
border: 4px solid rgb(0, 215, 0);
animation: animeBorder 0.3s linear alternate-reverse infinite;
}
.container input:checked+.button .icon svg {
fill: rgb(0, 215, 0);
animation: animeFill 0.3s linear alternate-reverse infinite;
}
@keyframes animeFill {
to {
fill: rgba(0, 194, 0, 0.954);
}
}
@keyframes animeBorder {
to {
border-color: rgba(0, 175, 0, 0.878);
}
}
</style>
</head>
<body style="text-align: center; padding: 5% 5%; background-color: #212121;">
<video id="remoteVideo" width="100%" height="auto" autoplay controls></video>
<div class="container" style="margin-top: 10%; margin-left: 43.5%;">
<input type="checkbox" id="checkbox" />
<label for="checkbox" class="button" onclick="startPlay()">
<span class="icon">
<svg data-darkreader-inline-fill="" style="--darkreader-inline-fill: #000000;" fill=""
xml:space="preserve" viewBox="0 0 30.143 30.143" xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg" id="Capa_1" version="1.1" width="200px" height="200px">
<g stroke-width="0" id="SVGRepo_bgCarrier"></g>
<g stroke-linejoin="round" stroke-linecap="round" id="SVGRepo_tracerCarrier"></g>
<g id="SVGRepo_iconCarrier">
<g>
<path data-darkreader-inline-fill=""
d="M20.034,2.357v3.824c3.482,1.798,5.869,5.427,5.869,9.619c0,5.98-4.848,10.83-10.828,10.83 c-5.982,0-10.832-4.85-10.832-10.83c0-3.844,2.012-7.215,5.029-9.136V2.689C4.245,4.918,0.731,9.945,0.731,15.801 c0,7.921,6.42,14.342,14.34,14.342c7.924,0,14.342-6.421,14.342-14.342C29.412,9.624,25.501,4.379,20.034,2.357z">
</path>
<path data-darkreader-inline-fill=""
d="M14.795,17.652c1.576,0,1.736-0.931,1.736-2.076V2.08c0-1.148-0.16-2.08-1.736-2.08 c-1.57,0-1.732,0.932-1.732,2.08v13.496C13.062,16.722,13.225,17.652,14.795,17.652z">
</path>
</g>
</g>
</svg>
</span>
</label>
</div>
<script>
let websocket;
const queue = []; // chunks waiting for the SourceBuffer to exist / go idle
let buffer; // SourceBuffer, created in play() once the MediaSource opens
const wsUrl = "ws://localhost:8081?id=2";

/**
 * Connects to the relay server, asks the sender to (re)start capturing, and
 * feeds incoming media chunks into the MediaSource buffer.
 */
function startPlay() {
  // Assign the module-level `websocket` — the original shadowed it with a
  // local `const`, leaving the outer binding forever undefined.
  websocket = new WebSocket(wsUrl);
  websocket.binaryType = 'arraybuffer';
  // Give the sender a moment to start before wiring up playback.
  setTimeout(play, 2000);

  websocket.addEventListener('message', function (e) {
    if (typeof e.data === 'string') {
      return; // control/text messages are not media data
    }
    // Always enqueue first: chunks can arrive before play() has created
    // `buffer` (the original dereferenced an undefined `buffer` here), and
    // SourceBuffer rejects appends while a previous update is in flight.
    queue.push(e.data);
    if (buffer && !buffer.updating) {
      buffer.appendBuffer(queue.shift());
    }
  }, false);

  websocket.addEventListener('open', function (e) {
    console.log("send message to record restart")
    websocket.send(JSON.stringify({ toId: 1, content: "restart" }));
  }, false);
}
/**
 * Creates a MediaSource, attaches it to the <video> element, and configures a
 * SourceBuffer that drains the shared `queue` of incoming media chunks.
 */
function play() {
  const mediaSource = new MediaSource();
  const video = document.getElementById('remoteVideo');
  video.src = window.URL.createObjectURL(mediaSource);

  mediaSource.addEventListener('sourceopen', function (e) {
    // Must match the mimeType the sender's MediaRecorder produces.
    const mimeType = 'video/webm; codecs="vp8, opus"';
    if ('MediaSource' in window && MediaSource.isTypeSupported(mimeType)) {
      buffer = mediaSource.addSourceBuffer(mimeType);

      // Diagnostic logging of SourceBuffer state transitions.
      for (const evt of ['updatestart', 'update', 'updateend', 'error', 'abort']) {
        buffer.addEventListener(evt, function () {
          console.log(evt + ': ' + mediaSource.readyState);
        });
      }

      // Drain queued chunks one at a time. 'updateend' is the MSE-specified
      // point where an append has fully finished; the original registered two
      // separate 'update' listeners and drained in one of them.
      buffer.addEventListener('updateend', function () {
        if (queue.length > 0 && !buffer.updating) {
          buffer.appendBuffer(queue.shift());
        }
      });
    } else {
      console.error('Mimetype not supported', mimeType);
      // alert() takes a single argument — the original passed two and the
      // mime type was silently dropped from the message.
      alert(`Unsupported MIME type or codec: ${mimeType}`);
    }
  }, false);

  mediaSource.addEventListener('sourceended', function (e) {
    console.log('sourceended: ' + mediaSource.readyState);
  });
  mediaSource.addEventListener('sourceclose', function (e) {
    console.log('sourceclose: ' + mediaSource.readyState);
  });
  mediaSource.addEventListener('error', function (e) {
    console.log('error: ' + mediaSource.readyState);
  });
}
</script>
</body>
</html>
总结
通过WebRTC技术,前端浏览器可以捕获来自摄像头的视频流。WebRTC允许在Web浏览器中进行实时通信,包括视频和音频,它已被广泛用于构建视频会议、流媒体和实时监控系统。在前端,可以使用JavaScript代码创建一个WebRTC连接,并捕获摄像头的视频流。这个视频流随后通过WebRTC的数据通道被发送到后端服务器。
后端服务器接收到视频流后,可以进行实时处理和预览。同时,WebSocket技术在这里起到了关键作用。WebSocket实现了浏览器与服务器之间的全双工通信,可以实时地将视频流数据从服务器推送到客户端,或者将客户端的控制指令发送到服务器。这种通信方式减少了频繁建立和销毁连接带来的开销,提高了数据传输的效率和实时性。
在实际应用中,后端服务器可以运行一个WebSocket服务,该服务监听来自前端浏览器的连接请求。一旦连接建立,服务器就可以开始接收来自WebRTC的视频流,并通过WebSocket将这些视频流数据实时推送到客户端浏览器进行预览。同时,客户端也可以通过WebSocket向服务器发送控制指令,如调整摄像头角度、切换监控画面等。
总的来说,利用WebRTC和WebSocket技术,可以实现一个高效、实时的浏览器摄像头监控系统。这一系统不仅提高了监控系统的易用性和灵活性,还为我们提供了一种新的视频通信解决方案。在实际应用中,需要关注性能优化、安全性和跨浏览器兼容性等方面的问题,以获得更好的用户体验和实际应用效果。
标签:function,const,log,2px,Websocket,音视频,WebSocket,console,WebRTC From: https://blog.csdn.net/qq_36569837/article/details/144661865