vue2纯前端对接海康威视摄像头实现实时视频预览

vue2纯前端对接海康威视摄像头实现实时视频预览

vue2纯前端对接海康威视摄像头实现实时视频预览

实现实时对海康威视摄像头进行取流的大致思路:首先对摄像头做端口映射(安装摄像头的师傅一般都会做),做了映射之后,就可以通过「IP+端口」的形式在浏览器中对摄像头进行实时浏览——这是海康威视自带的方式,但不能嵌入到自研系统。要把视频流画面嵌入自研系统,需要在满足以上前提的情况下,使用 webrtc-streamer 进行推流,然后在 Vue2 中接流并渲染到页面中。

一、环境准备

需要具备的前提条件,设备可在网页端进行浏览,且做以下设置

登录进行设置


在这里插入图片描述
设置视频编码格式


设置RTSP协议端口


至此摄像头设置已完成,接下来需要获取摄像头设备所在IP的rtsp链接,海康摄像头的rtsp链接获取见官方说明:海康威视摄像头取流说明
可以使用 VLC 取流软件验证 rtsp 链接是否连通,VLC 官方下载地址见下。

VLC官网


打开网络串流


输入取流地址


在这里插入图片描述


至此准备工作就完成了,接下来就是敲代码进行集成阶段了

二、代码集成

1.1 准备webrtcstreamer.js,粘贴即用,不用做任何修改

var WebRtcStreamer = (function () {

  /**
   * Interface with the WebRTC-streamer HTTP API.
   * @constructor
   * @param {string|HTMLVideoElement} videoElement - id of the video element tag, or the element itself
   * @param {string} srvurl - url of webrtc-streamer (default is current location)
   */
  var WebRtcStreamer = function WebRtcStreamer(videoElement, srvurl) {
    if (typeof videoElement === "string") {
      this.videoElement = document.getElementById(videoElement);
    } else {
      this.videoElement = videoElement;
    }
    this.srvurl = srvurl || location.protocol + "//" + window.location.hostname + ":" + window.location.port;
    this.pc = null;
    this.mediaConstraints = { offerToReceiveAudio: true, offerToReceiveVideo: true };
    this.iceServers = null;
    // Candidates gathered before the remote description is set are queued here.
    this.earlyCandidates = [];
  };

  /**
   * Turn a non-2xx fetch Response into a thrown Error; pass through otherwise.
   * @param {Response} response
   * @returns {Response} the same response when response.ok is true
   * @throws {Error} with the response statusText when not ok
   */
  WebRtcStreamer.prototype._handleHttpErrors = function (response) {
    if (!response.ok) {
      throw new Error(response.statusText);
    }
    return response;
  };

  /**
   * Connect a WebRTC Stream to videoElement.
   * @param {string} videourl - id of WebRTC video stream
   * @param {string} audiourl - id of WebRTC audio stream
   * @param {string} options - options of WebRTC call
   * @param {MediaStream} localstream - local stream to send
   * @param {string} prefmime - preferred mime type (e.g. "video/H264")
   */
  WebRtcStreamer.prototype.connect = function (videourl, audiourl, options, localstream, prefmime) {
    this.disconnect();
    // Fetch the ICE server list once; it is cached on the instance afterwards.
    if (!this.iceServers) {
      console.log("Get IceServers");
      fetch(this.srvurl + "/api/getIceServers")
        .then(this._handleHttpErrors)
        .then((response) => response.json())
        .then((response) => this.onReceiveGetIceServers(response, videourl, audiourl, options, localstream, prefmime))
        .catch((error) => this.onError("getIceServers " + error));
    } else {
      this.onReceiveGetIceServers(this.iceServers, videourl, audiourl, options, localstream, prefmime);
    }
  };

  /**
   * Disconnect the WebRTC stream: stop all tracks, tell the server to hang
   * up this peer, and close the RTCPeerConnection.
   */
  WebRtcStreamer.prototype.disconnect = function () {
    if (this.videoElement?.srcObject) {
      this.videoElement.srcObject.getTracks().forEach((track) => {
        track.stop();
        this.videoElement.srcObject.removeTrack(track);
      });
    }
    if (this.pc) {
      // Best-effort server-side hangup; errors are only reported, not fatal.
      fetch(this.srvurl + "/api/hangup?peerid=" + this.pc.peerid)
        .then(this._handleHttpErrors)
        .catch((error) => this.onError("hangup " + error));
      try {
        this.pc.close();
      } catch (e) {
        console.log("Failure close peer connection:" + e);
      }
      this.pc = null;
    }
  };

  /**
   * Rewrite an SDP so that, in the media section matching prefmime's kind,
   * only payload types whose rtpmap mentions the preferred codec remain.
   * Sections with no matching codec (and non-matching kinds) are untouched.
   * @param {string} sdp - the SDP text to filter
   * @param {string} prefmime - preferred mime, e.g. "video/H264"
   * @returns {string} the filtered SDP
   */
  WebRtcStreamer.prototype.filterPreferredCodec = function (sdp, prefmime) {
    const lines = sdp.split('\n');
    const [prefkind, prefcodec] = prefmime.toLowerCase().split('/');
    let sdpSections = [];
    let currentSection = [];

    // Group lines into sections: each "m=" line starts a new section.
    lines.forEach((line) => {
      if (line.startsWith('m=')) {
        if (currentSection.length) {
          sdpSections.push(currentSection);
        }
        currentSection = [line];
      } else {
        currentSection.push(line);
      }
    });
    sdpSections.push(currentSection);

    // Process each section independently.
    const processedSections = sdpSections.map((section) => {
      const firstLine = section[0];
      if (!firstLine.startsWith('m=' + prefkind)) {
        return section.join('\n');
      }
      // Collect payload types whose rtpmap mentions the preferred codec.
      const rtpLines = section.filter((line) => line.startsWith('a=rtpmap:'));
      const preferredPayloads = rtpLines
        .filter((line) => line.toLowerCase().includes(prefcodec))
        .map((line) => line.split(':')[1].split(' ')[0]);
      if (preferredPayloads.length === 0) {
        // Preferred codec not offered: leave the section unchanged.
        return section.join('\n');
      }
      // Rebuild the m= line so it lists only the preferred payload types.
      const mLine = firstLine.split(' ');
      const newMLine = [...mLine.slice(0, 3), ...preferredPayloads].join(' ');
      // Drop rtpmap/fmtp/rtcp-fb attributes of the removed payload types.
      const filteredLines = section.filter((line) => {
        if (line === firstLine) return false;
        if (line.startsWith('a=rtpmap:')) {
          return preferredPayloads.some((payload) => line.startsWith(`a=rtpmap:${payload}`));
        }
        if (line.startsWith('a=fmtp:') || line.startsWith('a=rtcp-fb:')) {
          return preferredPayloads.some((payload) => line.startsWith(`a=${line.split(':')[0].split('a=')[1]}:${payload}`));
        }
        return true;
      });
      return [newMLine, ...filteredLines].join('\n');
    });
    return processedSections.join('\n');
  };

  /*
   * GetIceServers callback: create the peer connection, build the /api/call
   * url, create and (optionally) codec-filter the offer, then POST it.
   */
  WebRtcStreamer.prototype.onReceiveGetIceServers = function (iceServers, videourl, audiourl, options, stream, prefmime) {
    this.iceServers = iceServers;
    this.pcConfig = iceServers || { "iceServers": [] };
    try {
      this.createPeerConnection();

      let callurl = this.srvurl + "/api/call?peerid=" + this.pc.peerid + "&url=" + encodeURIComponent(videourl);
      if (audiourl) {
        callurl += "&audiourl=" + encodeURIComponent(audiourl);
      }
      if (options) {
        callurl += "&options=" + encodeURIComponent(options);
      }
      if (stream) {
        this.pc.addStream(stream);
      }

      // Clear early candidates left over from a previous call.
      this.earlyCandidates.length = 0;

      // Create the offer.
      this.pc.createOffer(this.mediaConstraints).then((sessionDescription) => {
        console.log("Create offer:" + JSON.stringify(sessionDescription));
        console.log(`video codecs:${Array.from(new Set(RTCRtpReceiver.getCapabilities("video")?.codecs?.map((codec) => codec.mimeType)))}`);
        console.log(`audio codecs:${Array.from(new Set(RTCRtpReceiver.getCapabilities("audio")?.codecs?.map((codec) => codec.mimeType)))}`);

        if (prefmime != undefined) {
          // Set preferred codec; default the kind to "video" when a bare
          // codec name (e.g. "H264") was given instead of "kind/codec".
          let [prefkind] = prefmime.split('/');
          if (prefkind != "video" && prefkind != "audio") {
            prefkind = "video";
            prefmime = prefkind + "/" + prefmime;
          }
          console.log("sdp:" + sessionDescription.sdp);
          sessionDescription.sdp = this.filterPreferredCodec(sessionDescription.sdp, prefmime);
          console.log("sdp:" + sessionDescription.sdp);
        }

        this.pc.setLocalDescription(sessionDescription).then(() => {
          fetch(callurl, { method: "POST", body: JSON.stringify(sessionDescription) })
            .then(this._handleHttpErrors)
            .then((response) => response.json())
            .catch((error) => this.onError("call " + error))
            .then((response) => this.onReceiveCall(response))
            .catch((error) => this.onError("call " + error));
        }, (error) => {
          console.log("setLocalDescription error:" + JSON.stringify(error));
        });
      }, (error) => {
        alert("Create offer error:" + JSON.stringify(error));
      });
    } catch (e) {
      this.disconnect();
      alert("connect error: " + e);
    }
  };

  /** Poll the server for its ICE candidates for this peer. */
  WebRtcStreamer.prototype.getIceCandidate = function () {
    fetch(this.srvurl + "/api/getIceCandidate?peerid=" + this.pc.peerid)
      .then(this._handleHttpErrors)
      .then((response) => response.json())
      .then((response) => this.onReceiveCandidate(response))
      .catch((error) => this.onError("getIceCandidate " + error));
  };

  /*
   * Create the RTCPeerConnection and wire up its callbacks.
   */
  WebRtcStreamer.prototype.createPeerConnection = function () {
    console.log("createPeerConnection config: " + JSON.stringify(this.pcConfig));
    this.pc = new RTCPeerConnection(this.pcConfig);
    let pc = this.pc;
    // peerid is only an opaque correlation token for the HTTP API.
    pc.peerid = Math.random();

    pc.onicecandidate = (evt) => this.onIceCandidate(evt);
    pc.onaddstream = (evt) => this.onAddStream(evt);
    pc.oniceconnectionstatechange = (evt) => {
      console.log("oniceconnectionstatechange state: " + pc.iceConnectionState);
      if (this.videoElement) {
        // Dim the video element to reflect connection health.
        if (pc.iceConnectionState === "connected") {
          this.videoElement.style.opacity = "1.0";
        } else if (pc.iceConnectionState === "disconnected") {
          this.videoElement.style.opacity = "0.25";
        } else if ((pc.iceConnectionState === "failed") || (pc.iceConnectionState === "closed")) {
          this.videoElement.style.opacity = "0.5";
        } else if (pc.iceConnectionState === "new") {
          this.getIceCandidate();
        }
      }
    };
    pc.ondatachannel = function (evt) {
      console.log("remote datachannel created:" + JSON.stringify(evt));
      evt.channel.onopen = function () {
        console.log("remote datachannel open");
        this.send("remote channel openned");
      };
      evt.channel.onmessage = function (event) {
        console.log("remote datachannel recv:" + JSON.stringify(event.data));
      };
    };

    try {
      let dataChannel = pc.createDataChannel("ClientDataChannel");
      dataChannel.onopen = function () {
        console.log("local datachannel open");
        this.send("local channel openned");
      };
      dataChannel.onmessage = function (evt) {
        console.log("local datachannel recv:" + JSON.stringify(evt.data));
      };
    } catch (e) {
      console.log("Cannot create datachannel error: " + e);
    }

    console.log("Created RTCPeerConnection with config: " + JSON.stringify(this.pcConfig));
    return pc;
  };

  /*
   * RTCPeerConnection IceCandidate callback: send the candidate now if the
   * remote description is set, otherwise queue it in earlyCandidates.
   */
  WebRtcStreamer.prototype.onIceCandidate = function (event) {
    if (event.candidate) {
      if (this.pc.currentRemoteDescription) {
        this.addIceCandidate(this.pc.peerid, event.candidate);
      } else {
        this.earlyCandidates.push(event.candidate);
      }
    } else {
      console.log("End of candidates.");
    }
  };

  /** POST one local ICE candidate to the server. */
  WebRtcStreamer.prototype.addIceCandidate = function (peerid, candidate) {
    fetch(this.srvurl + "/api/addIceCandidate?peerid=" + peerid, { method: "POST", body: JSON.stringify(candidate) })
      .then(this._handleHttpErrors)
      .then((response) => response.json())
      .then((response) => { console.log("addIceCandidate ok:" + response); })
      .catch((error) => this.onError("addIceCandidate " + error));
  };

  /*
   * RTCPeerConnection AddTrack callback: attach the remote stream to the
   * video element and start playback (showing controls if autoplay fails).
   */
  WebRtcStreamer.prototype.onAddStream = function (event) {
    console.log("Remote track added:" + JSON.stringify(event));
    this.videoElement.srcObject = event.stream;
    let promise = this.videoElement.play();
    if (promise !== undefined) {
      promise.catch((error) => {
        console.warn("error:" + error);
        this.videoElement.setAttribute("controls", true);
      });
    }
  };

  /*
   * AJAX /call callback: apply the remote description, flush queued early
   * candidates, then start polling for server candidates.
   */
  WebRtcStreamer.prototype.onReceiveCall = function (dataJson) {
    console.log("offer: " + JSON.stringify(dataJson));
    let descr = new RTCSessionDescription(dataJson);
    this.pc.setRemoteDescription(descr).then(() => {
      console.log("setRemoteDescription ok");
      while (this.earlyCandidates.length) {
        let candidate = this.earlyCandidates.shift();
        this.addIceCandidate(this.pc.peerid, candidate);
      }
      this.getIceCandidate();
    }, (error) => {
      console.log("setRemoteDescription error:" + JSON.stringify(error));
    });
  };

  /*
   * AJAX /getIceCandidate callback: add each remote candidate, then call
   * addIceCandidate() with no argument to signal end-of-candidates.
   */
  WebRtcStreamer.prototype.onReceiveCandidate = function (dataJson) {
    console.log("candidate: " + JSON.stringify(dataJson));
    if (dataJson) {
      for (let i = 0; i < dataJson.length; i++) {
        let candidate = new RTCIceCandidate(dataJson[i]);
        console.log("Adding ICE candidate :" + JSON.stringify(candidate));
        this.pc.addIceCandidate(candidate).then(() => {
          console.log("addIceCandidate OK");
        }, (error) => {
          console.log("addIceCandidate error:" + JSON.stringify(error));
        });
      }
      this.pc.addIceCandidate();
    }
  };

  /*
   * AJAX callback for errors: log only; override to customize handling.
   */
  WebRtcStreamer.prototype.onError = function (status) {
    console.log("onError:" + status);
  };

  return WebRtcStreamer;
})();

// Export for both browser globals and CommonJS consumers.
if (typeof window !== 'undefined' && typeof window.document !== 'undefined') {
  window.WebRtcStreamer = WebRtcStreamer;
}
if (typeof module !== 'undefined' && typeof module.exports !== 'undefined') {
  module.exports = WebRtcStreamer;
}

1.2 封装视频组件,在需要视频的地方引入此封装的视频组件即可,也是粘贴即用,注意其中import的webrtcstreamer.js的地址替换为自己的

<template>
  <div class="rtsp_video_container">
    <!-- Single stream: one full-size video element (id video_0). -->
    <div v-if="videoUrls.length === 1" class="rtsp_video single-video">
      <video :id="'video_0'" controls autoPlay muted width="100%" height="100%" style="object-fit: fill"></video>
    </div>
    <!-- Multiple streams: one tile per url; ids video_0..video_n match initializeStreams(). -->
    <div
      v-for="(videoUrl, index) in videoUrls"
      v-if="videoUrls.length > 1"
      :key="index"
      class="rtsp_video"
    >
      <video :id="'video_' + index" controls autoPlay muted width="100%" height="100%" style="object-fit: fill"></video>
    </div>
  </div>
</template>

<script>
// NOTE: replace this path with wherever webrtcstreamer.js lives in your project.
import WebRtcStreamer from '../untils/webrtcstreamer';

export default {
  name: 'RtspVideo',
  props: {
    // RTSP urls, one per video tile.
    videoUrls: {
      type: Array,
      required: true,
    },
  },
  data() {
    return {
      // Fixed to localhost on purpose: this is the address of the local
      // webrtc-streamer process this page talks to (see section 1.3).
      cameraIp: 'localhost:8000',
      // One WebRtcStreamer instance per video element.
      webRtcServers: [],
    };
  },
  mounted() {
    this.initializeStreams();
  },
  watch: {
    // Re-initialize all streams whenever the url list actually changes.
    // NOTE(review): the new <video> elements may not be rendered yet when
    // initializeStreams() runs here; the usage example works around this by
    // keying the component on the url list — confirm if relying on the watcher alone.
    videoUrls: {
      handler(newUrls, oldUrls) {
        if (newUrls.length !== oldUrls.length || !this.isSameArray(newUrls, oldUrls)) {
          this.resetStreams();
          this.initializeStreams();
        }
      },
      deep: true,
    },
    cameraIp(newIp, oldIp) {
      if (newIp !== oldIp) {
        this.resetStreams();
        this.initializeStreams();
      }
    },
  },
  methods: {
    // Create one WebRtcStreamer per url and connect it to its <video> tag.
    initializeStreams() {
      if (this.webRtcServers.length === 0) {
        this.videoUrls.forEach((videoUrl, index) => {
          const videoElement = document.getElementById(`video_${index}`);
          const webRtcServer = new WebRtcStreamer(videoElement, `http://${this.cameraIp}`);
          this.webRtcServers.push(webRtcServer);
          webRtcServer.connect(videoUrl, null, 'rtptransport=tcp', null);
        });
      }
    },
    // Shallow element-wise comparison of two url arrays.
    isSameArray(arr1, arr2) {
      return arr1.length === arr2.length && arr1.every((value, index) => value === arr2[index]);
    },
    // Disconnect and drop every WebRtcStreamer instance.
    resetStreams() {
      this.webRtcServers.forEach((webRtcServer) => {
        if (webRtcServer) {
          webRtcServer.disconnect();
        }
      });
      this.webRtcServers = [];
    },
  },
  beforeDestroy() {
    // Clean up peer connections on teardown to avoid leaks.
    this.resetStreams();
  },
};
</script>

<style lang="less" scoped>
.rtsp_video_container {
  display: flex;
  flex-wrap: wrap;
  gap: 10px;
  justify-content: space-between;
}
.rtsp_video {
  flex: 1 1 48%;
  height: 225px;
  max-width: 48%;
  background: #000;
  border-radius: 8px;
  overflow: hidden;
}
.single-video {
  flex: 1 1 100%;
  height: 100%;
  max-width: 100%;
  background: #000;
}
video {
  width: 100%;
  height: 100%;
  object-fit: cover;
}
</style>

父组件中进行此视频组件的引用示例:

<template>
  <div style="margin-top: 10px;width: 100%;height: 100%;">
    <!-- Keying on the joined url list forces a full remount (and thus fresh
         video elements + streams) whenever the selection changes. -->
    <rtsp-video :videoUrls="selectedUrls" :key="selectedUrls.join(',')"></rtsp-video>
  </div>
</template>

<script>
import RtspVideo from "../views/video";

export default {
  components: { RtspVideo },
  data() {
    return {
      // Hikvision RTSP urls: replace user/password/IP/port with your own.
      selectedUrls: [
        'rtsp://user:[email protected]:xxxx/Streaming/Channels/101',
        'rtsp://user:[email protected]:xxxx/Streaming/Channels/201',
      ],
    };
  },
};
</script>

1.3 以上完成之后,需要观看视频的本地PC设备启动webrtc-streamer插件

webrtc-streamer插件下载webrtc-streamer

下载图中的版本,标题1.1中对应的js版本就是此版本


下载解压完成之后,其中的exe和js是配套的,插件脚本在webrtc-streamer-v0.8.13-dirty-Windows-AMD64-Release\bin目录下,对应的webrtcstreamer.js在webrtc-streamer-v0.8.13-dirty-Windows-AMD64-Release\share\webrtc-streamer\html目录下,只需要webrtc-streamer.exe和webrtcstreamer.js即可,也可以直接用博主在上面提供的,切记一定要配套,不然可能画面取不出。

实现效果图见下:

在这里插入图片描述

至此海康威视实时视频预览功能已完成,写作不易,如果对您有帮助,恳请保留一个赞。

补充:
如果启动webrtc-streamer.exe导致客户端卡顿 或者 需要更改webrtc-streamer.exe的端口号,可参考下图

在这里插入图片描述
在这里插入图片描述


视频监控观看插件.bat:
@echo off
rem Launch the local webrtc-streamer service that the Vue page connects to.
rem NOTE(review): "cd C:" only selects drive C's current directory; if
rem webrtc-streamer.exe is not located there this will fail to find it --
rem confirm and point this at the exe's actual folder (the ...\bin directory).
cd C:
rem Start the streamer listening on all interfaces, port 8124.
rem NOTE(review): the Vue component above defaults to localhost:8000; if you
rem use this port, update cameraIp in the component to match -- verify.
start webrtc-streamer.exe -o -H 0.0.0.0:8124
exit

Read more

前端如何渲染 Markdown 格式:从基础到实战全指南

在前端开发中,我们常需要将 Markdown 文本(如接口文档、博客内容、用户评论)渲染成美观的 HTML 页面。不同于纯文本展示,Markdown 渲染需要借助专门的库解析语法规则,再结合样式实现可视化。本文将聚焦 “如何在前端页面中渲染 Markdown 内容”,从主流库选型到实战案例,带你快速掌握核心方法。 一、前端渲染 Markdown 的核心逻辑 Markdown 本质是 “轻量级标记语言”,无法直接被浏览器识别。前端渲染的核心流程是: 1. 解析:通过库将 Markdown 文本(如 # 标题)转换为 HTML 字符串(如 <h1>标题</h1>); 2. 渲染:将解析后的

Rust与WebAssembly深度实战——将高性能Rust代码运行在浏览器与Node.js

Rust与WebAssembly深度实战——将高性能Rust代码运行在浏览器与Node.js

Rust与WebAssembly深度实战——将高性能Rust代码运行在浏览器与Node.js 一、学习目标与重点 1.1 学习目标 1. 理解WebAssembly基础:深入掌握WebAssembly(Wasm/Wasmtime)的核心定义、运行机制、与JavaScript的性能对比 2. 掌握Rust到Wasm的编译:熟练使用wasm-pack、cargo-web等工具链,完成Rust代码到Wasm模块的编译、打包、优化 3. 精通Rust与JavaScript交互:实现双向交互(Rust调用JS函数、JS调用Rust函数),处理复杂数据类型(数组、对象、字符串),管理内存(Wasm线性内存的分配与释放) 4. 开发真实Wasm应用:编写浏览器端高性能任务(Canvas图像滤镜、WebGL计算辅助)、Node.js端计算密集型任务(图像处理、加密解密、数据压缩) 5. 优化Wasm模块:使用wasm-opt工具优化Wasm体积,学习代码分割、懒加载、模块缓存

前端微前端:别让你的应用变成巨石应用

前端微前端:别让你的应用变成巨石应用 毒舌时刻 这应用做得跟巨石似的,想改个功能都得动全身。 各位前端同行,咱们今天聊聊前端微前端。别告诉我你还在维护一个巨大的单体应用,那感觉就像在没有分区的大房子里生活——能住,但乱得要命。 为什么你需要微前端 最近看到一个项目,代码量超过 100 万行,构建时间超过 10 分钟,团队协作困难。我就想问:你是在做应用还是在做代码仓库? 反面教材 // 反面教材:单体应用 // App.jsx import React from 'react'; import Header from './components/Header'; import Sidebar from './components/Sidebar'; import Dashboard from

Rust WebAssembly与Three.js结合的3D数据可视化实战:高性能粒子系统

Rust WebAssembly与Three.js结合的3D数据可视化实战:高性能粒子系统

Rust WebAssembly与Three.js结合的3D数据可视化实战:高性能粒子系统 一、引言 💡3D数据可视化是现代Web应用的高级场景之一,广泛应用于数据分析、科学计算、游戏开发、虚拟仿真等领域。传统的JavaScript+WebGL/Three.js方案在处理大量数据(如百万级粒子)时,性能往往难以满足要求。Rust WebAssembly的高性能和内存安全特性,使得它非常适合优化3D数据可视化的核心算法,提高应用的响应速度和渲染帧率。 本章将深入探讨Rust WebAssembly与Three.js结合的3D数据可视化开发,介绍WebGL/Three.js的基本概念,讲解Rust Wasm与WebGL的交互方式,重点实现一个高性能粒子系统,支持粒子的创建、更新、删除,以及各种动画效果。最后,本章还将介绍如何优化粒子系统的性能,如何打包和部署项目。 二、WebGL与Three.js基础 2.1 WebGL概述 WebGL是一种基于OpenGL ES的Web图形库,允许开发者在Web浏览器中使用GPU加速渲染3D图形。WebGL的核心是着色器语言(GLSL)