First, get familiar with the call flow and the flow diagram (from ChatGPT's answer)
Implementation (low-level code; can be used as a demo to get familiar with the API)
Small demo
html
<template>
<div>
<video ref="localVideo" autoplay muted></video> <!-- 本地视频元素,用于显示本地视频 -->
<video ref="remoteVideo" autoplay></video> <!-- 远程视频元素,用于显示远程视频 -->
<button @click="startCall">开始视频</button> <!-- 点击按钮开始呼叫 -->
<button @click="endCall">结束视频</button> <!-- 点击按钮结束通话 -->
</div>
</template>
<script lang="ts">
import { ref, onMounted } from 'vue';
// import WebSocket from 'websocket'
export default {
setup() {
// Create refs for the local and remote video elements
const localVideo = ref(null); // local <video> element ref
const remoteVideo = ref(null); // remote <video> element ref
// Hold the local media stream and the RTCPeerConnection object
let localStream = null; // local media stream
let peerConnection = null; // RTCPeerConnection object
// Start the call
const startCall = async () => {
console.log('start');
try {
// Get the local media stream (video and audio)
localStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
// Bind the local stream to the local video element
localVideo.value.srcObject = localStream;
// Create the RTCPeerConnection
peerConnection = new RTCPeerConnection();
// Add the local stream's tracks to the peerConnection
localStream.getTracks().forEach(track => {
peerConnection.addTrack(track, localStream);
});
// When the remote stream arrives, bind it to the remote video element
peerConnection.ontrack = (event) => {
remoteVideo.value.srcObject = event.streams[0];
};
// Create an offer and set it as the local description
const offer = await peerConnection.createOffer();
await peerConnection.setLocalDescription(offer);
// Send the offer to the peer and wait for their answer.
// In a real application this step goes through a signaling server (see the sketch after this demo).
// Example: send the offer to the peer over a WebSocket
// socket.send(JSON.stringify({ type: 'offer', offer: offer }));
} catch (error) {
console.error('Error starting call:', error);
}
};
// End the call
const endCall = () => {
// Stop all tracks of the local stream
if (localStream) localStream.getTracks().forEach(track => track.stop());
// Close the peer connection
if (peerConnection) peerConnection.close();
// Reset the video elements' srcObject
localVideo.value.srcObject = null;
remoteVideo.value.srcObject = null;
};
// Initialization, e.g. connecting to the signaling server
onMounted(() => {
});
// Expose the refs and methods used by the template
return {
localVideo,
remoteVideo,
startCall,
endCall
};
}
};
</script>
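The demo stops after createOffer; to actually connect two peers, the offer/answer and ICE candidates still have to travel through a signaling channel. Below is a minimal sketch of that exchange over a WebSocket. The signaling URL and the { type, ... } message shapes are assumptions, not a fixed protocol; peerConnection refers to the variable from the demo above.
javascript
// Minimal signaling sketch (assumed message shapes: { type: 'offer' | 'answer' | 'candidate', ... })
const signaling = new WebSocket('wss://example.com/signaling'); // hypothetical signaling server

// Send our ICE candidates to the peer as they are gathered
peerConnection.onicecandidate = (event) => {
  if (event.candidate) {
    signaling.send(JSON.stringify({ type: 'candidate', candidate: event.candidate }));
  }
};

signaling.onmessage = async (event) => {
  const msg = JSON.parse(event.data);
  if (msg.type === 'offer') {
    // Callee side: apply the offer, create an answer and send it back
    await peerConnection.setRemoteDescription(msg.offer);
    const answer = await peerConnection.createAnswer();
    await peerConnection.setLocalDescription(answer);
    signaling.send(JSON.stringify({ type: 'answer', answer }));
  } else if (msg.type === 'answer') {
    // Caller side: apply the peer's answer
    await peerConnection.setRemoteDescription(msg.answer);
  } else if (msg.type === 'candidate') {
    // Either side: add the remote ICE candidate
    await peerConnection.addIceCandidate(msg.candidate);
  }
};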
References:
https://juejin.cn/post/7266417942182608955
https://juejin.cn/post/7170767923005358094
WebRTC one-to-one / many-to-many audio & video call tutorial (Bilibili, by red润): https://www.bilibili.com/video/BV1gK411v7wy/ (project repo: zou-hong-run/webrtc_one2one_many2many on github.com)
WebRTC getting-started guide (CSDN): https://blog.csdn.net/qq_47658204/article/details/130177016
Implementation (this version uses the pre-packaged sip-0.13.6.min.js script)
An encapsulated component. The code originally passed parameters parent-to-child and through Pinia; later, because video and audio calls had to work without login and outside the main project, the parameters are carried in the URL instead...
Known issue: after the project went live, the browser blocks microphone and camera access over http (it works locally, because getUserMedia requires a secure context).
Solution: switch to https.
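As a quick safeguard, here is a minimal sketch of a secure-context check before calling getUserMedia; the check itself is standard browser API, the warning text is made up.
javascript
// getUserMedia is only available in secure contexts (https or localhost)
if (!window.isSecureContext || !navigator.mediaDevices?.getUserMedia) {
  console.warn('Camera/microphone unavailable: serve the page over https (or use localhost).');
}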
Parent component
javascript
const handleVideoPhone = async (type, row) => {
// Store the object in Pinia
// const myObject = { type, row }
// await myStore.setMyObject(myObject)
// console.log(myStore.videoData, 'got it-------------');
// Named route
// router.push({ name: 'callVA', params: { userId: '123' } })
// message.alertError("The device is currently offline, video calls are unavailable!")
// user id, device id, call type // 0 = audio, 1 = video -------
const url = `https://www.XXXXXX.com/callVA?type=${type}&id=${row.aqmPkId}&deviceid=${row.deviceId}`
// const url = `http://localhost:8388/callVA?type=${type}&id=${row.aqmPkId}&deviceid=${row.deviceId}`
// console.log(url, 'url-------------------');
// window.location.href = url
// window.open(url, '_blank'); // open the link in a new window
window.open(url, 'video'); // open the link in a window named "video" (reuses the same tab)
}
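The same URL can also be assembled with URLSearchParams, which takes care of encoding the values. A small sketch (host and route copied from the snippet above; type and row are the handler's arguments):
javascript
// Alternative: build the same URL with URLSearchParams so the values are encoded automatically
const params = new URLSearchParams({
  type: String(type),
  id: String(row.aqmPkId),
  deviceid: String(row.deviceId)
})
const url = `https://www.XXXXXX.com/callVA?${params.toString()}`
window.open(url, 'video')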
Child component
html
<template>
<div style="width: 100%; height: 100%">
<!-- /* -webkit-background-clip: text; */ -->
<h1
style="
letter-spacing: 3px;
text-align: center;
background-image: linear-gradient(to top, #89ceeb, #00e7ee);
-webkit-text-fill-color: transparent;
background-clip: text;
"
>{{ status }}</h1
>
<div style="display: flex; align-items: center; justify-content: center">
<dv-decoration-6 v-if="status === 'In call...'" style="width: 300px; height: 30px" />
<!-- <dv-decoration-6 style="width:300px;height:30px;" /> -->
</div>
<div style="margin: 30px; text-align: center">
<!-- Start call -->
<el-button type="primary" round @click="call()">{{
v_type == '1' ? 'Start video' : 'Start audio'
}}</el-button>
<!-- Accept -->
<!-- <button @click="acceptCall">Accept video</button> -->
<!-- Hang up -->
<el-button type="danger" round @click="endCall">{{
v_type == '1' ? 'Hang up video' : 'Hang up audio'
}}</el-button>
<!-- <p id="regUA_msg"> user agent status </p> -->
</div>
<div style="display: flex; justify-content: space-evenly; align-items: center; width: 100%">
<!-- Local video element, used to display the local video -->
<!-- <div
style="
width: 800px;
height: 450px;
padding: 10px;
margin: 10px;
border: 3px solid gray;
border-radius: 5px;
box-sizing: border-box;
"
>
<video style="width: 100%; height: 100%" ref="localVideo" autoplay muted></video>
</div> -->
<!-- Remote video element, used to display the remote video -->
<div
style="
/* width: 800px;
height: 450px; */
width: 80%;
/* height: 100%; */
padding: 10px;
margin: 10px;
border: 3px solid gray;
border-radius: 5px;
box-sizing: border-box;
"
>
<video style="width: 100%; height: 100%" ref="remoteVideo" autoplay></video>
<!-- <audio v-else-if="v_type == '0'" controls>
<source ref="remoteVideo" type="audio/mp3" />
</audio> -->
</div>
</div>
</div>
</template>
<script setup lang="ts">
// import { useVideoStore } from '../../stores/myvideo' // pinia
// import { useRouter } from 'vue-router' // router
// @/api/drone/ws
// import * as wsApi from '../../api/ws/index' // api
import { onMounted, onUnmounted, ref } from 'vue'
import axios from 'axios'
// const myStore = useVideoStore() // pinia
// const router = useRouter()
// Get the object stored in Pinia
// let myObject = myStore.videoData
// console.log(myObject,myObject?.type, myObject?.row, 'ccccccccccccccccccc')
/**
 * Start a video call
 */
const status = ref<string>('Waiting to start call...')
const v_type = ref<string>() // 0 = audio, 1 = video
const userId = ref('') // user id
const deviceId = ref('') // user's device id
// const access_token = ref('') // token
const socket = ref() // new websocket
const activeMessage = ref<any>() // message received over the WebSocket
let timer: any // heartbeat timer
// Refs for the local and remote video elements -------------------------------
// const localVideo = ref<any>(null) // plays the local video
const remoteVideo = ref<any>(null) // plays the remote video
let userAgent: any // registered SIP user agent (UA)
let sipsession: any // current SIP session; if it exists, calling terminate() ends it
const sip_id = ref<string>() // sip_id of the user's device
const sip_host = ref<string>() // SIP server host returned at local login
// Called when the component loads
const handleVideoPhone = async (
type: string,
row: { id?: string | null; deviceId: any; aqmPkId?: any }
) => {
// console.log(type, row)
userId.value = row?.aqmPkId // user id, e.g. 1011
deviceId.value = row?.deviceId // user's device id xxxxxxxxxxxxx
v_type.value = type // 0 = audio, 1 = video -------
// Fetch the device's sip_id
// let res = await wsApi.getUserDeviceSipId(`${deviceId.value}`)
// console.log(res, '===========');
// if (res) {
// console.log(111);
// sip_id.value = res.sip_id
// console.log('user id------', userId.value, res)
// }
const res = await axios.get(
`https://www.XXXXXX.com:1443/admin-api/drone/ws/getUserDeviceSipId/${deviceId.value}`
)
if (res.status === 200) {
// console.log('device sip_id', res)
sip_id.value = res.data.data.sip_id
} else {
console.log('API not reachable')
}
websocketFun() // open the WebSocket connection
}
// --------------------------------------------------
// 3. Send the audio/video call request; on success, configure the call parameters, start the call, and handle the dial result
const call = async () => {
// Notify the device of the audio/video call request
const video = {
act: 'ma_set_sip_info', // request identifier
v_type: v_type.value, // 1 = video, 0 = audio
user_id: userId.value // user id
}
status.value = 'Connecting...'
// console.log(video);
socket.value.send(JSON.stringify(video))
// Over the long-lived connection, tell the device to start pushing its stream
socket.value.send(JSON.stringify({ act: 'ma_open_rtsp', device_id: deviceId.value }))
// console.log('device streaming')
// Over the long-lived connection, tell the device to stop pushing its stream
// socket.value.send(JSON.stringify({ act: 'ma_stop_rtsp', device_id: deviceId.value }))
// SIP server host
// var host = document.getElementById('sip_host').value
var host = sip_host.value
// Call target: the device's sip_id, or a room_id for a group call
// var to = document.getElementById('device_sipId').value
// var to = deviceId.value
var to = String(sip_id.value)
console.log(host, to, 'test')
sipsession = await userAgent.invite(to + '@' + host, {
sessionDescriptionHandlerOptions: {
constraints: {
audio: true,
// video: true // false for audio-only calls
video: v_type.value == '1' ? true : false // false for audio-only calls
}
}
})
// Fired when the call is accepted
sipsession.on('accepted', async function () {
status.value = 'In call...'
console.log('call accepted, starting the call')
// Check the peer connection to find out which tracks were added
var pc = await sipsession.sessionDescriptionHandler.peerConnection
// Collect the remote tracks
var remoteStream = new MediaStream()
// console.log(remoteStream, 'remote tracks')
pc.getReceivers().forEach(function (receiver) {
remoteStream.addTrack(receiver.track)
})
// remoteVideo is a <video> element; bind the remote tracks to it and play
remoteVideo.value.srcObject = remoteStream
remoteVideo.value.play()
if (pc.getSenders()) {
// console.log('start local video')
// var localStream = new MediaStream()
// // var localStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true })
// pc.getSenders().forEach(function (sender) {
// localStream.addTrack(sender.track)
// })
// // localVideo is a <video> element; bind the local tracks to it and play
// localVideo.value.srcObject = localStream
// localVideo.value.play()
}
})
// xu: get the local media stream (video and audio)
// xu: await getLocalStream()
}
// Get the local media stream (video and audio)
// const getLocalStream = async () => {
// const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true })
// // Bind the local stream to the local video element
// localVideo.value.srcObject = stream
// localVideo.value.play()
// localStream.value = stream
// return stream
// }
// Hang up the call
const endCall = async () => {
// If a SIP session exists, calling terminate() ends it
if (sipsession) {
await sipsession.terminate()
status.value = 'Call ended...'
}
//
// path: 'user-device', name: 'DroneUserDevice', // navigate back to the user-device binding page
console.log('hung up, closing the current window')
// router.push({ name: 'DroneUserDevice', params: { userId: '123' } })
// Close the current window
// window.close();
}
//--------------------------------------------------
// websocket: connect to the signaling server
const websocketFun = async () => {
const res = await axios.get('https://www.XXXXXX.com:1443/admin-api/drone/ws/getCurrentAqmUser')
if (res.status !== 200) {
console.log('accessToken--------------')
return
}
socket.value = new WebSocket('wss://XXXXXX.com/wss')
socket.value.onopen = async function () {
// socket.value = Sock // websocket
// Log in with the admin account
const message = {
act: 'ma_login', // admin login
// user_name: 'admin',
// access_token: (await wsApi.getCurrentAqmUserInfo()).accessToken // token returned by the API; it expires
access_token: res.data.data.accessToken // token returned by the API; it expires
}
console.log(message)
socket.value.send(JSON.stringify(message))
if (timer) clearInterval(timer) // clear the previous timer
timer = setInterval(() => {
// Heartbeat: request real-time device status over the long-lived connection
return socket.value.send(JSON.stringify({ act: 'ma_get_active_devices' }))
}, 5000)
}
socket.value.onmessage = async (event) => {
// Handle incoming messages
activeMessage.value = JSON.parse(event.data)
// console.log('message received:', activeMessage.value)
// Login response
if (activeMessage.value.cmd == 'ma_login') {
if (activeMessage.value.status == true) {
// console.log(activeMessage.value)
// 1. Get the SIP registration info
let sip_info = activeMessage.value.admin_info.sip_info
sip_host.value = sip_info.sip_host // SIP server host used when dialing
// sip_id.value = sip_info.sip_id // call target
// console.log('login ok: sip_info =', sip_info)
regUserAgent(sip_info) // register the UA
} else {
console.log(activeMessage.value.msg)
}
}
// Heartbeat: active devices and real-time status over the long-lived connection
else if (activeMessage.value.cmd == 'ma_get_active_devices') {
if (activeMessage.value.status == true) {
// console.log(activeMessage.value.msg)
// console.log('heartbeat:', activeMessage.value)
// let res = activeMessage.value.data.filter((item) => {
// // console.log(item.user_info.user_id)
// return item.user_info.user_id == userId.value
// })
// sip_id.value = res[0].user_info.sip_id // device sip_id from the heartbeat
// console.log(res, sip_id.value, 'ccccccccccccccccccccc')
} else {
console.log(activeMessage.value.msg)
}
}
// Response to the audio/video call request sent to the device
else if (activeMessage.value.cmd == 'ma_set_sip_info') {
if (activeMessage.value.status == true) {
// console.log('call request delivered to the device', activeMessage.value.msg, activeMessage.value)
// flag.value = true // flag: whether the video call was answered
} else {
console.log(activeMessage.value.msg)
// flag.value = false // flag: whether the video call was answered
// console.log(flag.value)
endCall() // hang up and return to the previous page
}
} else if (activeMessage.value.cmd == 'ma_open_rtsp') {
if (activeMessage.value.status == true) {
// console.log('device start-streaming acknowledged', activeMessage.value)
} else {
console.log(activeMessage.value.msg)
}
} else if (activeMessage.value.cmd == 'ma_stop_rtsp') {
if (activeMessage.value.status == true) {
// console.log('device stop-streaming acknowledged', activeMessage.value)
} else {
console.log(activeMessage.value.msg)
}
}
}
socket.value.onclose = function (event) {
clearInterval(timer) // stop the heartbeat
console.log('WebSocket connection closed', event.code, event.reason)
// The connection is closed; run cleanup here
}
socket.value.onerror = function (error) {
clearInterval(timer) // stop the heartbeat
console.error('WebSocket error:', error)
// Handle the WebSocket error
}
}
// 2. Register the UA with the parameters from sip_info and listen for the registration result
// Register the UA
const regUserAgent = async (sip_info) => {
let sip_id = sip_info.sip_id,
sip_pwd = sip_info.sip_pwd,
sip_host = sip_info.sip_host,
wss_url = sip_info.wss_url,
stun_host = sip_info.stun_host,
turn_host = sip_info.turn_host,
turn_pwd = sip_info.turn_pwd,
turn_user = sip_info.turn_user,
userAgentStatus = false
// Configuration
let config = {
uri: sip_id + '@' + sip_host, // this sip_id is the admin's sip_id
transportOptions: {
wsServers: [wss_url],
connectionTimeout: 30
},
authorizationUser: sip_id,
password: sip_pwd,
sessionDescriptionHandlerFactoryOptions: {
peerConnectionOptions: {
rtcConfiguration: {
iceServers: [
{ urls: 'stun:' + stun_host },
{
urls: 'turn:' + turn_host,
username: turn_user,
credential: turn_pwd
}
]
}
}
}
}
// Create the user agent
userAgent = new SIP.UA(config)
// Registration success handler
userAgent.on('registered', () => {
console.log('UA registered: registered ok')
userAgentStatus = true // mark registration as successful
// let regUA_msgEl: any = document.getElementById('regUA_msg')
// regUA_msgEl.innerText = 'User agent registered!'
})
// Registration failure handler
userAgent.on('registrationFailed', (response, cause) => {
console.log('UA registration failed: registrationFailed,', response, cause)
userAgentStatus = false
// let regUA_msgEl: any = document.getElementById('regUA_msg')
// regUA_msgEl.innerText = 'User agent registration failed!'
})
userAgent.on('invite', function (session) {
var url = session.remoteIdentity.uri.toString() + '--->call'
var isaccept = confirm(url)
if (isaccept) {
// Accept the incoming call
session.accept({
sessionDescriptionHandlerOptions: {
constraints: {
audio: true,
video: true
}
}
})
sipsession = session
// When the call is answered
session.on('accepted', function () {
// We need to check the peer connection to determine which track was added
var pc = session.sessionDescriptionHandler.peerConnection
// console.log(pc)
// console.log(pc.getLocalStreams())
// Gets remote tracks
var remoteStream = new MediaStream()
pc.getReceivers().forEach(function (receiver) {
remoteStream.addTrack(receiver.track)
})
remoteVideo.value.srcObject = remoteStream
remoteVideo.value.play()
if (pc.getSenders()) {
// var localStream = new MediaStream()
// pc.getSenders().forEach(function (sender) {
// localStream.addTrack(sender.track)
// })
// localVideo.value.srcObject = localStream
// localVideo.value.play()
}
})
} else {
// Reject the incoming call
session.reject()
}
})
}
// watch(videoVisible, (newVal, oldVal) => {
// // console.log(newVal,oldVal);
// if (newVal == false) {
// socket.value.close() // disconnect the WebSocket
// endCall() // close the video call
// }
// })
/** Initialization **/
onMounted(async () => {
// getList()
// websocketFun() //创建WebSocket连接
// id.value = route.params.id
// let myObject = myStore.videoData
// console.log(myObject, myObject?.type, myObject?.type, 'ccccccccccccccccccc')
// Read the current page URL
const urlParams = new URLSearchParams(window.location.search)
// Read the query parameter values
const type = urlParams.get('type')
const id = urlParams.get('id')
const deviceId = urlParams.get('deviceid')
const myObject = {
type,
row: {
aqmPkId: id,
deviceId
}
}
// type, id and deviceId now hold the corresponding values from the URL
handleVideoPhone(myObject.type, myObject.row)
// const ccc = 'xxxxxxxxxxxxxx';
// const url = `https://www.XXXXXX.com:1443/drone/ws/getUserDeviceSipId/?deviceId=${ccc}`;
// const res = await axios.get(url);
// if (res.status === 200) {
// console.log('device sip_id', res);
// sip_id.value = res.data.sip_id;
// } else {
// console.log('API not reachable');
// }
})
// Destroy the timer when the component is unmounted
onUnmounted(() => {
// socket.value.close() // close the WebSocket
clearInterval(timer)
})
</script>
<style lang="scss" scoped></style>
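The onUnmounted hook above only clears the heartbeat timer, and the socket close is left commented out. A possible fuller teardown, hinted at by the commented-out watch and onUnmounted lines, might look like this sketch (socket, sipsession and timer are the variables from the component above):
typescript
import { onUnmounted } from 'vue'

// Fuller teardown sketch: stop the heartbeat, end any ongoing SIP call, close the signaling socket
onUnmounted(() => {
  clearInterval(timer)                    // stop the heartbeat timer
  if (sipsession) sipsession.terminate()  // terminate an ongoing SIP session, if any
  if (socket.value) socket.value.close()  // close the WebSocket to the signaling server
})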