Implementing One-to-One H5 Video Calls with uniapp + unipush 2.0 + WebRTC

sendvideocall.vue: the page that initiates the video call invitation.

<template>
	<view class="content">
		<image :src="getFile(friendInfo.headImage)" style="height: 100vh;" mode="heightFix"></image>
		<view class="ftbg">
			<view style="margin-top: 280rpx;">
				<u-avatar :src="getFile(friendInfo.headImage)" size="100" style="margin: 10rpx;"
					mode="square"></u-avatar>
			</view>
			<view style="margin-top: 10rpx;color: white;font-size: 28rpx;">
				{{ friendInfo.nickName }}
			</view>
			<view style="margin-top: 130rpx;color: white;">
				正在等待对方回应...
			</view>
		</view>

		<view
			style="position: fixed;bottom: 100rpx;left: 0rpx;width: 100%;display: flex;justify-content: center;align-items: center;">
			<image @click="cancel" src="https://www.minglina.com/oa-boot/api/v1/common/static/icon_guad.png"
				style="width: 120rpx;height: 120rpx;border-radius: 60rpx;" mode=""></image>
		</view>
	</view>
</template>

<script>
	import {
		mapGetters,
	} from 'vuex';
	import {
		apiWebRtcCall,
		apiWebRtcCancel,
		apiWebRtcFailed
	} from '@/common/http.api.js'
	import {
		getTerminalType
	} from '@/common/utils/utils.js'

	export default {
		data() {
			return {
				id: '',
				mid: '',
				friend: "",
				currentCallId: '',
				userId: "",
				mode: "",
				terminalType: "",
				timer: null
			}
		},
		computed: {
			...mapGetters(['findFriend', 'findChatIdx']),
			friendInfo() {
				return this.findFriend(this.currentCallId);
			},
		},
		onBackPress(options) {
			if (options.from && options.from == 'backbutton') {
				return true
			}
			return false
		},
		onUnload() {
			uni.$off('handleReject')
			if (this.timer) {
				clearTimeout(this.timer)
			}
		},
		onLoad({
			mode,
			currentCallId
		}) {
			this.mode = mode
			this.currentCallId = currentCallId
			this.userId = this.$store.state.userInfo.id
			this.terminalType = getTerminalType();

			// 发起通话邀请
			this.sendCallInvitation();

			uni.$on("handleReject", () => {
				uni.showToast({
					title: "对方拒绝通话",
					icon: "none"
				})
				uni.navigateBack()
			})
		},
		methods: {
			async sendCallInvitation() {
				console.log('发送通话邀请给:', this.currentCallId);
				apiWebRtcCall({
					toUserId: this.currentCallId,
					mode: this.mode,
					terminal: this.terminalType
				}, {
					custom: {
						catch: true
					}
				}).then(() => {
					this.timer = setTimeout(async () => {
						await apiWebRtcFailed({
							toUserId: this.currentCallId,
							reason: "对方无应答",
							terminal: this.terminalType
						})
						uni.showToast({
							title: "对方无应答",
							icon: "none"
						})
						setTimeout(async () => {
							uni.navigateBack()
						}, 2000)
					}, 30000)
				}).catch(() => {
					setTimeout(() => {
						uni.navigateBack()
					}, 2000)
				})
			},

			cancel: async function() {
				await apiWebRtcCancel({
					toUserId: this.currentCallId,
					terminal: this.terminalType
				})
				uni.navigateBack()
			},
		}
	}
</script>

<style scoped>
	.content {
		width: 100%;
		height: 100vh;
		display: flex;
		flex-direction: column;
		align-items: center;
		justify-content: center;
		background: linear-gradient(to bottom, #666, #222);
	}

	.ftbg {
		width: 100%;
		height: 100vh;
		top: 0rpx;
		left: 0rpx;
		background: #00000099;
		backdrop-filter: blur(20px);
		position: fixed;
		display: flex;
		flex-direction: column;
		align-items: center;
	}
</style>
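
The apiWebRtcCall, apiWebRtcCancel and apiWebRtcFailed wrappers are imported from @/common/http.api.js, which the post does not include. Below is a minimal sketch of what that module might look like, assuming a uni.request-based helper and the /api/v1/webrtc/private endpoints exposed by the backend controller shown at the end of the post; the base URL, token header and response shape are assumptions, and the other apiWebRtc* wrappers used by the later pages follow the same pattern.

// Hypothetical @/common/http.api.js: a thin uni.request wrapper around the
// backend signaling endpoints (see ApiWebrtcPrivateController below).
const BASE_URL = 'https://www.minglina.com/oa-boot'; // assumed, adjust to your deployment

function post(url, data, options = {}) {
	// options (e.g. { custom: { catch: true } }) is accepted for call-site
	// compatibility but ignored in this sketch.
	return new Promise((resolve, reject) => {
		uni.request({
			url: BASE_URL + url,
			method: 'POST',
			data,
			header: { 'X-Access-Token': uni.getStorageSync('token') }, // assumed auth header
			success: (res) => (res.data && res.data.success ? resolve(res.data) : reject(res.data)),
			fail: reject
		});
	});
}

export const apiWebRtcCall = (data, options) => post('/api/v1/webrtc/private/call', data, options);
export const apiWebRtcCancel = (data) => post('/api/v1/webrtc/private/cancel', data);
export const apiWebRtcFailed = (data) => post('/api/v1/webrtc/private/failed', data);
export const apiWebRtcAccept = (data) => post('/api/v1/webrtc/private/accept', data);
export const apiWebRtcReject = (data) => post('/api/v1/webrtc/private/reject', data);
export const apiWebRtcHandleOffer = (data) => post('/api/v1/webrtc/private/handleOffer', data);
export const apiWebRtcHandleAnswer = (data) => post('/api/v1/webrtc/private/handleAnswer', data);
export const apiWebRtcCandidate = (data) => post('/api/v1/webrtc/private/candidate', data);
export const apiWebRtcHandup = (data) => post('/api/v1/webrtc/private/handup', data);
export const apiWebRtcHeartbeat = (data) => post('/api/v1/webrtc/private/heartbeat', data);
export const apiWebRtcChangeVideoAudio = (data) => post('/api/v1/webrtc/private/changeVideoAudio', data);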

videocallrev.vue: the incoming-call page where the callee answers or rejects the call.

<template>
	<view class="content">
		<image :src="getFile(friendInfo.headImage)" style="height: 100vh;" mode="heightFix"></image>
		<view class="ftbg">
			<view style="margin-top: 280rpx;">
				<u-avatar :src="getFile(friendInfo.headImage)" size="100" style="margin: 10rpx;"
					mode="square"></u-avatar>
			</view>
			<view style="margin-top: 10rpx;color: white;font-size: 28rpx;">
				{{friendInfo.nickName}}
			</view>
			<view style="margin-top: 130rpx;color: white;">
				邀请你{{mode =='video'?'视频通话':(mode =='voice'?'语音通话':'')}}
			</view>
		</view>

		<view
			style="position: fixed;bottom: 100rpx;left: 0rpx;width: 100%;display: flex;justify-content: center;align-items: center;">
			<image @click="reject" src="https://www.minglina.com/oa-boot/api/v1/common/static/icon_guad.png"
				style="width: 120rpx;height: 120rpx;border-radius: 60rpx;" mode=""></image>
			<image @click="agree" src="https://www.minglina.com/oa-boot/api/v1/common/static/icon_hup.png"
				style="width: 120rpx;height: 120rpx;border-radius: 60rpx;margin-left: 240rpx;" mode=""></image>
		</view>
	</view>
</template>

<script>
	import {
		mapGetters
	} from 'vuex';
	import {
		apiWebRtcReject
	} from '@/common/http.api.js'

	import {
		getTerminalType
	} from '@/common/utils/utils.js'

	import {
		apiWebRtcAccept,
	} from '@/common/http.api.js'

	export default {
		data() {
			return {
				currentCallId: "",
				mode: "",
				terminalType: ""
			}
		},
		computed: {
			...mapGetters(['findFriend', 'findChatIdx']),
			friendInfo() {
				return this.findFriend(this.currentCallId);
			},
		},
		onBackPress(options) {
			if (options.from && options.from == 'backbutton') {
				return true
			}
			return false
		},

		onUnload() {
			uni.$off('closeCall')
			uni.$off('handleCancel')
			uni.$off("handleCallFailed")
		},

		onLoad({
			mode,
			currentCallId
		}) {
			this.mode = mode
			this.currentCallId = currentCallId
			this.terminalType = getTerminalType()

			uni.$on("closeCall", () => {
				uni.showToast({
					title: "已在其他设备接听",
					icon: "none"
				})
				this.backPress()
			});

			uni.$on("handleCancel", () => {
				uni.showToast({
					title: "对方取消通话",
					icon: "none"
				})
				this.backPress()
			})

			uni.$on("handleCallFailed", () => {
				uni.showToast({
					title: "您未接听",
					icon: "none"
				})
				setTimeout(async () => {
					this.backPress();
				}, 1000)
			})
		},
		methods: {
			backPress() {
				setTimeout(() => {
					let chatIdx = this.findChatIdx({
						type: "PRIVATE",
						targetId: this.currentCallId
					});
					uni.navigateTo({
						url: "/subpages/chat/chat-box?chatIdx=" + chatIdx
					})
				}, 2000)
			},
			//拒绝通话
			reject: async function() {
				await apiWebRtcReject({
					toUserId: this.currentCallId,
					terminal: this.terminalType
				})
				this.backPress();
			},
			agree: async function() {
				await apiWebRtcAccept({
					toUserId: this.currentCallId,
					terminal: this.terminalType,
					mode: this.mode
				})
				// #ifdef APP
				uni.redirectTo({
					url: `/subpages/videocall/videocall?mode=${this.mode}&currentCallId=${this.currentCallId}&isCaller=false`
				})
				// #endif
				// #ifdef H5
				uni.redirectTo({
					url: `/subpages/videocall/videocallh5?mode=${this.mode}&currentCallId=${this.currentCallId}&isCaller=false`
				})
				// #endif
			},
		}
	}
</script>

<style scoped>
	.content {
		width: 100%;
		height: 100vh;
		display: flex;
		flex-direction: column;
		align-items: center;
		justify-content: center;
		background: linear-gradient(to bottom, #666, #222);
	}

	.ftbg {
		width: 100%;
		height: 100vh;
		top: 0rpx;
		left: 0rpx;
		background: #00000099;
		backdrop-filter: blur(20px);
		position: fixed;
		display: flex;
		flex-direction: column;
		align-items: center;
	}
</style>
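
Both call pages also import getTerminalType from @/common/utils/utils.js, which is not shown either. A plausible sketch using uni-app conditional compilation follows; the returned codes are placeholders and must match whatever terminal constants the backend expects.

// Hypothetical getTerminalType() from @/common/utils/utils.js. The concrete
// values are assumptions and must agree with the backend's terminal enum.
export function getTerminalType() {
	let terminal = 0; // assumed: 0 = web / H5
	// #ifdef APP-PLUS
	terminal = 1; // assumed: 1 = native app
	// #endif
	// #ifdef MP-WEIXIN
	terminal = 2; // assumed: 2 = mini program
	// #endif
	return terminal;
}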

videocallh5.vue: the in-call page shown on H5 once the call has been accepted.

<template>
	<view>
		<view class="video_content" v-if="mode=='video'">
			<view style="" class="topvg">
				<view style="width: 100%;display: flex;justify-content: space-between;padding: 50rpx;">
					<view style="display: flex;align-items: center;">
						<view @click="flipcamera = !flipcamera" class="btnitem" style="">
							<image src="https://www.minglina.com/oa-boot/api/v1/common/static/qqhsxt.png"
								style="width: 60rpx;" mode="widthFix"></image>
						</view>
						<view @click="showself = !showself" class="btnitem" style="margin-left: 20rpx;">
							<image src="https://www.minglina.com/oa-boot/api/v1/common/static/sshow.png"
								style="width: 60rpx;" mode="widthFix"></image>
						</view>
						<view v-if="!uservddata.audio" class="btnitem" style="margin-left: 20rpx;">
							<image src="https://www.minglina.com/oa-boot/api/v1/common/static/dfab.png"
								style="width: 60rpx;" mode="widthFix"></image>
						</view>
					</view>
					<view style="font-size: 28rpx;color: #bec7df;">

					</view>
					<view style="width: 60rpx;">

					</view>
				</view>
			</view>
			<template v-if="!flipcamera">
				<view v-show="showself"
					style="width: 240rpx;height: 400rpx; position: fixed;right: 40rpx;top: 130rpx;z-index: 101;border-radius: 16rpx;">
					<bgyx-video-item :avatar="$store.state.userInfo.avatar" :name="$store.state.userInfo.realname"
						radius="16rpx" :id="`bgyx_video_1`" :src="myvddata.stream" status="play" :video="myvddata.video"
						:audio="myvddata.audio" :muted="true" />
				</view>
				<view style="width: 100%;display: flex;flex-wrap: wrap;flex: 1;overflow-y: hidden;">
					<bgyx-video-item :avatar="avatar" :name="name" :id="`bgyx_video_0`" :src="uservddata.stream"
						status="play" :video="uservddata.video" :audio="uservddata.audio" />
				</view>
			</template>
			<template v-else>
				<view
					style="width: 240rpx;height: 400rpx; position: fixed;right: 40rpx;top: 130rpx;z-index: 101;border-radius: 16rpx;">
					<bgyx-video-item :avatar="avatar" :name="name" radius="16rpx" :id="`bgyx_video_1`"
						:src="uservddata.stream" status="play" :video="uservddata.video" :audio="uservddata.audio"
						:muted="true" />
				</view>
				<view style="width: 100%;display: flex;flex-wrap: wrap;flex: 1;overflow-y: hidden;" v-show="showself">
					<bgyx-video-item :avatar="$store.state.userInfo.avatar" :name="$store.state.userInfo.realname"
						:id="`bgyx_video_0`" :src="myvddata.stream" status="play" :video="myvddata.video"
						:audio="myvddata.audio" />
				</view>
			</template>

			<view style="" class="btmvg">
				<view
					style="display: flex;width: 100%;align-items: center;justify-content: center;margin-bottom: 60rpx;">
					<view @click="changeaudio(false)" v-if="myvddata.audio" class="btnitem" style="">
						<image src="https://www.minglina.com/oa-boot/api/v1/common/static/mmic.png"
							style="width: 60rpx;" mode="widthFix"></image>
					</view>
					<view @click="changeaudio(true)" v-if="!myvddata.audio" class="btnitem"
						style="background: #ff5e5e66;">
						<image src="https://www.minglina.com/oa-boot/api/v1/common/static/mmic.png"
							style="width: 60rpx;" mode="widthFix"></image>
					</view>

					<view @click="hangup" class="btnitem"
						style="margin-left: 80rpx;background: #ff5e5e;width: 140rpx;height: 140rpx;">
						<image src="https://www.minglina.com/oa-boot/api/v1/common/static/hhdown.png"
							style="width: 70rpx;" mode="widthFix"></image>
					</view>

					<view @click="changevideo(false)" v-if="myvddata.video" class="btnitem" style="margin-left: 80rpx;">
						<image src="https://www.minglina.com/oa-boot/api/v1/common/static/ccamera.png"
							style="width: 60rpx;" mode="widthFix"></image>
					</view>
					<view @click="changevideo(true)" v-if="!myvddata.video" class="btnitem"
						style="margin-left: 80rpx;background: #ff5e5e66;">
						<image src="https://www.minglina.com/oa-boot/api/v1/common/static/ccamera.png"
							style="width: 60rpx;" mode="widthFix"></image>
					</view>
				</view>
			</view>
		</view>
		<view class="voice_content" v-if="mode=='voice'">
			<view
				style="width: 100%;height: 100vh;display: flex;flex-direction: column;justify-content: space-between;align-items: center;background: linear-gradient(to bottom,#111,#00000000,#111);">
				<view style="display: flex;flex-direction: column;align-items: center;width: 100%;margin-top: 180rpx;">
					<u-avatar :src="getFile($store.state.userInfo.avatar)" size="120" style="margin: 10rpx;"
						mode="square"></u-avatar>
					<view style="font-size: 28rpx;color: #e5f5ff;">
						{{$store.state.userInfo.realname}}
					</view>

					<view style="font-size: 28rpx;color: #d9e0ff;margin-top: 80rpx;">
						{{secondsstr}}
					</view>
				</view>
				<view @click="hangup" class="btnitem"
					style="margin-bottom: 180rpx;background: #ff5e5e99;width: 140rpx;height: 140rpx;">
					<image src="https://www.minglina.com/oa-boot/api/v1/common/static/hhdown.png" style="width: 70rpx;"
						mode="widthFix"></image>
				</view>
			</view>

			<view style="width: 1rpx;height: 1rpx; position: fixed;left: 0rpx;top: 0rpx;z-index: -1;">
				<bgyx-video-item :avatar="avatar" :name="name" :id="`bgyx_video_0`" :src="uservddata.stream"
					status="play" :video="uservddata.video" :audio="uservddata.audio" />
			</view>
		</view>
	</view>
</template>

<script>
	import {
		mapGetters
	} from 'vuex';
	import {
		getTerminalType
	} from '@/common/utils/utils.js'
	import {
		apiWebRtcHandleOffer,
		apiWebRtcHandleAnswer,
		apiWebRtcCandidate,
		apiWebRtcHandup,
		apiWebRtcHeartbeat,
		apiWebRtcChangeVideoAudio
	} from '@/common/http.api.js'
	import BgyxVideoItem from '@/subpages/components/im/bgyx-video-item.vue'

	export default {
		components: {
			BgyxVideoItem
		},
		data() {
			return {
				id: '',
				name: '',
				avatar: "",
				src: '',
				showself: true,
				flipcamera: false, //是否交换摄像头
				myvddata: '',
				uservddata: '',
				audio: true,
				video: true,
				isCaller: false,
				userId: "",
				currentCallId: "",
				peerConnection: null,
				configuration: {
					iceServers: [{
						urls: [
							"stun:stun.l.google.com:19302",
							"stun:stun1.l.google.com:19302",
							"stun:stun2.l.google.com:19302",
							"stun:stun3.l.google.com:19302",
							"stun:stun4.l.google.com:19302",
						]
					}],
					iceTransportPolicy: 'all',
					bundlePolicy: 'max-bundle',
					rtcpMuxPolicy: 'require',
				},
				timer: null,
				iceCandidateQueue: [],
				isRemoteDescriptionSet: false,
				mode: "",
				seconds: 0,
				secondsstr: ""
			}
		},
		computed: {
			...mapGetters(['findFriend', 'findChatIdx']),
			friendInfo() {
				return this.findFriend(this.currentCallId);
			},
		},

		async onLoad({
			mode,
			currentCallId,
			isCaller
		}) {
			this.mode = mode
			this.currentCallId = currentCallId
			this.userId = this.$store.state.userInfo.id
			this.isCaller = JSON.parse(isCaller)
			this.name = this.friendInfo?.nickName
			this.avatar = this.friendInfo?.headImage

			if (this.mode == 'voice') {
				this.secondsTimer = setInterval(() => {
					this.seconds++
					this.secondsstr = this.secondsToTimeString()
				}, 1000)
			}

			if (this.isCaller) { //呼叫者
				this.createTimer();
				this.publishStream({
					video: true,
					audio: true,
				})
			}

			//被呼叫者接收offer
			uni.$on('handleOffer', (data) => {
				this.handleOffer(data.fromId, data.data)
			})

			//呼叫者接收被呼叫者创建的应答
			uni.$on('handleAnswer', (data) => {
				this.handleAnswer(data.fromId, data.data)
			})

			//双方交换ice
			uni.$on('handleIceCandidate', (data) => {
				this.handleIceCandidate(data.fromId, data.data)
			})

			//通话后对方挂断通话
			uni.$on('handleHandup', (data) => {
				this.handleCallEnded(data.fromId)
			})

			//改变音视频
			uni.$on('handleChangeVideoAudio', (data) => {
				this.handleChangeVideoAudio(data.fromId, data.data)
			})
		},

		onHide() {
			this.cleanOff()
		},

		onUnload() {
			this.cleanOff()
		},

		onBackPress(options) {
			if (options.from && options.from == 'backbutton') {
				return true
			}
			return false
		},

		methods: {
			secondsToTimeString: function() {
				const seconds = this.seconds
				let hours = Math.floor(seconds / 3600);
				let minutes = Math.floor((seconds % 3600) / 60);
				let secs = seconds % 60;

				hours = hours < 10 ? '0' + hours : hours;
				minutes = minutes < 10 ? '0' + minutes : minutes;
				secs = secs < 10 ? '0' + secs : secs;

				return hours + ':' + minutes + ':' + secs;
			},
			muteAudio(type) {
				this.myvddata.stream.getTracks().forEach(function(track) {
					if (track.kind === 'audio') {
						track.enabled = type ? type : false;
					}
				});
			},
			muteVideo(type) {
				this.myvddata.stream.getTracks().forEach(function(track) {
					if (track.kind === 'video') {
						track.enabled = type ? type : false;
					}
				});
			},
			changeaudio(e) {
				this.muteAudio(e)
				this.myvddata.audio = e
				const terminalType = getTerminalType()
				apiWebRtcChangeVideoAudio({
					toUserId: this.currentCallId,
					changeType: 2,
					terminal: terminalType,
					audio: e
				})
			},
			changevideo(e) {
				this.muteVideo(e)
				this.myvddata.video = e
				const terminalType = getTerminalType()
				apiWebRtcChangeVideoAudio({
					toUserId: this.currentCallId,
					changeType: 1,
					terminal: terminalType,
					video: e
				})
			},
			cleanOff() {
				uni.$off('handleOffer')
				uni.$off('handleAnswer')
				uni.$off('handleIceCandidate')
				uni.$off('handleHandup')
				uni.$off('handleChangeVideoAudio')

				if (this.timer) {
					clearInterval(this.timer);
				}
				if (this.secondsTimer) {
					clearInterval(this.secondsTimer);
				}
			},

			createTimer() {
				if (this.timer) {
					return;
				}
				this.timer = setInterval(() => {
					apiWebRtcHeartbeat({
						toUserId: this.currentCallId
					});
				}, 50000);
			},

			backPress() {
				setTimeout(() => {
					let chatIdx = this.findChatIdx({
						type: "PRIVATE",
						targetId: this.currentCallId
					});
					uni.navigateTo({
						url: "/subpages/chat/chat-box?chatIdx=" + chatIdx
					})
				}, 2000)
			},

			async hangup() {
				await this.close();
				const terminalType = getTerminalType()
				await apiWebRtcHandup({
					toUserId: this.currentCallId,
					terminal: terminalType
				})
				this.backPress();
			},

			async close() {
				let that = this
				// the local stream may not exist yet (e.g. hanging up before the offer arrived)
				if (this.myvddata && this.myvddata.stream) {
					this.myvddata.stream.getTracks().forEach(function(track) {
						track.stop();
						that.myvddata.stream.removeTrack(track);
					});
				}
				if (this.uservddata) {
					this.uservddata.stream = null;
				}
				this.peerConnection?.close();
				this.peerConnection = null;
			},

			async handleCallEnded(fromId) {
				console.log("对方结束了通话,from:", fromId)
				if (fromId == this.currentCallId) {
					await this.close();
					uni.showToast({
						title: "对方结束了通话",
						icon: "none"
					})
					this.backPress();
				}
			},

			// 添加处理 answer 的函数
			async handleAnswer(fromId, answer) {
				console.log('收到answer,from:', fromId);
				const pc = this.peerConnection;
				if (pc) {
					await pc.setRemoteDescription(new RTCSessionDescription(answer));
					this.isRemoteDescriptionSet = true;
					// flush the candidates that arrived before the remote description was set
					for (const candidate of this.iceCandidateQueue) {
						await pc.addIceCandidate(new RTCIceCandidate(candidate));
					}
					this.iceCandidateQueue = [];
				}
			},

			// 添加处理 ICE candidate 的函数
			async handleIceCandidate(fromId, candidate) {
				console.log('收到candidate,from:', fromId);
				const pc = this.peerConnection;
				if (pc) {
					if (!this.isRemoteDescriptionSet) {
						this.iceCandidateQueue.push(candidate);
					} else {
						await pc.addIceCandidate(new RTCIceCandidate(candidate));
					}
				}
			},

			handleChangeVideoAudio(fromId, data) {
				if (data.changeType == 1) {
					this.uservddata.video = data.video
				} else {
					this.uservddata.audio = data.audio
				}
			},

			// 修改 handleOffer 函数
			async handleOffer(fromId, offer) {
				console.log('收到offer,from:', fromId);
				const pc = new RTCPeerConnection(this.configuration);
				const localStream = await this.getStreamLoc({
					audio: true,
					video: true,
				})
				let that = this
				localStream.getTracks().forEach(function(track) {
					pc.addTrack(track);
					if (that.mode == "voice" && track.kind == 'video') {
						track.enabled = false;
					}
				});

				let remoteStream = new MediaStream();
				pc.ontrack = (event) => {
					remoteStream.addTrack(event.track);
				};

				console.log('设置远程offer描述:', offer);
				await pc.setRemoteDescription(new RTCSessionDescription(offer));

				const answer = await pc.createAnswer();
				console.log('创建answer:', answer);

				await pc.setLocalDescription(answer);

				pc.onicecandidate = async (event) => {
					if (event.candidate) { // 移除 host 限制
						console.log('handleOffer发送 ICE 候选者:', fromId);

						const terminalType = getTerminalType()
						apiWebRtcCandidate({
							toUserId: fromId,
							candidate: event.candidate,
							terminal: terminalType
						})
					}
				};

				const mydata = {
					id: this.userId,
					pc,
					stream: localStream,
					audio: true,
					video: true,
				};
				this.myvddata = mydata

				const vdata = {
					id: this.currentCallId,
					pc,
					stream: remoteStream,
					audio: true,
					video: true,
				};
				this.uservddata = vdata

				const terminalType = getTerminalType()
				await apiWebRtcHandleAnswer({
					toUserId: fromId,
					answer: answer,
					terminal: terminalType
				})
				this.peerConnection = pc
			},

			async publishStream(options) {
				const pc = new RTCPeerConnection(this.configuration);
				// 设置事件处理器
				pc.onconnectionstatechange = (event) => {
					console.log(`与 ${this.currentCallId} 的连接状态:`, pc.connectionState, event);
				};

				pc.oniceconnectionstatechange = async (event) => {
					console.log(`与 ${this.currentCallId} 的ICE连接状态:`, pc.iceConnectionState, event);
				};

				pc.onsignalingstatechange = (event) => {
					console.log(`与 ${this.currentCallId} 的signaling状态:`, pc.signalingState, event);
				};

				const localStream = await this.getStreamLoc({
					audio: true,
					video: true,
				})
				let that = this
				localStream.getTracks().forEach(function(track) {
					pc.addTrack(track);
					if (that.mode == "voice" && track.kind == 'video') {
						track.enabled = false;
					}
				});

				let remoteStream = new MediaStream();
				pc.ontrack = (event) => {
					remoteStream.addTrack(event.track);
				};

				let offer = await pc.createOffer();
				await pc.setLocalDescription(offer);
				console.log('创建offer:', offer);

				pc.onicecandidate = async (event) => {
					if (event.candidate) { // 移除 host 限制
						console.log('publishStream发送 ICE 候选者:', this.currentCallId);

						const terminalType = getTerminalType()
						apiWebRtcCandidate({
							toUserId: this.currentCallId,
							candidate: event.candidate,
							terminal: terminalType
						})
					}
				};

				const myvdata = {
					id: this.userId,
					pc,
					stream: localStream,
					audio: options.audio,
					video: options.video,
				};

				this.myvddata = myvdata

				const uvdata = {
					id: this.currentCallId,
					pc,
					stream: remoteStream,
					audio: true,
					video: true,
				};

				this.uservddata = uvdata

				const terminalType = getTerminalType()
				await apiWebRtcHandleOffer({
					toUserId: this.currentCallId,
					offer: offer,
					terminal: terminalType
				})
				this.peerConnection = pc
			},

			getStreamLoc: async function(constraints) {
				// modern browsers: the standard promise-based API
				if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
					return await navigator.mediaDevices.getUserMedia(constraints);
				}
				// legacy prefixed APIs are callback-based, so wrap them in a Promise
				const legacyGetUserMedia = navigator.webkitGetUserMedia ||
					navigator.mozGetUserMedia ||
					navigator.getUserMedia;
				if (legacyGetUserMedia) {
					return await new Promise((resolve, reject) => {
						legacyGetUserMedia.call(navigator, constraints, resolve, reject);
					});
				}
				throw new Error('getUserMedia is not supported in this browser');
			},
		}
	}
</script>


<style scoped>
	.video_content {
		display: flex;
		flex-direction: column;
		align-items: center;
		width: 100%;
		height: 100vh;
		justify-content: space-between;
		overflow-y: hidden;
	}

	.voice_content {
		display: flex;
		flex-direction: column;
		align-items: center;
		width: 100%;
		height: 100vh;
		background: #333;
	}

	.btmvg {
		width: 100%;
		display: flex;
		height: 40%;
		background: linear-gradient(to bottom, #33333300, #111111);
		position: fixed;
		left: 0rpx;
		bottom: 0rpx;
		color: white;
		align-items: flex-end;
	}

	.topvg {
		width: 100%;
		display: flex;
		height: 40%;
		background: linear-gradient(to bottom, #111111, #33333300);
		position: fixed;
		left: 0rpx;
		top: 0rpx;
		color: white;
		align-items: flex-start;
		z-index: 99;
	}

	.btnitem {
		width: 120rpx;
		height: 120rpx;
		display: flex;
		align-items: center;
		justify-content: center;
		border-radius: 160rpx;
		background: #cfcfcf55;
	}
</style>
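
The bgyx-video-item component imported above is not included in the post. On H5 its essential job is to bind the MediaStream received through the src prop to a native video element via srcObject, and to fall back to the avatar when the camera is off. The sketch below is an H5-only approximation: the prop list mirrors how the component is used above, everything else is an assumption.

<template>
	<view class="vd-wrap" :style="{ borderRadius: radius }">
		<!-- the native video element is created in mounted() and inserted as the first child -->
		<view v-if="!video" class="vd-cover">
			<image :src="avatar" style="width: 120rpx;height: 120rpx;border-radius: 60rpx;"></image>
		</view>
		<view class="vd-name">{{ name }}</view>
	</view>
</template>

<script>
	export default {
		props: {
			id: String,
			src: { default: null }, // MediaStream
			avatar: String,
			name: String,
			status: String,
			video: { type: Boolean, default: true },
			audio: { type: Boolean, default: true },
			muted: { type: Boolean, default: false },
			radius: { type: String, default: '0rpx' }
		},
		watch: {
			src(stream) {
				if (this.videoEl) this.videoEl.srcObject = stream || null;
			}
		},
		mounted() {
			// H5 only: create a real <video> element and bind the stream via srcObject
			this.videoEl = document.createElement('video');
			this.videoEl.autoplay = true;
			this.videoEl.playsInline = true;
			this.videoEl.muted = this.muted;
			this.videoEl.style.cssText = 'width:100%;height:100%;object-fit:cover;';
			this.videoEl.srcObject = this.src || null;
			this.$el.insertBefore(this.videoEl, this.$el.firstChild);
		}
	}
</script>

<style scoped>
	.vd-wrap { width: 100%; height: 100%; position: relative; overflow: hidden; background: #000; }
	.vd-cover { position: absolute; left: 0; top: 0; width: 100%; height: 100%; display: flex; align-items: center; justify-content: center; background: #222; }
	.vd-name { position: absolute; left: 12rpx; bottom: 12rpx; color: #fff; font-size: 24rpx; }
</style>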

App.vue: handling the unipush messages that the backend forwards to the client.

<script>
	export default {
		onLaunch: function() {
			uni.onPushMessage((res) => {
				console.log("收到推送消息:", res) //监听推送消息
				if (res.type == "click") {
					const {
						cmd,
						data
					} = res.data.payload.text
					if (data.type == msgType.MESSAGE_TYPE.RTC_CALL_VOICE ||
						data.type == msgType.MESSAGE_TYPE.RTC_CALL_VIDEO) {
						let mode = data.type == msgType.MESSAGE_TYPE.RTC_CALL_VIDEO ? "video" : "voice";
						uni.navigateTo({
							url: `/subpages/videocallrev/videocallrev?mode=${mode}&currentCallId=${data.sendId}`
						})
					} else {
						const idx = this.$store.getters.findChatIdx({
							type: "PRIVATE",
							targetId: data.sendId
						})
						if (idx >= 0) {
							uni.navigateTo({
								url: "/subpages/chat/chat-box?chatIdx=" + idx
							})
						}
					}
				} else if (res.type == "receive") {
					const {
						cmd,
						data,
						ignoreOfflineNotify,
						sender,
						receivers
					} = res.data.payload.text
					const {
						title,
						content
					} = res.data

					// #ifdef APP-PLUS
					this.createNotification(title, content, data.type, ignoreOfflineNotify, res.data.payload)
					// #endif

					// 私聊消息
					this.handlePrivateMessage(data, receivers.length > 0 && receivers[0].id == sender.id);
				}
			})

			// #ifdef APP-PLUS
			this.checkNotificationAuthorized()
			// #endif
		},
		onShow: function() {
		},

		onHide: function() {
			console.log('App Hide')
		},

		methods: {
			// 检查app是否开启了通知权限 安卓苹果通用
			async checkNotificationAuthorized() {
				const appNotify = uni.getStorageSync("app_notify")
				if (appNotify) {
					return
				}
				const notificationAuthorized = uni.getAppAuthorizeSetting().notificationAuthorized
				if (notificationAuthorized !== 'authorized') {
					uni.showModal({
						title: '通知权限',
						content: '您还没有开启通知权限,无法接收到消息通知,请前往设置!',
						confirmText: '去设置',
						cancelText: '拒绝',
						success: (res) => {
							uni.setStorageSync("app_notify", 1)
							if (res.confirm) {
								uni.openAppAuthorizeSetting()
							}
						}
					});
				}
			},

			createNotification(title, content, messageType, ignoreOfflineNotify, payload) {
				if (ignoreOfflineNotify) {
					return;
				}
				uni.createPushMessage({
					title: title,
					content: content,
					payload: payload,
					success: () => {},
					fail: () => {}
				})
			},

			//sendToSelfTerminal:是否是自己其他设备推送给自己的
			// 用于音视频通话在自己某设备上接受通话后,告知自己的其他设备关闭呼叫
			async handlePrivateMessage(msg, sendToSelfTerminal) {
				const userId = this.$store.state.userInfo.id
				// 标记这条消息是不是自己给自己发的
				msg.selfSend = msg.sendId == userId;
				// 好友id
				let friendId = msg.selfSend ? msg.recvId : msg.sendId;
				// 会话信息
				let chatInfo = {
					type: 'PRIVATE',
					targetId: friendId
				}
				 // 消息插入
				let friend = await this.loadFriendInfo(friendId);
				this.insertPrivateMessage(friend, msg, userId, sendToSelfTerminal);
			},

			async insertPrivateMessage(friend, msg, userId, sendToSelfTerminal) {
				// 单人视频信令
				if (msgType.isRtcPrivate(msg.type)) {
					// #ifdef MP-WEIXIN
					// 小程序不支持音视频
					return;
					// #endif
					// 被呼叫,弹出视频页面
					if (msg.type == msgType.MESSAGE_TYPE.RTC_CALL_VOICE ||
						msg.type == msgType.MESSAGE_TYPE.RTC_CALL_VIDEO) {
						let mode = msg.type == msgType.MESSAGE_TYPE.RTC_CALL_VIDEO ? "video" : "voice";
						let pages = getCurrentPages();
						let curPage = pages[pages.length - 1].route;
						if (curPage != "/subpages/videocallrev/videocallrev") {
							uni.navigateTo({
								url: `/subpages/videocallrev/videocallrev?mode=${mode}&currentCallId=${msg.sendId}`
							})
						}
					}

					//接受通话
					if (msg.type == msgType.MESSAGE_TYPE.RTC_ACCEPT_VIDEO ||
						msg.type == msgType.MESSAGE_TYPE.RTC_ACCEPT_VOICE) {
						if (sendToSelfTerminal) {
							uni.$emit("closeCall");
							return;
						}
						let mode = msg.type == msgType.MESSAGE_TYPE.RTC_ACCEPT_VIDEO ? "video" : "voice";
						let pages = getCurrentPages();
						let curPage = pages[pages.length - 1].route;
						if (curPage != "/subpages/videocall/videocallh5") {
							// #ifdef APP
							uni.redirectTo({
								url: `/subpages/videocall/videocall?mode=${mode}&currentCallId=${msg.sendId}&isCaller=true`
							})
							// #endif
							// #ifdef H5
							uni.redirectTo({
								url: `/subpages/videocall/videocallh5?mode=${mode}&currentCallId=${msg.sendId}&isCaller=true`
							})
							// #endif
						}
					}
					//取消呼叫
					if (msg.type == msgType.MESSAGE_TYPE.RTC_CANCEL) {
						uni.$emit("handleCancel", {
							fromId: msg.sendId
						})
					}
					//呼叫失败
					if (msg.type == msgType.MESSAGE_TYPE.RTC_FAILED) {
						uni.$emit("handleCallFailed", {
							fromId: msg.sendId
						})
					}
					//被呼叫者收到offer
					if (msg.type == msgType.MESSAGE_TYPE.RTC_OFFER) {
						// #ifdef APP
						console.log("被呼叫者收到offer,调用handleOfferApp", msg)
						uni.$emit("handleOfferApp", {
							fromId: msg.sendId,
							data: msg.content
						})
						// #endif
						// #ifdef H5
						uni.$emit("handleOffer", {
							fromId: msg.sendId,
							data: JSON.parse(this.unGzip(msg.content))
						})
						// #endif
					}

					//呼叫者收到answer
					if (msg.type == msgType.MESSAGE_TYPE.RTC_ANSWER) {
						// #ifdef APP
						uni.$emit("handleAnswerApp", {
							fromId: msg.sendId,
							data: msg.content
						})
						// #endif
						// #ifdef H5
						uni.$emit("handleAnswer", {
							fromId: msg.sendId,
							data: JSON.parse(this.unGzip(msg.content))
						})
						// #endif
					}

					//收到candidate
					if (msg.type == msgType.MESSAGE_TYPE.RTC_CANDIDATE) {
						// #ifdef APP
						uni.$emit("handleIceCandidateApp", {
							fromId: msg.sendId,
							data: JSON.parse(msg.content)
						})
						// #endif
						// #ifdef H5
						uni.$emit("handleIceCandidate", {
							fromId: msg.sendId,
							data: JSON.parse(msg.content)
						})
						// #endif
					}

					//对方拒绝通话
					if (msg.type == msgType.MESSAGE_TYPE.RTC_REJECT) {
						uni.$emit('handleReject', {
							fromId: msg.sendId,
						})
					}
					//通话后对方挂断通话
					if (msg.type == msgType.MESSAGE_TYPE.RTC_HANDUP) {
						// #ifdef APP
						uni.$emit("handleHandupApp", {
							fromId: msg.sendId,
						})
						// #endif
						// #ifdef H5
						uni.$emit("handleHandup", {
							fromId: msg.sendId,
						})
						// #endif
					}

					//改变音视频
					if (msg.type == msgType.MESSAGE_TYPE.RTC_CHANGE_VIDEO_AUDIO) {
						// #ifdef APP
						uni.$emit("handleChangeVideoAudioApp", {
							fromId: msg.sendId,
							data: JSON.parse(msg.content)
						})
						// #endif
						// #ifdef H5
						uni.$emit("handleChangeVideoAudio", {
							fromId: msg.sendId,
							data: JSON.parse(msg.content)
						})
						// #endif
					}
					return;
				}
			},

			async loadFriendInfo(id) {
				let friend = this.$store.getters.findFriend(id);
				if (!friend) {
					await this.$store.dispatch('loadFriend')
					friend = this.$store.getters.findFriend(id);
				}
				return friend;
			},
		}
	}
</script>

<style lang="scss">
 
</style>
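
App.vue relies on a msgType module (the MESSAGE_TYPE constants and the isRtcPrivate helper) that is not shown in the post. The sketch below only illustrates its shape; the numeric codes are placeholders and must match the message types actually sent by the backend.

// Hypothetical msgType module: only the shape matters, the numeric codes below
// are placeholders and must match the backend's message-type constants.
const MESSAGE_TYPE = {
	RTC_CALL_VOICE: 100,
	RTC_CALL_VIDEO: 101,
	RTC_ACCEPT_VOICE: 102,
	RTC_ACCEPT_VIDEO: 103,
	RTC_REJECT: 104,
	RTC_CANCEL: 105,
	RTC_FAILED: 106,
	RTC_HANDUP: 107,
	RTC_OFFER: 108,
	RTC_ANSWER: 109,
	RTC_CANDIDATE: 110,
	RTC_CHANGE_VIDEO_AUDIO: 111
};

// true if the type is one of the private-call signaling messages handled above
function isRtcPrivate(type) {
	return type >= MESSAGE_TYPE.RTC_CALL_VOICE && type <= MESSAGE_TYPE.RTC_CHANGE_VIDEO_AUDIO;
}

export default { MESSAGE_TYPE, isRtcPrivate };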
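
The H5 branches above call this.unGzip(msg.content) before JSON.parse, because the backend gzip-compresses and Base64-encodes the SDP payloads (see ZipUtil.unGzip and Base64.decode in the controller below, and the getOfferData/getAnswerData endpoints it provides as a server-side alternative). A sketch of such a helper, assuming the pako library is available on the H5 side:

// Hypothetical unGzip helper for H5, assuming pako is installed (npm i pako).
// It reverses the backend's gzip + Base64 encoding of the SDP payload.
import pako from 'pako';

export function unGzip(base64Str) {
	// Base64 -> raw bytes
	const binary = atob(base64Str);
	const bytes = new Uint8Array(binary.length);
	for (let i = 0; i < binary.length; i++) {
		bytes[i] = binary.charCodeAt(i);
	}
	// gunzip back to the original JSON string
	return pako.ungzip(bytes, { to: 'string' });
}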

On the uni-app side the pages call the REST endpoints below; the backend then forwards each signaling message to the peer through a unipush cloud function, and App.vue receives it in uni.onPushMessage. The ApiWebrtcPrivateController below exposes those endpoints (a sketch of the forwarding cloud function follows it).

package com.ynfy.app.api.v1.controller;

import cn.hutool.core.codec.Base64;
import cn.hutool.core.util.CharsetUtil;
import cn.hutool.core.util.ZipUtil;
import com.bx.implatform.service.WebrtcPrivateService;
import com.ynfy.app.api.v1.entity.dto.WebrtcDTO;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.util.TokenUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.Objects;

@Slf4j
@Tag(name = "单人通话")
@RestController
@RequestMapping("/api/v1/webrtc/private")
@RequiredArgsConstructor
public class ApiWebrtcPrivateController extends ApiBaseController {

    @Autowired
    private WebrtcPrivateService webrtcPrivateService;

    @Operation(summary = "呼叫视频通话")
    @PostMapping("/call")
    public Result<?> call(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.call(userId, dto.getTerminal(), dto.getMode(), dto.getToUserId());
        return Result.OK();
    }

    @Operation(summary = "接受视频通话")
    @PostMapping("/accept")
    public Result<?> accept(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.accept(userId, dto.getToUserId(), dto.getTerminal(), dto.getMode());
        return Result.OK();
    }

    @Operation(summary = "向被呼叫者发送offer")
    @PostMapping("/handleOffer")
    public Result<?> handleOffer(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.handleOffer(userId, dto.getToUserId(), dto.getTerminal(), dto.getOffer());
        return Result.OK();
    }

    @Operation(summary = "向呼叫者发送answer")
    @PostMapping("/handleAnswer")
    public Result<?> handleAnswer(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.handleAnswer(userId, dto.getToUserId(), dto.getTerminal(), dto.getAnswer());
        return Result.OK();
    }


    @Operation(summary = "拒绝视频通话")
    @PostMapping("/reject")
    public Result<?> reject(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.reject(userId, dto.getToUserId(), dto.getTerminal());
        return Result.OK();
    }

    @Operation(summary = "取消呼叫")
    @PostMapping("/cancel")
    public Result<?> cancel(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.cancel(userId, dto.getToUserId(), dto.getTerminal());
        return Result.OK();
    }

    @Operation(summary = "呼叫失败")
    @PostMapping("/failed")
    public Result<?> failed(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.failed(userId, dto.getToUserId(), dto.getReason(), dto.getTerminal());
        return Result.OK();
    }

    @Operation(summary = "挂断")
    @PostMapping("/handup")
    public Result<?> handup(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.handup(userId, dto.getToUserId(), dto.getTerminal());
        return Result.OK();
    }

    @PostMapping("/candidate")
    @Operation(summary = "同步candidate")
    public Result<?> candidate(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.candidate(userId, dto.getToUserId(), dto.getTerminal(), dto.getCandidate());
        return Result.OK();
    }

    @Operation(summary = "心跳")
    @PostMapping("/heartbeat")
    public Result<?> heartbeat(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.heartbeat(userId, dto.getToUserId());
        return Result.OK();
    }

    @Operation(summary = "获取offer")
    @PostMapping("/getOfferData")
    public Result<?> getOfferData(@RequestBody WebrtcDTO dto) {
        String decompressObjStr = null;
        if (Objects.nonNull(dto.getOffer())) {
            try {
                decompressObjStr = ZipUtil.unGzip(Base64.decode(dto.getOffer().toString()), CharsetUtil.UTF_8);
            } catch (Exception e) {
                e.printStackTrace();
                log.error("文本解压缩异常:{}", e.getMessage());
            }
        }
        return Result.OK("", decompressObjStr);
    }

    @Operation(summary = "获取answer")
    @PostMapping("/getAnswerData")
    public Result<?> getAnswerData(@RequestBody WebrtcDTO dto) {
        String decompressObjStr = null;
        if (Objects.nonNull(dto.getAnswer())) {
            try {
                decompressObjStr = ZipUtil.unGzip(Base64.decode(dto.getAnswer().toString()), CharsetUtil.UTF_8);
            } catch (Exception e) {
                e.printStackTrace();
                log.error("文本解压缩异常:{}", e.getMessage());
            }
        }
        return Result.OK("", decompressObjStr);
    }

    @Operation(summary = "改变音视频传输")
    @PostMapping("/changeVideoAudio")
    public Result<?> changeVideoAudio(@RequestBody WebrtcDTO dto) {
        String userId = TokenUtil.getUserId(TokenUtil.getToken(request));
        webrtcPrivateService.changeVideoAudio(userId, dto);
        return Result.OK();
    }

}
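
As noted above, the backend does not push to the client directly: it hands each signaling message to a unipush 2.0 cloud function, and uni.onPushMessage in App.vue receives it. Below is a minimal sketch of such a uniCloud function; the appId, the push_cid collection that maps user ids to client ids, and the payload shape are all assumptions.

// Minimal sketch of a unipush 2.0 cloud function the backend could call to forward
// a signaling message. The appId, collection name and payload shape are assumptions.
'use strict';
exports.main = async (event) => {
	const { toUserId, title, content, payload } = event;

	// assumption: the device's push client id (cid) was saved to this collection at login
	const db = uniCloud.database();
	const res = await db.collection('push_cid').where({ userId: toUserId }).get();
	if (!res.data.length) {
		return { code: 1, msg: 'no push client id found for user ' + toUserId };
	}

	// uni-push 2.0 server-side API; the appId below is a placeholder
	const uniPush = uniCloud.getPushManager({ appId: '__UNI__XXXXXXX' });
	return await uniPush.sendMessage({
		push_clientid: res.data.map(item => item.cid), // target device(s)
		title,
		content,
		payload, // consumed by uni.onPushMessage in App.vue (res.data.payload)
		request_id: Date.now().toString()
	});
};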
ui·uni-app