Implementing One-to-One Audio/Video Calls with WebRTC in a Vue Project

草样的年华 2024-08-07

Result

Frontend code

<template>
  <div class="flex items-center flex-col text-center p-12 h-screen">
    <div class="relative h-full mb-4 fBox">
      <video id="localVideo"></video>
      <video id="remoteVideo"></video>
      <!-- Caller side: waiting for the callee to answer -->
      <div v-if="caller && calling">
        <p class="mb-4 text-white">Waiting for the other party to answer...</p>
        <img style="width: 60px;" @click="hangUp" src="@/assets/guaDuang.png" alt="">
      </div>
      <!-- Callee side: incoming call prompt -->
      <div v-if="called && calling">
        <p>Incoming video call...</p>
        <div class="flex">
          <img style="width: 60px" @click="hangUp" src="@/assets/guaDuang.png" alt="">
          <img style="width: 60px" @click="acceptCall" src="@/assets/jieTing.png" alt="">
        </div>
      </div>
    </div>
    <div>
      <button @click="callRemote" style="margin-right: 10px">Start call</button>
      <button @click="hangUp" style="margin-left: 10px">Hang up</button>
    </div>
  </div>
</template>

<script>
import { io } from "socket.io-client";

// Fixed room id so both browser tabs join the same signaling room
let roomId = '001';

export default {
  name: 'HelloWorld',
  props: {
    msg: String
  },
  data() {
    return {
      wsSocket: null,       // socket.io client instance
      called: false,        // whether this side is the callee
      caller: false,        // whether this side is the caller
      calling: false,       // a call is being set up (ringing)
      communicating: false, // the video call is in progress
      localVideo: null,     // <video> element that plays the local stream
      remoteVideo: null,    // <video> element that plays the remote stream
      peer: null,           // RTCPeerConnection instance
      localStream: null,    // local MediaStream
    }
  },
  methods: {
    // Caller starts a video call
    async callRemote() {
      let that = this;
      console.log('Starting a video call');
      that.caller = true;
      that.calling = true;
      await that.getLocalStream();
      // Tell the signaling server that a call has been started in this room
      that.wsSocket.emit('callRemote', roomId)
    },
    // Callee accepts the video call
    acceptCall() {
      console.log('Accepting the video call');
      this.wsSocket.emit('acceptCall', roomId)
    },
    // Hang up
    hangUp() {
      this.wsSocket.emit('hangUp', roomId)
    },
    reset() {
      this.called = false;
      this.caller = false;
      this.calling = false;
      this.communicating = false;
      this.peer = null;
      this.localVideo.srcObject = null;
      this.remoteVideo.srcObject = null;
      this.localStream = undefined;
      console.log('Call ended -------')
    },
    // Get the local audio/video stream
    async getLocalStream() {
      let that = this;
      let constraints = { audio: true, video: true };
      const stream = await navigator.mediaDevices.getUserMedia(constraints);
      that.localVideo.srcObject = stream;
      that.localVideo.play();
      that.localStream = stream;
      return stream;
    }
  },
  mounted() {
    let that = this;
    that.$nextTick(() => {
      that.localVideo = document.getElementById('localVideo');
      that.remoteVideo = document.getElementById('remoteVideo');
    })
    let sock = io('localhost:3000'); // must match the signaling server's port
    // Connected to the signaling server
    sock.on('connectionSuccess', () => {
      console.log('Connected to the signaling server');
    });
    sock.emit('joinRoom', roomId) // join the room
    sock.on('callRemote', () => {
      // The caller also receives this broadcast and simply ignores it
      if (!that.caller) { // not the caller (user A), so this side becomes the callee
        that.called = true;   // callee
        that.calling = true;  // call is being set up
      }
    });
    sock.on('acceptCall', async () => {
      if (that.caller) {
        // User A receives user B's acceptance
        that.peer = new RTCPeerConnection();
        // Add the local audio/video stream
        that.peer.addStream && that.peer.addStream(that.localStream);
        // Listen for onicecandidate to collect candidate information
        that.peer.onicecandidate = (event) => {
          if (event.candidate) {
            console.log('User A gathered a candidate', event.candidate);
            // Forward the candidate to user B through the signaling server
            sock.emit('sendCandidate', { roomId, candidate: event.candidate })
          }
        }
        // From here on, user A and user B exchange media peer-to-peer.
        // Listen for onaddstream to receive the remote stream
        that.peer.onaddstream = (event) => {
          console.log("User A received user B's stream", event.stream);
          that.calling = false;
          that.communicating = true;
          that.remoteVideo.srcObject = event.stream;
          that.remoteVideo.play();
        }
        // Create the offer
        let offer = await that.peer.createOffer({
          offerToReceiveAudio: 1,
          offerToReceiveVideo: 1
        })
        console.log('offer', offer);
        // Set the offer as the local description
        await that.peer.setLocalDescription(offer);
        // Send the offer to user B through the signaling server
        sock.emit('sendOffer', { offer, roomId })
      }
    })
    // Received an offer
    sock.on('sendOffer', async (offer) => {
      if (that.called) { // callee - user B
        console.log('Received offer', offer);
        // Create our own RTCPeerConnection
        that.peer = new RTCPeerConnection();
        // Add the local audio/video stream
        const stream = await that.getLocalStream();
        that.peer.addStream && that.peer.addStream(stream);
        // Listen for onicecandidate to collect candidate information
        that.peer.onicecandidate = (event) => {
          if (event.candidate) {
            console.log('User B gathered a candidate', event.candidate);
            // Forward the candidate to user A through the signaling server
            sock.emit('sendCandidate', { roomId, candidate: event.candidate })
          }
        }
        // From here on, user A and user B exchange media peer-to-peer.
        // Listen for onaddstream to receive the remote stream
        that.peer.onaddstream = (event) => {
          console.log("User B received user A's stream", event.stream);
          that.calling = false;
          that.communicating = true;
          that.remoteVideo.srcObject = event.stream;
          that.remoteVideo.play();
        }
        // Set the remote description, then create and send the answer
        await that.peer.setRemoteDescription(offer);
        let answer = await that.peer.createAnswer();
        console.log('User B created an answer', answer);
        await that.peer.setLocalDescription(answer);
        // Send the answer back through the signaling server
        sock.emit('sendAnswer', { answer, roomId })
      }
    })
    // User A receives the answer
    sock.on('sendAnswer', async (answer) => {
      if (that.caller) { // caller - user A
        await that.peer.setRemoteDescription(answer);
      }
    })
    // Received a candidate
    sock.on('sendCandidate', async (candidate) => {
      console.log('Received candidate', candidate);
      // Both user A and user B add the received candidate to their own peer connection
      await that.peer.addIceCandidate(candidate)
    })
    // Hang up
    sock.on('hangUp', () => {
      that.reset()
    })
    that.wsSocket = sock;
  }
}
</script>
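A note on the peer-connection code above: it relies on the legacy addStream/onaddstream API (hence the peer.addStream && guard) and constructs RTCPeerConnection() without any ICE servers, which generally only works when both peers are on the same local network. The following is only a sketch of how the caller-side setup could look with the standard addTrack/ontrack API and a public STUN server; the createPeer helper name and the STUN URL are illustrative additions, not part of the original project.

// Sketch only: modern equivalent of the caller-side peer setup above.
// Assumes the same localStream, remoteVideo element, sock socket and roomId as in the component.
function createPeer(localStream, remoteVideo, sock, roomId) {
  const peer = new RTCPeerConnection({
    // Without ICE servers the call usually only works on the same LAN;
    // behind stricter NATs a TURN server is needed in addition to STUN.
    iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
  });
  // addTrack replaces the deprecated addStream
  localStream.getTracks().forEach((track) => peer.addTrack(track, localStream));
  // ontrack replaces the deprecated onaddstream
  peer.ontrack = (event) => {
    remoteVideo.srcObject = event.streams[0];
    remoteVideo.play();
  };
  peer.onicecandidate = (event) => {
    if (event.candidate) {
      sock.emit('sendCandidate', { roomId, candidate: event.candidate });
    }
  };
  return peer;
}

The callee side would build its peer the same way before calling setRemoteDescription; the signaling flow (offer, answer, candidates) stays exactly as in the component.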

Server code

const socket = require('socket.io');
const http = require('http');

const server = http.createServer()
const io = socket(server, {
  cors: {
    origin: '*' // allow cross-origin requests from the Vue dev server
  }
});

io.on('connection', sock => {
  console.log('Client connected...')
  // Tell the client the connection succeeded
  sock.emit('connectionSuccess');
  sock.on('joinRoom', (roomId) => {
    sock.join(roomId);
    console.log('joinRoom - room id: ' + roomId);
  })
  // Broadcast that someone has started a call in the room
  sock.on('callRemote', (roomId) => {
    io.to(roomId).emit('callRemote')
  })
  // Broadcast that the callee accepted the call
  sock.on('acceptCall', (roomId) => {
    io.to(roomId).emit('acceptCall')
  })
  // Relay the offer
  sock.on('sendOffer', ({ offer, roomId }) => {
    io.to(roomId).emit('sendOffer', offer)
  })
  // Relay the answer
  sock.on('sendAnswer', ({ answer, roomId }) => {
    io.to(roomId).emit('sendAnswer', answer)
  })
  // Relay ICE candidates
  sock.on('sendCandidate', ({ candidate, roomId }) => {
    io.to(roomId).emit('sendCandidate', candidate)
  })
  // Hang up / end the call
  sock.on('hangUp', (roomId) => {
    io.to(roomId).emit('hangUp')
  })
})

server.listen(3000, () => {
  console.log('Signaling server started');
});
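Since the call is one-to-one, the signaling server could also reject a third participant instead of letting extra sockets join the room. A minimal sketch, assuming socket.io v4 (where io.sockets.adapter.rooms is a Map from room name to the set of joined socket ids); the roomFull event name is invented here and is not part of the original protocol.

// Inside io.on('connection', sock => { ... }), replacing the plain joinRoom handler:
sock.on('joinRoom', (roomId) => {
  const room = io.sockets.adapter.rooms.get(roomId); // Set of socket ids, or undefined
  if (room && room.size >= 2) {
    // The room already has two peers; turn the third participant away.
    sock.emit('roomFull', roomId);
    return;
  }
  sock.join(roomId);
  console.log('joinRoom - room id: ' + roomId);
});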

Full source code on Gitee: https://gitee.com/wade-nian/wdn-webrtc.git

Reference article: 基于WebRTC实现音视频通话 (CSDN blog)

If the camera or microphone cannot be opened during a call and the browser never shows the permission prompt, the page is likely being served from an origin that Chrome does not treat as secure (getUserMedia is only available in secure contexts). You can whitelist the origin as follows; a small error-handling sketch follows the list.

1. In Chrome, open chrome://flags/#unsafely-treat-insecure-origin-as-secure

2. Find "Insecure origins treated as secure"

3. Add your server's address, e.g. http://192.168.1.10:8080

4. Set the option to Enabled

5. Click Relaunch in the bottom-right corner
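Independently of that flag, it helps to catch getUserMedia failures instead of letting the rejected promise fail silently: navigator.mediaDevices is undefined on insecure origins, and permission denials reject with a DOMException. A hedged sketch of a defensive wrapper for the getLocalStream() method above (the alert texts and the getLocalStreamSafe name are mine, not part of the original code):

// Sketch: defensive variant of getLocalStream() from the component above.
async function getLocalStreamSafe() {
  if (!navigator.mediaDevices) {
    // Undefined on insecure origins, which is what the chrome://flags workaround addresses.
    alert('getUserMedia is unavailable: the page is not served from a secure origin.');
    throw new Error('mediaDevices unavailable');
  }
  try {
    return await navigator.mediaDevices.getUserMedia({ audio: true, video: true });
  } catch (err) {
    if (err.name === 'NotAllowedError') {
      alert('Camera/microphone permission was denied.');
    } else if (err.name === 'NotFoundError') {
      alert('No camera or microphone was found.');
    } else {
      alert('Could not open camera/microphone: ' + err.message);
    }
    throw err;
  }
}

Muting the local video preview (localVideo.muted = true) is also worth doing so you do not hear your own microphone played back.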


