
WebRTC: A Simple Video Chat Example

This article walks through a simple example of building a video chat with WebRTC; hopefully it is useful.

Overview of the relevant APIs

Earlier chapters already covered the important background for WebRTC: the network protocols involved, the Session Description Protocol, and how NAT traversal works. What remains is the WebRTC API itself.

WebRTC exposes a large number of communication-related APIs, which mainly cover the following tasks:

- Signaling exchange
- Exchange of candidate addresses
- Audio/video capture
- Sending and receiving audio/video

There are too many APIs to cover exhaustively, so to keep this article from getting too long, parts of it use pseudocode. The full code is at the end of the article and can also be found on the author's GitHub; questions and comments are welcome.

Signaling exchange

Signaling is a key step in WebRTC communication: the peers exchange information such as codecs, network protocols, and candidate addresses. WebRTC deliberately does not specify how signaling is carried out; that is left to the application, which can, for example, use WebSocket.

Sender pseudocode:

const pc = new RTCPeerConnection(iceConfig);
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
sendToPeerViaSignalingServer(SIGNALING_OFFER, offer); // the caller sends the offer over the signaling channel

Receiver pseudocode:

const pc = new RTCPeerConnection(iceConfig);
await pc.setRemoteDescription(offer);
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
sendToPeerViaSignalingServer(SIGNALING_ANSWER, answer); // the callee sends the answer over the signaling channel
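The helper sendToPeerViaSignalingServer used in the pseudocode is not part of WebRTC; it simply pushes the message onto whatever signaling channel the application chose. A minimal sketch over socket.io, assuming the event name and message shape used by the complete code at the end of this article:

// Minimal sketch of the signaling helper, assuming a socket.io connection and
// the CLIENT_RTC_EVENT convention from the complete code below.
const socket = io.connect('http://localhost:3000');

function sendToPeerViaSignalingServer(type, payload) {
    // payload carries the sdp or candidate plus any routing info (e.g. the target user)
    socket.emit('CLIENT_RTC_EVENT', JSON.stringify({ type: type, payload: payload }));
}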

Exchange of candidate addresses

Once the local session description has been set and media has been added, the ICE framework starts gathering candidate addresses. After both sides have gathered their candidates, they need to exchange them so that a suitable candidate pair can be selected.

The candidates are exchanged over the same signaling service mentioned above. Pseudocode:

// Set the local session description
const localPeer = new RTCPeerConnection(iceConfig);
const offer = await localPeer.createOffer();
await localPeer.setLocalDescription(offer);

// Capture local audio/video
const localVideo = document.getElementById('local-video');
const mediaStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
localVideo.srcObject = mediaStream;

// Add the audio/video tracks
mediaStream.getTracks().forEach(track => {
    localPeer.addTrack(track, mediaStream);
});

// Exchange candidate addresses
localPeer.onicecandidate = function(evt) {
    if (evt.candidate) {
        sendToPeerViaSignalingServer(SIGNALING_CANDIDATE, evt.candidate);
    }
};
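The snippet above only sends the local candidates. On the receiving side, each candidate that arrives over the signaling channel is handed to the peer connection with addIceCandidate; a minimal sketch, assuming a hypothetical signalingChannel object that delivers the parsed message:

// Hypothetical receiving side: hand each remote candidate to the connection.
signalingChannel.on(SIGNALING_CANDIDATE, async function(msg) {
    try {
        await localPeer.addIceCandidate(msg.payload.candidate);
    } catch (err) {
        console.error('addIceCandidate failed:', err);
    }
});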

Audio/video capture

The browser's getUserMedia API can be used to capture local audio and video.

const localVideo = document.getElementById('local-video');
const mediaStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
localVideo.srcObject = mediaStream;
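Note that getUserMedia returns a promise that rejects when the user denies access or no matching device exists, so in real code it is worth guarding the call; a small sketch (error handling is not covered in the original example):

// Hedged example: guard the capture call; the promise rejects (e.g. NotAllowedError)
// when permission is denied or no camera/microphone is available.
async function captureLocalMedia() {
    try {
        return await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
    } catch (err) {
        console.error('getUserMedia failed:', err.name, err.message);
        return null;
    }
}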

Sending and receiving audio/video

The captured audio and video tracks are added with addTrack, which sends them to the remote peer.

mediaStream.getTracks().forEach(track => {
    localPeer.addTrack(track, mediaStream);
});
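Calling addTrack on the connection (re)starts negotiation: the negotiationneeded event fires, and that handler is where the offer is created and pushed through the signaling channel, as in the complete client code below. A condensed sketch:

// Condensed from the complete client code below: addTrack triggers
// negotiationneeded, where the offer is created and sent to the remote peer.
localPeer.onnegotiationneeded = async function() {
    const offer = await localPeer.createOffer();
    await localPeer.setLocalDescription(offer);
    sendToPeerViaSignalingServer(SIGNALING_OFFER, localPeer.localDescription);
};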

The remote side listens for the ontrack event to be notified when audio/video arrives, and plays it.

remotePeer.ontrack = function(evt) {
    const remoteVideo = document.getElementById('remote-video');
    remoteVideo.srcObject = evt.streams[0];
};

Complete code

It consists of two parts: the client code and the server code.

1. Client code

const socket = io.connect('http://localhost:3000');

const CLIENT_RTC_EVENT = 'CLIENT_RTC_EVENT';
const SERVER_RTC_EVENT = 'SERVER_RTC_EVENT';

const CLIENT_USER_EVENT = 'CLIENT_USER_EVENT';
const SERVER_USER_EVENT = 'SERVER_USER_EVENT';

const CLIENT_USER_EVENT_LOGIN = 'CLIENT_USER_EVENT_LOGIN'; // login
const SERVER_USER_EVENT_UPDATE_USERS = 'SERVER_USER_EVENT_UPDATE_USERS';

const SIGNALING_OFFER = 'SIGNALING_OFFER';
const SIGNALING_ANSWER = 'SIGNALING_ANSWER';
const SIGNALING_CANDIDATE = 'SIGNALING_CANDIDATE';

let remoteUser = ''; // remote user
let localUser = ''; // locally logged-in user

function log(msg) {
    console.log(`[client] ${msg}`);
}

socket.on('connect', function() {
    log('ws connect.');
});

socket.on('connect_error', function() {
    log('ws connect_error.');
});

socket.on('error', function(errorMessage) {
    log('ws error, ' + errorMessage);
});

socket.on(SERVER_USER_EVENT, function(msg) {
    const type = msg.type;
    const payload = msg.payload;
    switch (type) {
        case SERVER_USER_EVENT_UPDATE_USERS:
            updateUserList(payload);
            break;
    }
    log(`[${SERVER_USER_EVENT}] [${type}], ${JSON.stringify(msg)}`);
});

socket.on(SERVER_RTC_EVENT, function(msg) {
    const { type } = msg;
    switch (type) {
        case SIGNALING_OFFER:
            handleReceiveOffer(msg);
            break;
        case SIGNALING_ANSWER:
            handleReceiveAnswer(msg);
            break;
        case SIGNALING_CANDIDATE:
            handleReceiveCandidate(msg);
            break;
    }
});

async function handleReceiveOffer(msg) {
    log(`receive remote description from ${msg.payload.from}`);

    // Set the remote description
    const remoteDescription = new RTCSessionDescription(msg.payload.sdp);
    remoteUser = msg.payload.from;
    createPeerConnection();
    await pc.setRemoteDescription(remoteDescription); // TODO: error handling

    // Capture local audio/video
    const localVideo = document.getElementById('local-video');
    const mediaStream = await navigator.mediaDevices.getUserMedia({
        video: true,
        audio: true
    });
    localVideo.srcObject = mediaStream;

    mediaStream.getTracks().forEach(track => {
        pc.addTrack(track, mediaStream);
        // pc.addTransceiver(track, {streams: [mediaStream]}); // this also works
    });
    // pc.addStream(mediaStream); // still works for now, but the API is being deprecated

    const answer = await pc.createAnswer(); // TODO: error handling
    await pc.setLocalDescription(answer);

    sendRTCEvent({
        type: SIGNALING_ANSWER,
        payload: {
            sdp: answer,
            from: localUser,
            target: remoteUser
        }
    });
}

async function handleReceiveAnswer(msg) {
    log(`receive remote answer from ${msg.payload.from}`);
    const remoteDescription = new RTCSessionDescription(msg.payload.sdp);
    remoteUser = msg.payload.from;
    await pc.setRemoteDescription(remoteDescription); // TODO: error handling
}

async function handleReceiveCandidate(msg) {
    log(`receive candidate from ${msg.payload.from}`);
    await pc.addIceCandidate(msg.payload.candidate); // TODO: error handling
}

/**
 * Send a user-related message to the server
 * @param {Object} msg In the form { type: 'xx', payload: {} }
 */
function sendUserEvent(msg) {
    socket.emit(CLIENT_USER_EVENT, JSON.stringify(msg));
}

/**
 * Send an RTC-related message to the server
 * @param {Object} msg In the form { type: 'xx', payload: {} }
 */
function sendRTCEvent(msg) {
    socket.emit(CLIENT_RTC_EVENT, JSON.stringify(msg));
}

let pc = null;

/**
 * Invite a user to a video chat:
 *   1. start local video capture
 *   2. exchange signaling
 */
async function startVideoTalk() {
    // Start local video
    const localVideo = document.getElementById('local-video');
    const mediaStream = await navigator.mediaDevices.getUserMedia({
        video: true,
        audio: true
    });
    localVideo.srcObject = mediaStream;

    // Create the peer connection
    createPeerConnection();

    // Add the media tracks to the WebRTC transceivers
    mediaStream.getTracks().forEach(track => {
        pc.addTrack(track, mediaStream);
        // pc.addTransceiver(track, {streams: [mediaStream]});
    });
    // pc.addStream(mediaStream); // still works for now, but the API is being deprecated
}

function createPeerConnection() {
    const iceConfig = {
        "iceServers": [
            { url: 'stun:stun.ekiga.net' },
            { url: 'turn:turnserver.com', username: 'user', credential: 'pass' }
        ]
    };

    pc = new RTCPeerConnection(iceConfig);

    pc.onnegotiationneeded = onnegotiationneeded;
    pc.onicecandidate = onicecandidate;
    pc.onicegatheringstatechange = onicegatheringstatechange;
    pc.oniceconnectionstatechange = oniceconnectionstatechange;
    pc.onsignalingstatechange = onsignalingstatechange;
    pc.ontrack = ontrack;

    return pc;
}

async function onnegotiationneeded() {
    log(`onnegotiationneeded.`);

    const offer = await pc.createOffer();
    await pc.setLocalDescription(offer); // TODO: error handling

    sendRTCEvent({
        type: SIGNALING_OFFER,
        payload: {
            from: localUser,
            target: remoteUser,
            sdp: pc.localDescription // TODO: could the offer be used directly?
        }
    });
}

function onicecandidate(evt) {
    if (evt.candidate) {
        log(`onicecandidate.`);

        sendRTCEvent({
            type: SIGNALING_CANDIDATE,
            payload: {
                from: localUser,
                target: remoteUser,
                candidate: evt.candidate
            }
        });
    }
}

function onicegatheringstatechange(evt) {
    log(`onicegatheringstatechange, pc.iceGatheringState is ${pc.iceGatheringState}.`);
}

function oniceconnectionstatechange(evt) {
    log(`oniceconnectionstatechange, pc.iceConnectionState is ${pc.iceConnectionState}.`);
}

function onsignalingstatechange(evt) {
    log(`onsignalingstatechange, pc.signalingState is ${pc.signalingState}.`);
}

// After pc.addTrack(track, mediaStream) is called, the remote peer's ontrack fires twice;
// both times evt.streams[0] points to the same MediaStream reference.
// The behaviour looks a bit odd; it is also mentioned in this GitHub issue:
// https://github.com/meetecho/janus-gateway/issues/1313
let stream;
function ontrack(evt) {
    // if (!stream) {
    //     stream = evt.streams[0];
    // } else {
    //     console.log(`${stream === evt.streams[0]}`); // true here
    // }
    log(`ontrack.`);
    const remoteVideo = document.getElementById('remote-video');
    remoteVideo.srcObject = evt.streams[0];
}

// Click handler for the online user list
async function handleUserClick(evt) {
    const target = evt.target;
    const userName = target.getAttribute('data-name').trim();

    if (userName === localUser) {
        alert('You cannot start a video call with yourself.');
        return;
    }

    log(`online user selected: ${userName}`);

    remoteUser = userName;
    await startVideoTalk(remoteUser);
}

/**
 * Update the user list
 * @param {Array} users List of users, e.g. [{userName: 'alice'}, {userName: 'bob'}]
 */
function updateUserList(users) {
    const fragment = document.createDocumentFragment();
    const userList = document.getElementById('login-users');
    userList.innerHTML = '';

    users.forEach(user => {
        const li = document.createElement('li');
        li.innerHTML = user.userName;
        li.setAttribute('data-name', user.userName);
        li.addEventListener('click', handleUserClick);
        fragment.appendChild(li);
    });

    userList.appendChild(fragment);
}

/**
 * User login
 * @param {String} loginName The user name
 */
function login(loginName) {
    localUser = loginName;

    sendUserEvent({
        type: CLIENT_USER_EVENT_LOGIN,
        payload: {
            loginName: loginName
        }
    });
}

// Handle the login button click
function handleLogin(evt) {
    let loginName = document.getElementById('login-name').value.trim();

    if (loginName === '') {
        alert('The user name is empty!');
        return;
    }

    login(loginName);
}

function init() {
    document.getElementById('login-btn').addEventListener('click', handleLogin);
}

init();
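The client code assumes a page with a text input (login-name), a login button (login-btn), a list element for online users (login-users), and two video elements (local-video and remote-video); the markup itself is not included in this article.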

2. Server code

// Attach the WebSocket (socket.io) service to the HTTP server
const io = require('socket.io')(server);

let connectionList = [];

const CLIENT_RTC_EVENT = 'CLIENT_RTC_EVENT';
const SERVER_RTC_EVENT = 'SERVER_RTC_EVENT';
const CLIENT_USER_EVENT = 'CLIENT_USER_EVENT';
const SERVER_USER_EVENT = 'SERVER_USER_EVENT';
const CLIENT_USER_EVENT_LOGIN = 'CLIENT_USER_EVENT_LOGIN';
const SERVER_USER_EVENT_UPDATE_USERS = 'SERVER_USER_EVENT_UPDATE_USERS';

function getOnlineUser() {
    return connectionList
        .filter(item => {
            return item.userName !== '';
        })
        .map(item => {
            return {
                userName: item.userName
            };
        });
}

function setUserName(connection, userName) {
    connectionList.forEach(item => {
        if (item.connection.id === connection.id) {
            item.userName = userName;
        }
    });
}

function updateUsers(connection) {
    connection.emit(SERVER_USER_EVENT, { type: SERVER_USER_EVENT_UPDATE_USERS, payload: getOnlineUser() });
}

io.on('connection', function (connection) {
    connectionList.push({
        connection: connection,
        userName: ''
    });

    // Push the online user list to the newly connected client
    updateUsers(connection);

    connection.on(CLIENT_USER_EVENT, function(jsonString) {
        const msg = JSON.parse(jsonString);
        const { type, payload } = msg;

        if (type === CLIENT_USER_EVENT_LOGIN) {
            setUserName(connection, payload.loginName);
            connectionList.forEach(item => {
                updateUsers(item.connection);
            });
        }
    });

    connection.on(CLIENT_RTC_EVENT, function(jsonString) {
        const msg = JSON.parse(jsonString);
        const { payload } = msg;

        const target = payload.target;
        const targetConn = connectionList.find(item => {
            return item.userName === target;
        });

        if (targetConn) {
            targetConn.connection.emit(SERVER_RTC_EVENT, msg);
        }
    });

    connection.on('disconnect', function () {
        connectionList = connectionList.filter(item => {
            return item.connection.id !== connection.id;
        });
        connectionList.forEach(item => {
            updateUsers(item.connection);
        });
    });
});
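The server snippet attaches socket.io to an existing HTTP server instance named server, which is not shown in the original article. A minimal, hypothetical bootstrap might look like this:

// Hypothetical bootstrap (not from the original article): create the HTTP server
// that socket.io attaches to, and serve the client page from ./public.
const http = require('http');
const express = require('express');

const app = express();
app.use(express.static('public')); // assumed location of the client page

const server = http.createServer(app);
server.listen(3000, function () {
    console.log('[server] listening on http://localhost:3000');
});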

Closing notes

WebRTC exposes a lot of APIs because WebRTC itself is fairly complex. Over time, some of its APIs (and some protocol details) have changed or been deprecated, and backwards compatibility adds further complexity. For example, locally captured video can be attached to the connection with addStream, addTrack, or addTransceiver, and the session description format has migrated from plan-b to unified-plan.
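For reference, the three variants mentioned above, which also appear as comments in the client code (only one of them should be used at a time):

// Attaching local media: three variants.
mediaStream.getTracks().forEach(track => {
    pc.addTrack(track, mediaStream);                           // current, widely supported
    // pc.addTransceiver(track, { streams: [mediaStream] });   // alternative with more control
});
// pc.addStream(mediaStream);                                  // legacy, being deprecated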

It is worth writing the code yourself at least once to deepen your understanding.

Related links

2019.08.02-video-talk-using-webrtc

https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection

onremotestream called twice for each remote stream

Original article: https://segmentfault.com/a/1190000019970102
