Prerequisites:
- A signaling service
- A front-end page for the video call
Demo GitHub repository.
Front-end page
To keep the demo as simple as possible, the page only provides two functions: logging in and calling the other party by their phone number. In a real production system the identifier does not have to be a phone number; it can be any string that identifies a user.
The code is as follows:
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>WebRTC Demo</title>
</head>
<body>
<div style="margin: 20px">
    <label for="loginAccount">登录账号</label><input id="loginAccount" type="text">
    <button onclick="login()">登录</button>
</div>
<div style="margin: 20px">
    <video id="localVideo" autoplay muted controls height="360px"></video>
    <video id="remoteVideo" autoplay controls height="360px"></video>
</div>
<div style="margin: 20px">
    <label for="toAccount">对方账号</label>
    <input id="toAccount" type="text">
    <button id="requestVideo">请求视频通话</button>
</div>
<div style="margin: 20px">
    <fieldset>
        <button id="accept">接通</button>
        <button id="hangup">挂断</button>
    </fieldset>
</div>
<div style="margin: 20px">
    <fieldset>
        <div>
            录制格式: <select id="codecPreferences" disabled></select>
        </div>
        <button id="startRecord">开始录制视频</button>
        <button id="stopRecord">停止录制视频</button>
        <button id="downloadRecord">下载</button>
    </fieldset>
</div>
</body>
<script>
let config = {
iceServers: [
{
'urls': 'turn:turn.wildfirechat.cn:3478',
'credential': 'wfchat',
'username': 'wfchat'
}
]
}
const localVideo = document.getElementById('localVideo');
const remoteVideo = document.getElementById('remoteVideo');
const requestVideoButton = document.getElementById('requestVideo');
const acceptButton = document.getElementById('accept');
const hangupButton = document.getElementById('hangup');
const codecPreferences = document.querySelector('#codecPreferences');
const recordButton = document.getElementById('startRecord')
const stopRecordButton = document.getElementById('stopRecord')
const downloadButton = document.getElementById('downloadRecord')
const wsAddress = 'ws://localhost:9113/ws';
let loginAttemptCount = 0;
let myId, toId;
let pc, localStream, ws;
let mediaRecorder;
let recordedBlobs;
function login() {
loginAttemptCount = 0;
myId = document.getElementById('loginAccount').value;
ws = new WebSocket(wsAddress);
ws.onopen = function () {
console.log("WebSocket is open now.");
connect();
alert("登录成功");
};
ws.onmessage = function (message) {
let msg = JSON.parse(message.data);
console.log("ws 收到消息:" + msg.type);
switch (msg.type) {
case "offline": {
if (loginAttemptCount < 10) {
setTimeout(() => {
loginAttemptCount++;
watch();
}, 1000);
}
break;
}
case "watch": {
handleWatch(msg);
break;
}
case "offer": {
handleOffer(msg);
break;
}
case "answer": {
handleAnswer(msg);
break;
}
case "candidate": {
handleCandidate(msg);
break;
}
case "hangup": {
handleHangup(msg);
break;
}
}
};
}
requestVideoButton.onclick = async () => {
toId = document.getElementById('toAccount').value;
if (!myId) {
alert('请先登录');
return;
}
if (!toId) {
alert('请输入对方手机号');
return;
}
watch();
localStream = await navigator.mediaDevices.getUserMedia({audio: true, video: true});
localVideo.srcObject = localStream;
createPeerConnection();
}
function connect() {
send({
type: "connect",
from: myId
});
}
function handleWatch(msg) {
toId = msg.from;
}
acceptButton.onclick = async () => {
localStream = await navigator.mediaDevices.getUserMedia({audio: true, video: true});
localVideo.srcObject = localStream;
createPeerConnection();
pc.createOffer().then(offer => {
pc.setLocalDescription(offer);
send({
type: 'offer',
from: myId,
to: toId,
data: offer
});
});
}
function handleOffer(msg) {
    if (!pc) {
        console.error('no peer connection');
        return;
    }
    // Apply the remote offer first, then create and send back the answer
    pc.setRemoteDescription(msg.data)
        .then(() => pc.createAnswer())
        .then(answer => {
            pc.setLocalDescription(answer);
            send({
                type: "answer",
                from: myId,
                to: toId,
                data: answer
            });
        });
}
function watch() {
send({
type: 'watch',
from: myId,
to: toId
});
}
function handleAnswer(msg) {
if (!pc) {
console.error('no peer connection');
return;
}
pc.setRemoteDescription(msg.data);
}
function handleCandidate(msg) {
if (!pc) {
console.error('no peer connection');
return;
}
pc.addIceCandidate(new RTCIceCandidate(msg.data)).then(() => {
console.log('candidate添加成功')
}).catch(handleError)
}
function handleError(error) {
console.log(error);
}
function createPeerConnection() {
pc = new RTCPeerConnection(config);
pc.onicecandidate = e => {
if (e.candidate) {
send({
type: "candidate",
from: myId,
to: toId,
data: e.candidate
});
}
};
pc.ontrack = e => remoteVideo.srcObject = e.streams[0];
localStream.getTracks().forEach(track => pc.addTrack(track, localStream));
}
hangupButton.onclick = async () => {
if (pc) {
pc.close();
pc = null;
}
if (localStream) {
localStream.getTracks().forEach(track => track.stop());
localStream = null;
}
send({
type: "hangup",
from: myId,
to: toId
});
}
function handleHangup() {
if (!pc) {
console.error('no peer connection');
return;
}
pc.close();
pc = null;
if (localStream) {
localStream.getTracks().forEach(track => track.stop());
localStream = null;
}
console.log('hangup');
}
function send(msg) {
ws.send(JSON.stringify(msg));
}
function getSupportedMimeTypes() {
const possibleTypes = [
'video/webm;codecs=vp9,opus',
'video/webm;codecs=vp8,opus',
'video/webm;codecs=h264,opus',
'video/mp4;codecs=h264,aac',
];
return possibleTypes.filter(mimeType => {
return MediaRecorder.isTypeSupported(mimeType);
});
}
function startRecording() {
    recordedBlobs = [];
    // Populate the codec list only once, so repeated recordings don't add duplicate options
    if (codecPreferences.options.length === 0) {
        getSupportedMimeTypes().forEach(mimeType => {
            const option = document.createElement('option');
            option.value = mimeType;
            option.innerText = option.value;
            codecPreferences.appendChild(option);
        });
    }
    const mimeType = codecPreferences.options[codecPreferences.selectedIndex].value;
    const options = {mimeType};
    try {
        // Record the remote stream currently attached to the remote <video> element
        mediaRecorder = new MediaRecorder(remoteVideo.srcObject, options);
    } catch (e) {
        console.error('Exception while creating MediaRecorder:', e);
        alert('Exception while creating MediaRecorder: ' + e);
        return;
    }
    console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
mediaRecorder.onstop = (event) => {
console.log('Recorder stopped: ', event);
console.log('Recorded Blobs: ', recordedBlobs);
};
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.start();
console.log('MediaRecorder started', mediaRecorder);
}
function handleDataAvailable(event) {
console.log('handleDataAvailable', event);
if (event.data && event.data.size > 0) {
recordedBlobs.push(event.data);
}
}
function stopRecording() {
mediaRecorder.stop();
}
function download() {
const blob = new Blob(recordedBlobs, {type: 'video/webm'});
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = 'test.webm';
document.body.appendChild(a);
a.click();
    setTimeout(() => {
        document.body.removeChild(a);
        window.URL.revokeObjectURL(url);
    }, 100);
}
// Wire up the recording controls to the buttons looked up at the top of the script
recordButton.onclick = startRecording;
stopRecordButton.onclick = stopRecording;
downloadButton.onclick = download;
</script>
</html>
Signaling service
Built with JDK 1.8, Spring Boot, and Netty, it mainly solves two problems:
- Identifying the participants, i.e. the person placing the video call and the person answering it
- Relaying the signaling behind the UI buttons, such as initiating a video call, hanging up, and the WebRTC messages needed to establish the media channel
The core logic is as follows:
switch (event.getType()) {
    // Login: remember which channel belongs to this account
    case "connect": {
        USER_MAP.put(event.getFrom(), ctx);
        break;
    }
    // Call request: forward to the callee if online, otherwise tell the caller "offline"
    case "watch": {
        WebRtcEvent watchRequest = new WebRtcEvent();
        if (USER_MAP.containsKey(event.getTo())) {
            watchRequest.setType("watch");
            watchRequest.setFrom(event.getFrom());
            watchRequest.setTo(event.getTo());
            USER_MAP.get(event.getTo()).writeAndFlush(new TextWebSocketFrame(JSONObject.toJSONString(watchRequest)));
        } else {
            watchRequest.setType("offline");
            USER_MAP.get(event.getFrom()).writeAndFlush(new TextWebSocketFrame(JSONObject.toJSONString(watchRequest)));
        }
        break;
    }
    // SDP offer from the side that clicked "接通": relay it to the other party
    case "offer": {
        WebRtcEvent offerRequest = new WebRtcEvent();
        offerRequest.setType("offer");
        offerRequest.setFrom(event.getFrom());
        offerRequest.setTo(event.getTo());
        offerRequest.setData(event.getData());
        USER_MAP.get(event.getTo()).writeAndFlush(new TextWebSocketFrame(JSONObject.toJSONString(offerRequest)));
        break;
    }
    // SDP answer: relay it back to the offerer
    case "answer": {
        WebRtcEvent answerRequest = new WebRtcEvent();
        answerRequest.setType("answer");
        answerRequest.setFrom(event.getFrom());
        answerRequest.setData(event.getData());
        USER_MAP.get(event.getTo()).writeAndFlush(new TextWebSocketFrame(JSONObject.toJSONString(answerRequest)));
        break;
    }
    // ICE candidate: relay it to the other party
    case "candidate": {
        WebRtcEvent candidateRequest = new WebRtcEvent();
        candidateRequest.setType("candidate");
        candidateRequest.setFrom(event.getFrom());
        candidateRequest.setData(event.getData());
        USER_MAP.get(event.getTo()).writeAndFlush(new TextWebSocketFrame(JSONObject.toJSONString(candidateRequest)));
        break;
    }
    // Hang up: notify the other party so it can close its peer connection
    case "hangup": {
        WebRtcEvent hangupRequest = new WebRtcEvent();
        hangupRequest.setType("hangup");
        hangupRequest.setFrom(event.getFrom());
        hangupRequest.setTo(event.getTo());
        USER_MAP.get(event.getTo()).writeAndFlush(new TextWebSocketFrame(JSONObject.toJSONString(hangupRequest)));
        break;
    }
}
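For reference, the WebRtcEvent used above is just a plain message object. Its fields can be inferred from the getters and setters in the switch; the sketch below is an approximation, and the actual class in the demo may differ:
// Approximate shape of the signaling message, inferred from the switch above
public class WebRtcEvent {
    private String type;  // connect / watch / offline / offer / answer / candidate / hangup
    private String from;  // sender account
    private String to;    // target account
    private Object data;  // SDP offer/answer or ICE candidate payload

    public String getType() { return type; }
    public void setType(String type) { this.type = type; }
    public String getFrom() { return from; }
    public void setFrom(String from) { this.from = from; }
    public String getTo() { return to; }
    public void setTo(String to) { this.to = to; }
    public Object getData() { return data; }
    public void setData(Object data) { this.data = data; }
}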
connect -> Login
This corresponds to the "登录" (Login) button on the HTML page: after entering a phone number and clicking Login, the account is stored in a map in the signaling service for later use.
At least two clients must be logged in before a video call can take place.
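The demo only shows USER_MAP being written to in the connect case. A minimal sketch of the surrounding Netty handler is shown below; the class name, the ConcurrentHashMap declaration, and the channelInactive cleanup are assumptions for illustration, not code taken from the demo:
import com.alibaba.fastjson.JSONObject;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical handler class; the demo's real class and names may differ
public class SignalingHandler extends SimpleChannelInboundHandler<TextWebSocketFrame> {

    // Account -> channel of the logged-in client, written by the "connect" case
    private static final Map<String, ChannelHandlerContext> USER_MAP = new ConcurrentHashMap<>();

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, TextWebSocketFrame frame) {
        WebRtcEvent event = JSONObject.parseObject(frame.text(), WebRtcEvent.class);
        // ... the switch on event.getType() shown above goes here ...
    }

    // Assumed cleanup: drop the mapping when a client disconnects, so later
    // events are not forwarded to a dead channel
    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        USER_MAP.values().remove(ctx);
        super.channelInactive(ctx);
    }
}
With this in place, the connect case simply records which channel belongs to which account, and every later event can be routed by account.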
watch -> Request a video call
When the "请求视频通话" (request video call) button is clicked, the front end sends a watch event to the signaling service with the following structure:
{
    type: 'watch',        // event type
    from: '13789122381',  // my account, e.g. 13789122381
    to: '1323493929'      // the other party's account, e.g. 1323493929
}
The account entered on the page goes into the "to" field.
When the signaling server receives the watch event, it looks the target up in the map: if that client is online, the event is forwarded to it; otherwise the server replies with an offline event, and the caller retries the watch once per second, up to ten times.
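Since the offer, answer, candidate, and hangup cases all repeat the same look-up-and-forward step, they could share a small helper that also guards against the target having gone offline. This is a hypothetical refactoring, not code from the demo:
// Hypothetical helper: forward an event to the target account if it is online
private void forward(String to, WebRtcEvent event) {
    ChannelHandlerContext target = USER_MAP.get(to);
    if (target == null) {
        // Target not logged in (or already disconnected); without this check the
        // demo's switch would throw a NullPointerException here
        return;
    }
    target.writeAndFlush(new TextWebSocketFrame(JSONObject.toJSONString(event)));
}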
offer -> Accept
On the receiving side, clicking the "接通" (accept) button starts the WebRTC negotiation that establishes the media channel.
The offer message has the following JSON structure:
{
type: 'offer',
from: myId,
to: toId,
data: offer
}
The overall call flow is: the caller sends a watch request; the callee clicks accept, captures its camera and microphone, and sends an offer; the caller replies with an answer; both sides then exchange ICE candidates until the peer connection is established.
Summary
The HTML page also needs the address of a coturn TURN server. The address used in this demo is only a test address, so please do not use it in production.