WebRTC基础与音视频通信
约 1539 字 · 大约 5 分钟
webrtc · video
2025-08-02
概述
WebRTC(Web Real-Time Communication)是一组浏览器原生支持的 API,使 Web 应用能够进行实时音视频通信和数据传输,无需安装插件。WebRTC 支持点对点(P2P)连接,广泛应用于视频会议、直播、屏幕共享、文件传输等场景。
WebRTC 连接建立流程
1. Signaling(信令)
WebRTC 本身不定义信令协议,开发者可以使用 WebSocket、HTTP、Socket.IO 等任何通信方式来交换连接信息。
// 信令服务器(使用 WebSocket)
const signalingSocket = new WebSocket('wss://signal.example.com');
signalingSocket.onmessage = async (event) => {
const message = JSON.parse(event.data);
switch (message.type) {
case 'offer':
await handleOffer(message);
break;
case 'answer':
await handleAnswer(message);
break;
case 'ice-candidate':
await handleIceCandidate(message);
break;
}
};
function sendSignaling(message) {
signalingSocket.send(JSON.stringify(message));
}2. ICE / STUN / TURN
- STUN(Session Traversal Utilities for NAT):帮助发现公网 IP 和端口,实现 NAT 穿透
- TURN(Traversal Using Relays around NAT):当直接连接失败时,通过中继服务器转发数据
- ICE(Interactive Connectivity Establishment):框架协议,自动尝试所有可能的连接路径
// ICE 服务器配置
const configuration = {
iceServers: [
{ urls: 'stun:stun.l.google.com:19302' }, // 免费 STUN
{ urls: 'stun:stun1.l.google.com:19302' },
{
urls: 'turn:turn.example.com:3478', // TURN 需要认证
username: 'user',
credential: 'pass'
},
{
urls: 'turns:turn.example.com:5349', // TURN over TLS
username: 'user',
credential: 'pass'
}
],
iceCandidatePoolSize: 10, // 预先收集 ICE 候选
};3. SDP(Session Description Protocol)
SDP 描述了媒体会话的参数,包括编解码器、带宽、传输协议等。
// SDP 示例(简化)
v=0
o=- 123456 2 IN IP4 127.0.0.1
s=-
t=0 0
a=group:BUNDLE audio video
m=audio 9 UDP/TLS/RTP/SAVPF 111 103
a=rtpmap:111 opus/48000/2
a=mid:audio
m=video 9 UDP/TLS/RTP/SAVPF 96 97
a=rtpmap:96 VP8/90000
a=rtpmap:97 H264/90000
a=mid:video
4. RTCPeerConnection
RTCPeerConnection 是 WebRTC 的核心 API,管理 P2P 连接。
// 完整的连接建立示例
class WebRTCClient {
constructor(signalingSocket) {
this.signaling = signalingSocket;
this.pc = null;
this.localStream = null;
this.remoteStream = new MediaStream();
}
async init() {
this.pc = new RTCPeerConnection(configuration);
// 监听 ICE 候选
this.pc.onicecandidate = (event) => {
if (event.candidate) {
this.signaling.send(JSON.stringify({
type: 'ice-candidate',
candidate: event.candidate
}));
}
};
// 监听远程媒体轨道
this.pc.ontrack = (event) => {
event.streams[0].getTracks().forEach(track => {
this.remoteStream.addTrack(track);
});
document.getElementById('remoteVideo').srcObject = this.remoteStream;
};
// 监听连接状态
this.pc.onconnectionstatechange = () => {
console.log('Connection state:', this.pc.connectionState);
// new → connecting → connected → disconnected → failed → closed
};
this.pc.oniceconnectionstatechange = () => {
console.log('ICE state:', this.pc.iceConnectionState);
};
}
// 获取本地媒体流
async getLocalMedia() {
this.localStream = await navigator.mediaDevices.getUserMedia({
video: {
width: { ideal: 1280 },
height: { ideal: 720 },
frameRate: { ideal: 30 }
},
audio: {
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true
}
});
document.getElementById('localVideo').srcObject = this.localStream;
// 将轨道添加到连接
this.localStream.getTracks().forEach(track => {
this.pc.addTrack(track, this.localStream);
});
}
// 发起呼叫
async call() {
const offer = await this.pc.createOffer({
offerToReceiveAudio: true,
offerToReceiveVideo: true
});
await this.pc.setLocalDescription(offer);
this.signaling.send(JSON.stringify({ type: 'offer', sdp: offer }));
}
// 接受呼叫
async handleOffer(offer) {
await this.pc.setRemoteDescription(new RTCSessionDescription(offer));
const answer = await this.pc.createAnswer();
await this.pc.setLocalDescription(answer);
this.signaling.send(JSON.stringify({ type: 'answer', sdp: answer }));
}
async handleAnswer(answer) {
await this.pc.setRemoteDescription(new RTCSessionDescription(answer));
}
async handleIceCandidate(candidate) {
await this.pc.addIceCandidate(new RTCIceCandidate(candidate));
}
}5. MediaStream API
// 获取摄像头和麦克风
const stream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
// 枚举可用设备
const devices = await navigator.mediaDevices.enumerateDevices();
const cameras = devices.filter(d => d.kind === 'videoinput');
const mics = devices.filter(d => d.kind === 'audioinput');
// 切换摄像头
async function switchCamera(deviceId) {
const newStream = await navigator.mediaDevices.getUserMedia({
video: { deviceId: { exact: deviceId } }
});
const videoTrack = newStream.getVideoTracks()[0];
const sender = pc.getSenders().find(s => s.track.kind === 'video');
await sender.replaceTrack(videoTrack);
}
// 静音/取消静音
function toggleMute() {
const audioTrack = localStream.getAudioTracks()[0];
audioTrack.enabled = !audioTrack.enabled;
}
// 开关摄像头
function toggleCamera() {
const videoTrack = localStream.getVideoTracks()[0];
videoTrack.enabled = !videoTrack.enabled;
}6. Data Channels
RTCDataChannel 提供低延迟的点对点数据传输。
// 发起方创建数据通道
const dataChannel = pc.createDataChannel('chat', {
ordered: true, // 是否保证顺序
maxRetransmits: 3, // 最大重传次数(与 maxPacketLifeTime 互斥)
});
dataChannel.onopen = () => {
console.log('Data channel opened');
dataChannel.send('Hello peer!');
};
dataChannel.onmessage = (event) => {
console.log('Received:', event.data);
};
dataChannel.onclose = () => console.log('Data channel closed');
// 接收方监听数据通道
pc.ondatachannel = (event) => {
const channel = event.channel;
channel.onmessage = (e) => console.log('Received:', e.data);
};
// 发送二进制数据
const buffer = new ArrayBuffer(1024);
dataChannel.send(buffer);
// 发送文件
async function sendFile(file) {
const reader = file.stream().getReader();
while (true) {
const { done, value } = await reader.read();
if (done) break;
// 检查缓冲区
while (dataChannel.bufferedAmount > 65535) {
await new Promise(r => setTimeout(r, 10));
}
dataChannel.send(value);
}
dataChannel.send('EOF');
}7. 屏幕共享
async function shareScreen() {
const screenStream = await navigator.mediaDevices.getDisplayMedia({
video: {
cursor: 'always',
displaySurface: 'monitor' // monitor / window / browser
},
audio: true // 系统音频(部分浏览器支持)
});
// 替换视频轨道
const screenTrack = screenStream.getVideoTracks()[0];
const sender = pc.getSenders().find(s => s.track.kind === 'video');
await sender.replaceTrack(screenTrack);
// 监听用户停止共享
screenTrack.onended = async () => {
const cameraTrack = localStream.getVideoTracks()[0];
await sender.replaceTrack(cameraTrack);
};
}8. SFU / MCU 架构
| 架构 | 优点 | 缺点 | 适用场景 |
|---|---|---|---|
| P2P Mesh | 无需服务器 | 带宽随参与者指数增长 | 2-4 人通话 |
| SFU | 服务器不编解码,延迟低 | 客户端需处理多路流 | 视频会议(5-50人) |
| MCU | 客户端带宽需求低 | 服务器 CPU 密集 | 大型会议、录制 |
编解码器选择
// Prefer a specific codec by reordering the codec capability list.
const transceiver = pc.addTransceiver('video', {
  direction: 'sendrecv'
});
const codecs = RTCRtpSender.getCapabilities('video').codecs;

// Put H.264 first (broad hardware encode/decode support).
const h264Codecs = codecs.filter(c =>
  c.mimeType === 'video/H264'
);
const otherCodecs = codecs.filter(c =>
  c.mimeType !== 'video/H264'
);
transceiver.setCodecPreferences([...h264Codecs, ...otherCodecs]);

// Common codecs:
//   video: VP8, VP9, H.264, AV1
//   audio: Opus (recommended), G.711, AAC
总结
WebRTC 通过信令交换(SDP Offer/Answer)、ICE/STUN/TURN 网络穿透和 RTCPeerConnection 管理,实现了浏览器原生的实时音视频通信。MediaStream API 提供摄像头/麦克风/屏幕访问,Data Channel 支持低延迟数据传输。在多人场景中,SFU 架构是目前最常用的方案,它在服务器负载和客户端体验之间取得了良好平衡。理解 WebRTC 的完整连接流程和各组件的职责,是构建实时通信应用的基础。
贡献者
更新日志
2026/3/14 13:09
查看所有更新日志
9f6c2-feat: organize wiki content and refresh site setup