1. Library Integration
First, make sure the dependency is added to your Podfile:
pod 'GoogleWebRTC'
Then run pod install to install the library.
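For reference, a complete minimal Podfile might look like the following; the platform version and target name are placeholders to adjust for your project:
platform :ios, '12.0'          # deployment target is a placeholder

target 'YourApp' do            # replace with your app target name
  pod 'GoogleWebRTC'
end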
2. Code Example
2.1 Permission configuration: add camera and microphone usage descriptions to Info.plist
<!-- Add the following keys to Info.plist -->
<key>NSCameraUsageDescription</key>
<string>Camera access is needed for video calls</string>
<key>NSMicrophoneUsageDescription</key>
<string>Microphone access is needed for voice calls</string>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
2.2 Code
Core class (Objective-C): WebRTCManager.m
#import "WebRTCManager.h"
#import <AVFoundation/AVFoundation.h>
@interface WebRTCManager ()
@property (nonatomic, strong) RTCPeerConnectionFactory *factory;
@property (nonatomic, strong) RTCVideoTrack *localVideoTrack;
@property (nonatomic, strong) RTCAudioTrack *localAudioTrack;
@property (nonatomic, strong) RTCVideoRendererAdapter *localRenderer;
@end
@implementation WebRTCManager
- (instancetype)initWithDelegate:(id<WebRTCManagerDelegate>)delegate {
    self = [super init];
    if (self) {
        _delegate = delegate;
        [self setupPeerConnectionFactory];
        [self setupPeerConnection];
    }
    return self;
}
// Initialize the PeerConnection factory
- (void)setupPeerConnectionFactory {
    RTCInitializeSSL();
    _factory = [[RTCPeerConnectionFactory alloc] init];
}
// Configure and create the PeerConnection
- (void)setupPeerConnection {
    RTCConfiguration *config = [[RTCConfiguration alloc] init];
    config.iceServers = @[
        [[RTCIceServer alloc] initWithURLStrings:@[@"stun:stun.l.google.com:19302"]]
    ];
    RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil optionalConstraints:nil];
    _peerConnection = [_factory peerConnectionWithConfiguration:config constraints:constraints delegate:self];
}
// Set up the local media stream
- (void)setupLocalStreamWithVideoView:(UIView *)videoView {
    // Request camera permission before capturing
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (granted) {
                [self createLocalMediaStream];
                [self setupLocalVideoRender:videoView];
            } else {
                NSLog(@"Camera permission is required for video");
            }
        });
    }];
}
// Create the local media stream
- (void)createLocalMediaStream {
    _localStream = [_factory mediaStreamWithStreamId:@"localStream"];
    // Create the audio track
    RTCAudioSource *audioSource = [_factory audioSourceWithConstraints:[self defaultMediaConstraints]];
    _localAudioTrack = [_factory audioTrackWithSource:audioSource trackId:@"audio0"];
    [_localStream addAudioTrack:_localAudioTrack];
    // Create the video track and cap its output at 1280x720 @ 30fps
    RTCVideoSource *videoSource = [_factory videoSource];
    [videoSource adaptOutputFormatToWidth:1280 height:720 fps:30];
    _localVideoTrack = [_factory videoTrackWithSource:videoSource trackId:@"video0"];
    [_localStream addVideoTrack:_localVideoTrack];
    // Feed camera frames into the video source
    _videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource];
    [self startCameraCapture];
    // Add the local stream to the PeerConnection
    [_peerConnection addStream:_localStream];
}
// Start capturing from the first available camera
- (void)startCameraCapture {
    AVCaptureDevice *device = [RTCCameraVideoCapturer captureDevices].firstObject;
    if (!device) { return; }
    AVCaptureDeviceFormat *format = [RTCCameraVideoCapturer supportedFormatsForDevice:device].lastObject;
    if (!format) { return; }
    [self.videoCapturer startCaptureWithDevice:device format:format fps:30];
}
// Set up local video rendering
- (void)setupLocalVideoRender:(UIView *)videoView {
    RTCMTLVideoView *rendererView = [[RTCMTLVideoView alloc] init];
    rendererView.frame = videoView.bounds;
    rendererView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
    [videoView addSubview:rendererView];
    // RTCMTLVideoView conforms to RTCVideoRenderer, so it can be attached directly
    self.localRenderer = rendererView;
    [_localVideoTrack addRenderer:rendererView];
}
// Create an Offer
- (void)createOffer {
    RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
        initWithMandatoryConstraints:@{@"OfferToReceiveAudio": @"true",
                                       @"OfferToReceiveVideo": @"true"}
        optionalConstraints:nil];
    [_peerConnection offerForConstraints:constraints completionHandler:^(RTCSessionDescription * _Nullable sdp, NSError * _Nullable error) {
        if (error) {
            NSLog(@"Failed to create offer: %@", error.localizedDescription);
            return;
        }
        [self.peerConnection setLocalDescription:sdp completionHandler:^(NSError * _Nullable error) {
            if (error) {
                NSLog(@"Failed to set local description: %@", error.localizedDescription);
                return;
            }
            // Send the sdp to the signaling server here
            NSString *offerString = [self sessionDescriptionToString:sdp];
            NSLog(@"Generated offer: %@", offerString);
            // [self.signalingClient sendOffer:offerString];
        }];
    }];
}
// Handle a remote Offer
- (void)handleRemoteOffer:(NSString *)offer {
    RTCSessionDescription *remoteSDP = [self stringToSessionDescription:offer type:RTCSdpTypeOffer];
    [_peerConnection setRemoteDescription:remoteSDP completionHandler:^(NSError * _Nullable error) {
        if (error) {
            NSLog(@"Failed to set remote offer: %@", error.localizedDescription);
            return;
        }
        // Create an Answer
        [self createAnswer];
    }];
}
// Create an Answer
- (void)createAnswer {
    RTCMediaConstraints *constraints = [self defaultMediaConstraints];
    [_peerConnection answerForConstraints:constraints completionHandler:^(RTCSessionDescription * _Nullable sdp, NSError * _Nullable error) {
        if (error) {
            NSLog(@"Failed to create answer: %@", error.localizedDescription);
            return;
        }
        [self.peerConnection setLocalDescription:sdp completionHandler:^(NSError * _Nullable error) {
            if (error) {
                NSLog(@"Failed to set local answer: %@", error.localizedDescription);
                return;
            }
            // Send the answer to the signaling server
            NSString *answerString = [self sessionDescriptionToString:sdp];
            NSLog(@"Generated answer: %@", answerString);
            // [self.signalingClient sendAnswer:answerString];
        }];
    }];
}
// Handle a remote Answer
- (void)handleRemoteAnswer:(NSString *)answer {
    RTCSessionDescription *remoteSDP = [self stringToSessionDescription:answer type:RTCSdpTypeAnswer];
    [_peerConnection setRemoteDescription:remoteSDP completionHandler:^(NSError * _Nullable error) {
        if (error) {
            NSLog(@"Failed to set remote answer: %@", error.localizedDescription);
        }
    }];
}
// Handle a remote ICE candidate
- (void)handleRemoteICECandidate:(NSString *)candidate {
    NSDictionary *candidateDict = [NSJSONSerialization JSONObjectWithData:[candidate dataUsingEncoding:NSUTF8StringEncoding] options:0 error:nil];
    if (!candidateDict) return;
    RTCIceCandidate *iceCandidate = [[RTCIceCandidate alloc]
        initWithSdp:candidateDict[@"sdp"]
        sdpMLineIndex:[candidateDict[@"sdpMLineIndex"] intValue]
        sdpMid:candidateDict[@"sdpMid"]];
    [_peerConnection addIceCandidate:iceCandidate];
}
// Tear down the connection
- (void)disconnect {
    [_videoCapturer stopCapture];
    [_peerConnection close];
    _peerConnection = nil;
    if (_localAudioTrack) { [_localStream removeAudioTrack:_localAudioTrack]; }
    if (_localVideoTrack) { [_localStream removeVideoTrack:_localVideoTrack]; }
    _localStream = nil;
}
#pragma mark - RTCPeerConnectionDelegate
// A remote media stream was added
- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream {
    NSLog(@"Received remote media stream");
    if ([self.delegate respondsToSelector:@selector(didReceiveRemoteStream:)] && stream) {
        [self.delegate didReceiveRemoteStream:stream];
    }
}
// A local ICE candidate was generated
- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate {
    NSDictionary *candidateDict = @{
        @"sdpMLineIndex": @(candidate.sdpMLineIndex),
        @"sdpMid": candidate.sdpMid ?: @"",
        @"sdp": candidate.sdp
    };
    NSData *jsonData = [NSJSONSerialization dataWithJSONObject:candidateDict options:0 error:nil];
    NSString *candidateString = [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding];
    // Send the ICE candidate to the signaling server
    NSLog(@"Generated ICE candidate: %@", candidateString);
    // [self.signalingClient sendICECandidate:candidateString];
}
// Remaining required RTCPeerConnectionDelegate callbacks (empty stubs so the protocol is fully implemented)
- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)stateChanged {}
- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream {}
- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection {}
- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState {}
- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState {}
- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveIceCandidates:(NSArray<RTCIceCandidate *> *)candidates {}
- (void)peerConnection:(RTCPeerConnection *)peerConnection didOpenDataChannel:(RTCDataChannel *)dataChannel {}
#pragma mark - Helpers
// Default media constraints (resolution and frame rate are applied via adaptOutputFormatToWidth:height:fps: above)
- (RTCMediaConstraints *)defaultMediaConstraints {
    return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil optionalConstraints:nil];
}
// Convert a session description to a JSON string
- (NSString *)sessionDescriptionToString:(RTCSessionDescription *)sdp {
    NSDictionary *dict = @{
        @"type": [self sdpTypeToString:sdp.type],
        @"sdp": sdp.sdp
    };
    NSData *jsonData = [NSJSONSerialization dataWithJSONObject:dict options:0 error:nil];
    return [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding];
}
// Convert a JSON string back to a session description
- (RTCSessionDescription *)stringToSessionDescription:(NSString *)string type:(RTCSdpType)type {
    NSData *data = [string dataUsingEncoding:NSUTF8StringEncoding];
    NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
    return [[RTCSessionDescription alloc] initWithType:type sdp:dict[@"sdp"]];
}
// Map an SDP type to its string form
- (NSString *)sdpTypeToString:(RTCSdpType)type {
    switch (type) {
        case RTCSdpTypeOffer: return @"offer";
        case RTCSdpTypeAnswer: return @"answer";
        case RTCSdpTypePrAnswer: return @"pranswer";
        default: return @"";
    }
}
@end
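The implementation above assumes a WebRTCManager.h header that declares the public interface and the delegate protocol used by the caller. The header is not part of the original listing; a minimal sketch inferred from the methods and properties referenced in the .m file could look like this:
// WebRTCManager.h (sketch inferred from the implementation; adjust as needed)
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <WebRTC/WebRTC.h>

@protocol WebRTCManagerDelegate <NSObject>
// Called when a remote media stream arrives
- (void)didReceiveRemoteStream:(RTCMediaStream *)stream;
@end

@interface WebRTCManager : NSObject

@property (nonatomic, weak) id<WebRTCManagerDelegate> delegate;
@property (nonatomic, strong) RTCPeerConnection *peerConnection;
@property (nonatomic, strong) RTCMediaStream *localStream;

- (instancetype)initWithDelegate:(id<WebRTCManagerDelegate>)delegate;
- (void)setupLocalStreamWithVideoView:(UIView *)videoView;
- (void)createOffer;
- (void)handleRemoteOffer:(NSString *)offer;
- (void)handleRemoteAnswer:(NSString *)answer;
- (void)handleRemoteICECandidate:(NSString *)candidate;
- (void)disconnect;

@end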
Caller (Objective-C): ViewController.m
#import "ViewController.h"
#import <WebRTC/WebRTC.h>
@interface ViewController ()
@property (nonatomic, strong) WebRTCManager *rtcManager;
@property (nonatomic, strong) UIView *localVideoView;
@property (nonatomic, strong) UIView *remoteVideoView;
@property (nonatomic, strong) UIButton *connectButton;
@end
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    self.view.backgroundColor = UIColor.whiteColor;
    [self setupUI];
    [self setupWebRTC];
}
- (void)setupUI {
    // Local video view
    _localVideoView = [[UIView alloc] init];
    _localVideoView.backgroundColor = UIColor.lightGrayColor;
    _localVideoView.translatesAutoresizingMaskIntoConstraints = NO;
    [self.view addSubview:_localVideoView];
    // Remote video view
    _remoteVideoView = [[UIView alloc] init];
    _remoteVideoView.backgroundColor = UIColor.darkGrayColor;
    _remoteVideoView.translatesAutoresizingMaskIntoConstraints = NO;
    [self.view addSubview:_remoteVideoView];
    // Connect button
    _connectButton = [UIButton buttonWithType:UIButtonTypeSystem];
    [_connectButton setTitle:@"Connect" forState:UIControlStateNormal];
    [_connectButton addTarget:self action:@selector(connectButtonTapped) forControlEvents:UIControlEventTouchUpInside];
    _connectButton.translatesAutoresizingMaskIntoConstraints = NO;
    [self.view addSubview:_connectButton];
    // Layout
    [NSLayoutConstraint activateConstraints:@[
        // Local video (small window in the top-right corner)
        [_localVideoView.topAnchor constraintEqualToAnchor:self.view.safeAreaLayoutGuide.topAnchor constant:20],
        [_localVideoView.trailingAnchor constraintEqualToAnchor:self.view.trailingAnchor constant:-20],
        [_localVideoView.widthAnchor constraintEqualToConstant:120],
        [_localVideoView.heightAnchor constraintEqualToConstant:180],
        // Remote video (full screen)
        [_remoteVideoView.topAnchor constraintEqualToAnchor:self.view.safeAreaLayoutGuide.topAnchor],
        [_remoteVideoView.leadingAnchor constraintEqualToAnchor:self.view.leadingAnchor],
        [_remoteVideoView.trailingAnchor constraintEqualToAnchor:self.view.trailingAnchor],
        [_remoteVideoView.bottomAnchor constraintEqualToAnchor:self.connectButton.topAnchor constant:-20],
        // Connect button
        [_connectButton.bottomAnchor constraintEqualToAnchor:self.view.safeAreaLayoutGuide.bottomAnchor constant:-20],
        [_connectButton.centerXAnchor constraintEqualToAnchor:self.view.centerXAnchor],
        [_connectButton.widthAnchor constraintEqualToConstant:120],
        [_connectButton.heightAnchor constraintEqualToConstant:44]
    ]];
}
- (void)setupWebRTC {
    _rtcManager = [[WebRTCManager alloc] initWithDelegate:self];
    [_rtcManager setupLocalStreamWithVideoView:self.localVideoView];
}
- (void)connectButtonTapped {
    [_rtcManager createOffer];
}
#pragma mark - WebRTCManagerDelegate
// Handle the received remote stream
- (void)didReceiveRemoteStream:(RTCMediaStream *)stream {
    dispatch_async(dispatch_get_main_queue(), ^{
        // Render the remote video
        if (stream.videoTracks.count > 0) {
            RTCVideoTrack *remoteVideoTrack = stream.videoTracks[0];
            RTCMTLVideoView *rendererView = [[RTCMTLVideoView alloc] init];
            rendererView.frame = self.remoteVideoView.bounds;
            rendererView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
            [self.remoteVideoView addSubview:rendererView];
            // RTCMTLVideoView conforms to RTCVideoRenderer, so attach it directly
            [remoteVideoTrack addRenderer:rendererView];
        }
    });
}
- (void)dealloc {
    [_rtcManager disconnect];
}
@end
Code Explanation
This example implements basic WebRTC audio/video publishing and playback on iOS and consists of the following parts:
WebRTCManager: the core manager class, responsible for:
- Initializing the WebRTC components
- Capturing and previewing the local audio/video stream
- Managing the PeerConnection
- Handling SDP exchange and ICE candidates
ViewController: the UI controller, responsible for:
- Creating the local and remote video preview views
- Handling user interaction (such as starting a connection)
- Rendering the remote video stream
3. Usage Notes
- This example does not include a signaling server; you need to set up your own.
- In a real application, replace the commented-out signaling calls with actual network requests.
- The STUN server in the code is Google's public server; for production, use your own STUN/TURN servers.
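The commented-out signalingClient calls are only placeholders. As an illustration (not part of WebRTC), they could be backed by a small protocol like the one below, implemented over WebSocket, HTTP, or any other channel you already have:
// Hypothetical signaling interface; the names are illustrative, not a real API
@protocol SignalingClient <NSObject>
- (void)sendOffer:(NSString *)offer;              // deliver the local offer to the remote peer
- (void)sendAnswer:(NSString *)answer;            // deliver the local answer to the remote peer
- (void)sendICECandidate:(NSString *)candidate;   // deliver a locally generated ICE candidate
@end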
4. Extension Suggestions
- Add error handling and a reconnection mechanism
- Support multi-party calls
- Add video quality control
- Implement screen sharing
- Add audio/video mute controls (see the sketch after this list)
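For the audio/video toggle, a minimal sketch could simply flip the isEnabled flag on the local tracks; the method names below are illustrative additions to WebRTCManager, not existing API:
// Illustrative helpers that could be added to WebRTCManager.m
- (void)setAudioEnabled:(BOOL)enabled {
    self.localAudioTrack.isEnabled = enabled;   // mutes/unmutes the local microphone track
}
- (void)setVideoEnabled:(BOOL)enabled {
    self.localVideoTrack.isEnabled = enabled;   // pauses/resumes the local video track
}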
This example only provides a basic publishing/playback skeleton; extend and optimize it according to your actual needs.