1. Downloading the Intel WebRTC SDK
Official download page
Official documentation
After downloading you get the WebRTC toolkit archive; find the iOS demo inside and unzip it.
I downloaded version 4.3.1 and hit missing static libraries when building; it turned out version 4.3 ships these three static libraries. See this article for details.
2. Using WebRTC
Once the build errors are resolved, we can look at how to use WebRTC.
1.1 Initialization
//Create the conference client; the ICE server list here only contains a STUN server
OWTConferenceClientConfiguration *config = [[OWTConferenceClientConfiguration alloc] init];
NSArray *ice = [[NSArray alloc] initWithObjects:[[RTCIceServer alloc] initWithURLStrings:[[NSArray alloc] initWithObjects:@"stun:61.152.239.47:3478", nil]], nil];
config.rtcConfiguration = [[RTCConfiguration alloc] init];
config.rtcConfiguration.iceServers = ice;
_conferenceClient = [[OWTConferenceClient alloc] initWithConfiguration:config];
_conferenceClient.delegate = self;
1.2 Joining a room. After obtaining the token from your backend, joining the room returns information about all streams currently in the room.
__weak typeof(self) weakSelf = self;
[_conferenceClient joinWithToken:weakSelf.userInfo.webRtcToken onSuccess:^(OWTConferenceInfo* info) {
dispatch_async(dispatch_get_main_queue(), ^{
if ([info.remoteStreams count] > 0) {
//1. Get the remote streams returned by the RTC service
NSMutableArray *arr = [NSMutableArray arrayWithArray:info.remoteStreams];
//Remove the mixed stream
NSMutableArray *finalArr = [NSMutableArray arrayWithArray:arr];
for (OWTRemoteStream* s in arr) {
if ([s isKindOfClass:[OWTRemoteMixedStream class]]) {
[finalArr removeObject:s];
}
}
//Push to the stream page with the filtered stream array
if (finalArr.count) {
ConferenceStreamViewController *vc = [[ConferenceStreamViewController alloc] init];
vc.streamsArr = [NSMutableArray arrayWithArray:finalArr];
[weakSelf.navigationController pushViewController:vc animated:YES];
}
} else {
//No remote streams in the room yet
}
});
} onFailure:^(NSError* err) {
//Join failed; handle the error here
}];
1.3 Delegate callbacks after joining a room
-(void)conferenceClient:(OWTConferenceClient *)client didAddStream:(OWTRemoteStream *)stream{
NSLog(@"On stream added");
}
-(void)conferenceClientDidDisconnect:(OWTConferenceClient *)client{
NSLog(@"Server disconnected");
}
-(void)conferenceClient:(OWTConferenceClient *)client didReceiveMessage:(NSString *)message from:(NSString *)senderId{
NSLog(@"Received message: %@, from %@", message, senderId);
}
- (void)conferenceClient:(OWTConferenceClient *)client didAddParticipant:(OWTConferenceParticipant *)user{
user.delegate=self;
NSLog(@"A new participant joined the meeting.");
}
-(void)streamDidEnd:(OWTRemoteStream *)stream{
//The stream is no longer valid; notify the rendering view so it can remove the stream
NSLog(@"Stream did end");
[[NSNotificationCenter defaultCenter] postNotificationName:@"OnStreamRemoved" object:self userInfo:[NSDictionary dictionaryWithObject:stream forKey:@"stream"]];
}
-(void)participantDidLeave:(OWTConferenceParticipant *)participant{
NSLog(@"Participant left conference.");
}
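The streamDidEnd: callback above only posts an OnStreamRemoved notification; the view that is rendering the stream still has to observe it and detach its renderer. Below is a minimal sketch of such an observer; the removeRendererForStream: helper is a hypothetical name, not part of the SDK or the original code.

// Observe the OnStreamRemoved notification posted in streamDidEnd: above.
// removeRendererForStream: is a hypothetical helper; replace it with your own view cleanup.
- (void)viewDidLoad {
    [super viewDidLoad];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(onStreamRemoved:)
                                                 name:@"OnStreamRemoved"
                                               object:nil];
}
- (void)onStreamRemoved:(NSNotification *)notification {
    OWTRemoteStream *stream = notification.userInfo[@"stream"];
    dispatch_async(dispatch_get_main_queue(), ^{
        [self removeRendererForStream:stream]; // detach the renderer and remove the stream's view
    });
}
- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}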
1.4 Publishing: push your local stream to the RTC service
-(void)doPublish{
#if TARGET_IPHONE_SIMULATOR
NSLog(@"Camera is not supported on simulator模拟器不支持推流");
OWTStreamConstraints* constraints=[[OWTStreamConstraints alloc]init];
constraints.audio=YES;
constraints.video=nil;
#else
/* Create LocalStream with constraints */
OWTStreamConstraints* constraints=[[OWTStreamConstraints alloc] init];
constraints.audio=YES;
constraints.video=[[OWTVideoTrackConstraints alloc] init];
constraints.video.frameRate=_roomSettingModel.frameRate;
//iPhone publishing supports these resolutions: 352x288, 640x480, 960x540, 1280x720, 1920x1080
if (_roomSettingModel.width == 0) {
constraints.video.resolution=CGSizeMake(352,288);
} else {
NSInteger VW = _roomSettingModel.width;
if (VW < 640) {
constraints.video.resolution=CGSizeMake(352,288);
} else if (VW < 960 && VW > 352) {
constraints.video.resolution=CGSizeMake(640,480);
} else if (VW < 1280 && VW > 640) {
constraints.video.resolution=CGSizeMake(960,540);
} else if (VW < 1920 && VW > 960) {
constraints.video.resolution=CGSizeMake(1280,720);
} else {
constraints.video.resolution=CGSizeMake(1920,1080);
}
}
constraints.video.devicePosition=AVCaptureDevicePositionFront;
#endif
NSError *err = nil;
_localStream=[[OWTLocalStream alloc] initWithConstratins:constraints error:&err];
//Attach publisher info to the stream via attributes
_localStream.attributes = @{
// @"PubUseInfo":@"",
};
#if TARGET_IPHONE_SIMULATOR
NSLog(@"Stream does not have video track 信息流没有视频轨道.");
#else
dispatch_async(dispatch_get_main_queue(), ^{
[((StreamView *)self.view).localVideoView setCaptureSession:[self->_capturer captureSession]];
});
#endif
OWTPublishOptions* options=[[OWTPublishOptions alloc] init];
OWTAudioCodecParameters* opusParameters=[[OWTAudioCodecParameters alloc] init];
opusParameters.name=OWTAudioCodecOpus;
OWTAudioEncodingParameters *audioParameters=[[OWTAudioEncodingParameters alloc] init];
audioParameters.codec=opusParameters;
audioParameters.maxBitrate = _roomSettingModel.audioMaxBitrate;
options.audio=[NSArray arrayWithObjects:audioParameters, nil];
OWTVideoCodecParameters *videoCodecParameters=[[OWTVideoCodecParameters alloc] init];
// videoCodecParameters.name=OWTVideoCodecH264;
videoCodecParameters.name=OWTVideoCodecVP8;
OWTVideoEncodingParameters *videoParameters=[[OWTVideoEncodingParameters alloc]init];
videoParameters.codec=videoCodecParameters;
videoParameters.maxBitrate = _roomSettingModel.videoMaxBitrate;
options.video=[NSArray arrayWithObjects:videoParameters, nil];
[_conferenceClient publish:_localStream withOptions:options onSuccess:^(OWTConferencePublication* p) {
self->_publication=p;
self->_publication.delegate=self;
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"publish success 推流成功!");
self->isPublish = YES;
});
} onFailure:^(NSError* err) {
NSLog(@"publish failure推流失败! %@",[err localizedFailureReason]);
[self showMsg:[err localizedFailureReason]];
}];
}
//To stop publishing, call stop on the publication object obtained in the publish success callback
[_publication stop];
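For reference, a minimal sketch of a stop-publishing helper, assuming the _publication, _localStream and isPublish ivars used in doPublish above:

// Stop-publishing sketch; assumes the ivars from doPublish above.
- (void)stopPublish {
    if (!isPublish || _publication == nil) {
        return;
    }
    [_publication stop]; // stop pushing the local stream to the RTC service
    _publication = nil;
    _localStream = nil;
    isPublish = NO;
}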
1.5 Subscribing to streams
A remote stream returned by the RTC service can only be played after it has been subscribed to successfully.
- (void)subscribeWithStream:(OWTRemoteStream *)stream andAllFinsh:(BOOL)isAllFinsh{
//Your own local stream does not need to be subscribed to
if ([stream isKindOfClass:[OWTLocalStream class]]) {
return;
}
OWTConferenceSubscribeOptions* subOption = [[OWTConferenceSubscribeOptions alloc] init];
OWTConferenceAudioSubscriptionConstraints* audioP = [[OWTConferenceAudioSubscriptionConstraints alloc] init];
OWTAudioCodecParameters* pcmaParameters=[[OWTAudioCodecParameters alloc] init];
pcmaParameters.name=OWTAudioCodecPcma;
OWTAudioCodecParameters* pcmuParameters=[[OWTAudioCodecParameters alloc] init];
pcmuParameters.name=OWTAudioCodecPcmu;
OWTAudioCodecParameters* ac3Parameters=[[OWTAudioCodecParameters alloc] init];
ac3Parameters.name=OWTAudioCodecAc3;
OWTAudioCodecParameters* unknownParameters=[[OWTAudioCodecParameters alloc] init];
unknownParameters.name=OWTAudioCodecUnknown;
OWTAudioCodecParameters* g722Parameters=[[OWTAudioCodecParameters alloc] init];
g722Parameters.name=OWTAudioCodecG722;
OWTAudioCodecParameters* isacParameters=[[OWTAudioCodecParameters alloc] init];
isacParameters.name=OWTAudioCodecIsac;
OWTAudioCodecParameters* aacParameters=[[OWTAudioCodecParameters alloc] init];
aacParameters.name=OWTAudioCodecAac;
OWTAudioCodecParameters* asaoParameters=[[OWTAudioCodecParameters alloc] init];
asaoParameters.name=OWTAudioCodecAsao;
//Accept any of these audio codecs when subscribing
audioP.codecs = [NSArray arrayWithObjects:pcmaParameters,pcmuParameters,ac3Parameters,unknownParameters,g722Parameters,isacParameters,aacParameters,asaoParameters, nil];
subOption.audio= audioP;
// OWTVideoCodecVP8 = 1,
// OWTVideoCodecVP9 = 2,
// OWTVideoCodecH264 = 3,
// OWTVideoCodecH265 = 4,
// OWTVideoCodecUnknown = 5,
OWTConferenceVideoSubscriptionConstraints *videoP = [[OWTConferenceVideoSubscriptionConstraints alloc] init];
OWTVideoCodecParameters *h264Parameters=[[OWTVideoCodecParameters alloc] init];
h264Parameters.name=OWTVideoCodecH264;
OWTVideoCodecParameters *vp8Parameters=[[OWTVideoCodecParameters alloc] init];
vp8Parameters.name=OWTVideoCodecVP8;
OWTVideoCodecParameters *vp9Parameters=[[OWTVideoCodecParameters alloc] init];
vp9Parameters.name=OWTVideoCodecVP9;
// OWTVideoEncodingParameters *videoParameters=[[OWTVideoEncodingParameters alloc]init];
// videoParameters.codec=h264Parameters;
videoP.codecs = [NSArray arrayWithObjects:h264Parameters,vp8Parameters,vp9Parameters, nil];
subOption.video= videoP;
//Pick a resolution to subscribe at: prefer 640x480 if the publisher supports it,
//otherwise fall back to the smallest supported resolution
int width = INT_MAX;
int height = INT_MAX;
for (NSValue* value in stream.capabilities.video.resolutions) {
CGSize resolution=[value CGSizeValue];
if (resolution.width == 640 && resolution.height == 480) {
width = resolution.width;
height = resolution.height;
break;
}
if (resolution.width < width && resolution.height != 0) {
width = resolution.width;
height = resolution.height;
}
}
//Apply the chosen resolution to the video subscription constraints
if (width != INT_MAX && height != INT_MAX) {
videoP.resolution = CGSizeMake(width, height);
}
//Route audio output to the speaker
[[AVAudioSession sharedInstance]
overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker
error:nil];
[_conferenceClient subscribe:stream withOptions:subOption onSuccess:^(OWTConferenceSubscription* subscription) {
self->_subscription=subscription;
self->_subscription.delegate=self;
[self->_subArr addObject:subscription];
if (isAllFinsh) {
//All streams have been subscribed; refresh the UI here if needed
}
NSLog(@"Subscribe stream success.");
self->_subscribedMix = NO;
} onFailure:^(NSError* err) {
NSLog(@"Subscribe stream failed. %@", [err localizedDescription]);
}];
}
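A usage sketch of the method above: subscribe to every remote stream handed over from the join step in section 1.2 (streamsArr is the array set on ConferenceStreamViewController there), passing YES for the last one so the caller knows all subscriptions have been requested:

// Subscribe to each remote stream received when joining the room (see section 1.2).
for (NSUInteger i = 0; i < self.streamsArr.count; i++) {
    OWTRemoteStream *stream = self.streamsArr[i];
    BOOL isLast = (i == self.streamsArr.count - 1);
    [self subscribeWithStream:stream andAllFinsh:isLast];
}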
1.6 Playing a stream
Initializing the stream playback view:
UIView<RTCVideoRenderer> *streamView = [[RTCEAGLVideoView alloc] init];
The old, deprecated playback method (do not use):
/**
@brief Attach the stream's first video track to a renderer.
@details The render doesn't retain this stream. Using this method is not
recommended. It will be removed in the future. please use
[RTCVideoTrack addRenderer] instead.
*/
- (void)attach:(NSObject<RTCVideoRenderer>*)renderer;
Because the old method cannot detach a stream from the playback view, rendering a new stream over an old one makes the two pictures flicker back and forth, so be sure to use the new method:
//A stream contains video tracks and audio tracks
RTCVideoTrack *videoTrack = stream.mediaStream.videoTracks.firstObject;
//Attach the video track to the playback view
[videoTrack addRenderer:streamView];
//Detach the video track from the playback view
[videoTrack removeRenderer:streamView];
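Putting the attach and detach calls together, a minimal sketch of switching the playback view from one remote stream to another; the currentVideoTrack property is a hypothetical addition, not part of the SDK or the original code:

// Switch which remote stream a playback view renders.
// currentVideoTrack is a hypothetical property used to remember the attached track.
- (void)showStream:(OWTRemoteStream *)stream onView:(UIView<RTCVideoRenderer> *)streamView {
    // Detach whatever track was rendering into this view before
    if (self.currentVideoTrack) {
        [self.currentVideoTrack removeRenderer:streamView];
    }
    // Attach the new stream's first video track
    RTCVideoTrack *videoTrack = stream.mediaStream.videoTracks.firstObject;
    [videoTrack addRenderer:streamView];
    self.currentVideoTrack = videoTrack;
}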
1.7 Leaving the room
Leaving the room generally requires stopping publication and cancelling the stream subscriptions first (a combined sketch follows the snippet below).
[_conferenceClient leaveWithOnSuccess:^{
} onFailure:^(NSError* err){
NSLog(@"Failed to leave. %@",err);
}];
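For reference, a minimal sketch of the whole teardown in one place, assuming the _publication, _subArr and _conferenceClient ivars used in the earlier sections (the leaveRoom method name is mine):

// Teardown sketch: stop publishing, stop every subscription, then leave the room.
- (void)leaveRoom {
    [_publication stop]; // stop pushing the local stream
    for (OWTConferenceSubscription *sub in _subArr) {
        [sub stop]; // stop each subscription created in subscribeWithStream:andAllFinsh:
    }
    [_subArr removeAllObjects];
    [_conferenceClient leaveWithOnSuccess:^{
        NSLog(@"Left the room.");
    } onFailure:^(NSError *err) {
        NSLog(@"Failed to leave. %@", err);
    }];
}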