/*
 * libjingle
 * Copyright 2014, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "APPRTCConnectionManager.h"

#import <AVFoundation/AVFoundation.h>
#import "APPRTCAppClient.h"
#import "GAEChannelClient.h"
#import "RTCICECandidate.h"
#import "RTCMediaConstraints.h"
#import "RTCMediaStream.h"
#import "RTCPair.h"
#import "RTCPeerConnection.h"
#import "RTCPeerConnectionDelegate.h"
#import "RTCPeerConnectionFactory.h"
#import "RTCSessionDescription.h"
#import "RTCSessionDescriptionDelegate.h"
#import "RTCStatsDelegate.h"
#import "RTCVideoCapturer.h"
#import "RTCVideoSource.h"

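// Private class extension: adopts the signaling and WebRTC delegate protocols
// and holds the objects that make up a single AppRTC session.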
@interface APPRTCConnectionManager ()
    <APPRTCAppClientDelegate, GAEMessageHandler, RTCPeerConnectionDelegate,
     RTCSessionDescriptionDelegate, RTCStatsDelegate>

@property(nonatomic, strong) APPRTCAppClient* client;
@property(nonatomic, strong) RTCPeerConnection* peerConnection;
@property(nonatomic, strong) RTCPeerConnectionFactory* peerConnectionFactory;
@property(nonatomic, strong) RTCVideoSource* videoSource;
@property(nonatomic, strong) NSMutableArray* queuedRemoteCandidates;

@end

@implementation APPRTCConnectionManager {
  NSTimer* _statsTimer;
}

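// Creates the peer connection factory up front; the peer connection itself is
// only created later, once ICE servers arrive from the room server.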
- (instancetype)initWithDelegate:(id<APPRTCConnectionManagerDelegate>)delegate
                          logger:(id<APPRTCLogger>)logger {
  if (self = [super init]) {
    self.delegate = delegate;
    self.logger = logger;
    self.peerConnectionFactory = [[RTCPeerConnectionFactory alloc] init];
    // TODO(tkchin): turn this into a button.
    // Uncomment for stat logs.
    // _statsTimer =
    //     [NSTimer scheduledTimerWithTimeInterval:10
    //                                      target:self
    //                                    selector:@selector(didFireStatsTimer:)
    //                                    userInfo:nil
    //                                     repeats:YES];
  }
  return self;
}

- (void)dealloc {
  [self disconnect];
}

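// Starts the signaling handshake for the given room URL. Returns NO if a room
// connection already exists.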
- (BOOL)connectToRoomWithURL:(NSURL*)url {
  if (self.client) {
    // Already have a connection.
    return NO;
  }
  self.client = [[APPRTCAppClient alloc] initWithDelegate:self
                                           messageHandler:self];
  [self.client connectToRoom:url];
  return YES;
}

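// Notifies the room server with a "bye" message and tears down the peer
// connection and any per-session state.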
- (void)disconnect {
  if (!self.client) {
    return;
  }
  [self.client
      sendData:[@"{\"type\": \"bye\"}" dataUsingEncoding:NSUTF8StringEncoding]];
  [self.peerConnection close];
  self.peerConnection = nil;
  self.client = nil;
  self.videoSource = nil;
  self.queuedRemoteCandidates = nil;
}

#pragma mark - APPRTCAppClientDelegate

- (void)appClient:(APPRTCAppClient*)appClient
    didErrorWithMessage:(NSString*)message {
  [self.delegate connectionManager:self
               didErrorWithMessage:message];
}

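// The room server has supplied ICE servers: create the peer connection,
// capture local audio (and video when running on an iOS device), and add the
// local media stream.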
- (void)appClient:(APPRTCAppClient*)appClient
    didReceiveICEServers:(NSArray*)servers {
  self.queuedRemoteCandidates = [NSMutableArray array];
  RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc]
      initWithMandatoryConstraints:
          @[
            [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"],
            [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"]
          ]
               optionalConstraints:
                   @[
                     [[RTCPair alloc] initWithKey:@"internalSctpDataChannels"
                                            value:@"true"],
                     [[RTCPair alloc] initWithKey:@"DtlsSrtpKeyAgreement"
                                            value:@"true"]
                   ]];
  self.peerConnection =
      [self.peerConnectionFactory peerConnectionWithICEServers:servers
                                                   constraints:constraints
                                                      delegate:self];
  RTCMediaStream* lms =
      [self.peerConnectionFactory mediaStreamWithLabel:@"ARDAMS"];

  // The iOS simulator doesn't provide any sort of camera capture
  // support or emulation (http://goo.gl/rHAnC1) so don't bother
  // trying to open a local stream.
  RTCVideoTrack* localVideoTrack;

  // TODO(tkchin): local video capture for OSX. See
  // https://code.google.com/p/webrtc/issues/detail?id=3417.
#if !TARGET_IPHONE_SIMULATOR && TARGET_OS_IPHONE
  NSString* cameraID = nil;
  for (AVCaptureDevice* captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == AVCaptureDevicePositionFront) {
      cameraID = [captureDevice localizedName];
      break;
    }
  }
  NSAssert(cameraID, @"Unable to get the front camera id");

  RTCVideoCapturer* capturer =
      [RTCVideoCapturer capturerWithDeviceName:cameraID];
  self.videoSource = [self.peerConnectionFactory
      videoSourceWithCapturer:capturer
                  constraints:self.client.videoConstraints];
  localVideoTrack =
      [self.peerConnectionFactory videoTrackWithID:@"ARDAMSv0"
                                            source:self.videoSource];
  if (localVideoTrack) {
    [lms addVideoTrack:localVideoTrack];
  }
  [self.delegate connectionManager:self
         didReceiveLocalVideoTrack:localVideoTrack];
#endif

  [lms addAudioTrack:[self.peerConnectionFactory audioTrackWithID:@"ARDAMSa0"]];
  [self.peerConnection addStream:lms constraints:constraints];
  [self.logger logMessage:@"onICEServers - added local stream."];
}

#pragma mark - GAEMessageHandler methods

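// Channel opened. The initiator creates and sends the offer; the callee
// simply waits for the remote offer to arrive.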
- (void)onOpen {
  if (!self.client.initiator) {
    [self.logger logMessage:@"Callee; waiting for remote offer"];
    return;
  }
  [self.logger logMessage:@"GAE onOpen - create offer."];
  RTCPair* audio =
      [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"];
  RTCPair* video =
      [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"];
  NSArray* mandatory = @[ audio, video ];
  RTCMediaConstraints* constraints =
      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory
                                            optionalConstraints:nil];
  [self.peerConnection createOfferWithDelegate:self constraints:constraints];
  [self.logger logMessage:@"PC - createOffer."];
}

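// Handles signaling messages relayed by the GAE channel: remote ICE
// candidates, offer/answer SDP, and the remote "bye".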
- (void)onMessage:(NSDictionary*)messageData {
  NSString* type = messageData[@"type"];
  NSAssert(type, @"Missing type: %@", messageData);
  [self.logger logMessage:[NSString stringWithFormat:@"GAE onMessage type - %@",
                                                     type]];
  if ([type isEqualToString:@"candidate"]) {
    NSString* mid = messageData[@"id"];
    NSNumber* sdpLineIndex = messageData[@"label"];
    NSString* sdp = messageData[@"candidate"];
    RTCICECandidate* candidate =
        [[RTCICECandidate alloc] initWithMid:mid
                                       index:sdpLineIndex.intValue
                                         sdp:sdp];
    if (self.queuedRemoteCandidates) {
      [self.queuedRemoteCandidates addObject:candidate];
    } else {
      [self.peerConnection addICECandidate:candidate];
    }
  } else if ([type isEqualToString:@"offer"] ||
             [type isEqualToString:@"answer"]) {
    NSString* sdpString = messageData[@"sdp"];
    RTCSessionDescription* sdp = [[RTCSessionDescription alloc]
        initWithType:type
                 sdp:[[self class] preferISAC:sdpString]];
    [self.peerConnection setRemoteDescriptionWithDelegate:self
                                       sessionDescription:sdp];
    [self.logger logMessage:@"PC - setRemoteDescription."];
  } else if ([type isEqualToString:@"bye"]) {
    [self.delegate connectionManagerDidReceiveHangup:self];
  } else {
    NSAssert(NO, @"Invalid message: %@", messageData);
  }
}

- (void)onClose {
  [self.logger logMessage:@"GAE onClose."];
  [self.delegate connectionManagerDidReceiveHangup:self];
}

- (void)onError:(int)code withDescription:(NSString*)description {
  NSString* message = [NSString stringWithFormat:@"GAE onError: %d, %@",
                                                 code, description];
  [self.logger logMessage:message];
  [self.delegate connectionManager:self
               didErrorWithMessage:message];
}

#pragma mark - RTCPeerConnectionDelegate

- (void)peerConnectionOnError:(RTCPeerConnection*)peerConnection {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSString* message = @"PeerConnection error";
    NSLog(@"%@", message);
    NSAssert(NO, @"PeerConnection failed.");
    [self.delegate connectionManager:self
                 didErrorWithMessage:message];
  });
}

- (void)peerConnection:(RTCPeerConnection*)peerConnection
    signalingStateChanged:(RTCSignalingState)stateChanged {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSLog(@"PCO onSignalingStateChange: %d", stateChanged);
  });
}

- (void)peerConnection:(RTCPeerConnection*)peerConnection
           addedStream:(RTCMediaStream*)stream {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSLog(@"PCO onAddStream.");
    NSAssert([stream.audioTracks count] == 1 || [stream.videoTracks count] == 1,
             @"Expected audio or video track");
    NSAssert([stream.audioTracks count] <= 1,
             @"Expected at most 1 audio stream");
    NSAssert([stream.videoTracks count] <= 1,
             @"Expected at most 1 video stream");
    if ([stream.videoTracks count] != 0) {
      [self.delegate connectionManager:self
            didReceiveRemoteVideoTrack:stream.videoTracks[0]];
    }
  });
}

- (void)peerConnection:(RTCPeerConnection*)peerConnection
         removedStream:(RTCMediaStream*)stream {
  dispatch_async(dispatch_get_main_queue(),
                 ^{ NSLog(@"PCO onRemoveStream."); });
}

- (void)peerConnectionOnRenegotiationNeeded:(RTCPeerConnection*)peerConnection {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSLog(@"PCO onRenegotiationNeeded - ignoring because AppRTC has a "
          "predefined negotiation strategy");
  });
}

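// A local ICE candidate has been gathered; serialize it as JSON and relay it
// to the remote peer via the room server.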
- (void)peerConnection:(RTCPeerConnection*)peerConnection
       gotICECandidate:(RTCICECandidate*)candidate {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSLog(@"PCO onICECandidate.\n Mid[%@] Index[%li] Sdp[%@]",
          candidate.sdpMid,
          (long)candidate.sdpMLineIndex,
          candidate.sdp);
    NSDictionary* json = @{
      @"type" : @"candidate",
      @"label" : @(candidate.sdpMLineIndex),
      @"id" : candidate.sdpMid,
      @"candidate" : candidate.sdp
    };
    NSError* error;
    NSData* data =
        [NSJSONSerialization dataWithJSONObject:json options:0 error:&error];
    if (!error) {
      [self.client sendData:data];
    } else {
      NSAssert(NO,
               @"Unable to serialize JSON object with error: %@",
               error.localizedDescription);
    }
  });
}

- (void)peerConnection:(RTCPeerConnection*)peerConnection
    iceGatheringChanged:(RTCICEGatheringState)newState {
  dispatch_async(dispatch_get_main_queue(),
                 ^{ NSLog(@"PCO onIceGatheringChange. %d", newState); });
}

- (void)peerConnection:(RTCPeerConnection*)peerConnection
    iceConnectionChanged:(RTCICEConnectionState)newState {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSLog(@"PCO onIceConnectionChange. %d", newState);
    if (newState == RTCICEConnectionConnected)
      [self.logger logMessage:@"ICE Connection Connected."];
    NSAssert(newState != RTCICEConnectionFailed, @"ICE Connection failed!");
  });
}

- (void)peerConnection:(RTCPeerConnection*)peerConnection
    didOpenDataChannel:(RTCDataChannel*)dataChannel {
  NSAssert(NO, @"AppRTC doesn't use DataChannels");
}

#pragma mark - RTCSessionDescriptionDelegate

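// A local offer or answer has been created: rewrite its SDP to prefer ISAC,
// apply it as the local description, and send it to the remote peer.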
- (void)peerConnection:(RTCPeerConnection*)peerConnection
    didCreateSessionDescription:(RTCSessionDescription*)origSdp
                           error:(NSError*)error {
  dispatch_async(dispatch_get_main_queue(), ^{
    if (error) {
      [self.logger logMessage:@"SDP onFailure."];
      NSAssert(NO, @"%@", error.description);
      return;
    }
    [self.logger logMessage:@"SDP onSuccess(SDP) - set local description."];
    RTCSessionDescription* sdp = [[RTCSessionDescription alloc]
        initWithType:origSdp.type
                 sdp:[[self class] preferISAC:origSdp.description]];
    [self.peerConnection setLocalDescriptionWithDelegate:self
                                      sessionDescription:sdp];
    [self.logger logMessage:@"PC setLocalDescription."];
    NSDictionary* json = @{@"type" : sdp.type, @"sdp" : sdp.description};
    NSError* jsonError;
    NSData* data = [NSJSONSerialization dataWithJSONObject:json
                                                   options:0
                                                     error:&jsonError];
    NSAssert(!jsonError, @"Error: %@", jsonError.description);
    [self.client sendData:data];
  });
}

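// A local or remote description has been applied. The callee creates an
// answer once the remote offer is set; otherwise any queued remote candidates
// are drained.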
- (void)peerConnection:(RTCPeerConnection*)peerConnection
    didSetSessionDescriptionWithError:(NSError*)error {
  dispatch_async(dispatch_get_main_queue(), ^{
    if (error) {
      [self.logger logMessage:@"SDP onFailure."];
      NSAssert(NO, @"%@", error.description);
      return;
    }
    [self.logger logMessage:@"SDP onSuccess() - possibly drain candidates"];
    if (!self.client.initiator) {
      if (self.peerConnection.remoteDescription &&
          !self.peerConnection.localDescription) {
        [self.logger logMessage:@"Callee, setRemoteDescription succeeded"];
        RTCPair* audio = [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio"
                                                value:@"true"];
        RTCPair* video = [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo"
                                                value:@"true"];
        NSArray* mandatory = @[ audio, video ];
        RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc]
            initWithMandatoryConstraints:mandatory
                     optionalConstraints:nil];
        [self.peerConnection createAnswerWithDelegate:self
                                          constraints:constraints];
        [self.logger logMessage:@"PC - createAnswer."];
      } else {
        [self.logger logMessage:@"SDP onSuccess - drain candidates"];
        [self drainRemoteCandidates];
      }
    } else {
      if (self.peerConnection.remoteDescription) {
        [self.logger logMessage:@"SDP onSuccess - drain candidates"];
        [self drainRemoteCandidates];
      }
    }
  });
}

#pragma mark - RTCStatsDelegate methods

- (void)peerConnection:(RTCPeerConnection*)peerConnection
           didGetStats:(NSArray*)stats {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSString* message = [NSString stringWithFormat:@"Stats:\n %@", stats];
    [self.logger logMessage:message];
  });
}

#pragma mark - Private

// Match |pattern| to |string| and return the first group of the first
// match, or nil if no match was found.
+ (NSString*)firstMatch:(NSRegularExpression*)pattern
             withString:(NSString*)string {
  NSTextCheckingResult* result =
      [pattern firstMatchInString:string
                          options:0
                            range:NSMakeRange(0, [string length])];
  if (!result)
    return nil;
  return [string substringWithRange:[result rangeAtIndex:1]];
}

// Mangle |origSDP| to prefer the ISAC/16k audio codec.
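// The matching payload type is moved to the front of the m=audio format list
// so that ISAC/16000 becomes the preferred codec.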
+ (NSString*)preferISAC:(NSString*)origSDP {
  int mLineIndex = -1;
  NSString* isac16kRtpMap = nil;
  NSArray* lines = [origSDP componentsSeparatedByString:@"\n"];
  NSRegularExpression* isac16kRegex = [NSRegularExpression
      regularExpressionWithPattern:@"^a=rtpmap:(\\d+) ISAC/16000[\r]?$"
                           options:0
                             error:nil];
  for (int i = 0;
       (i < [lines count]) && (mLineIndex == -1 || isac16kRtpMap == nil);
       ++i) {
    NSString* line = [lines objectAtIndex:i];
    if ([line hasPrefix:@"m=audio "]) {
      mLineIndex = i;
      continue;
    }
    isac16kRtpMap = [self firstMatch:isac16kRegex withString:line];
  }
  if (mLineIndex == -1) {
    NSLog(@"No m=audio line, so can't prefer iSAC");
    return origSDP;
  }
  if (isac16kRtpMap == nil) {
    NSLog(@"No ISAC/16000 line, so can't prefer iSAC");
    return origSDP;
  }
  NSArray* origMLineParts =
      [[lines objectAtIndex:mLineIndex] componentsSeparatedByString:@" "];
  NSMutableArray* newMLine =
      [NSMutableArray arrayWithCapacity:[origMLineParts count]];
  int origPartIndex = 0;
  // Format is: m=<media> <port> <proto> <fmt> ...
  [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex++]];
  [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex++]];
  [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex++]];
  [newMLine addObject:isac16kRtpMap];
  for (; origPartIndex < [origMLineParts count]; ++origPartIndex) {
    if (![isac16kRtpMap
            isEqualToString:[origMLineParts objectAtIndex:origPartIndex]]) {
      [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex]];
    }
  }
  NSMutableArray* newLines = [NSMutableArray arrayWithCapacity:[lines count]];
  [newLines addObjectsFromArray:lines];
  [newLines replaceObjectAtIndex:mLineIndex
                      withObject:[newMLine componentsJoinedByString:@" "]];
  return [newLines componentsJoinedByString:@"\n"];
}

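// Applies remote candidates that arrived before the remote description was
// set.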
- (void)drainRemoteCandidates {
  for (RTCICECandidate* candidate in self.queuedRemoteCandidates) {
    [self.peerConnection addICECandidate:candidate];
  }
  self.queuedRemoteCandidates = nil;
}

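// Invoked by the (currently commented-out) stats timer to request debug-level
// stats from the peer connection.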
- (void)didFireStatsTimer:(NSTimer*)timer {
  if (self.peerConnection) {
    [self.peerConnection getStatsWithDelegate:self
                             mediaStreamTrack:nil
                             statsOutputLevel:RTCStatsOutputLevelDebug];
  }
}

@end