1/* 2 * Copyright 2016 The WebRTC Project Authors. All rights reserved. 3 * 4 * Use of this source code is governed by a BSD-style license 5 * that can be found in the LICENSE file in the root of the source 6 * tree. An additional intellectual property rights grant can be found 7 * in the file PATENTS. All contributing project authors may 8 * be found in the AUTHORS file in the root of the source tree. 9 */ 10 11#import "RTCAudioSession+Private.h" 12 13#import <UIKit/UIKit.h> 14 15#include <atomic> 16#include <vector> 17 18#include "absl/base/attributes.h" 19#include "rtc_base/checks.h" 20#include "rtc_base/synchronization/mutex.h" 21 22#import "RTCAudioSessionConfiguration.h" 23#import "base/RTCLogging.h" 24 25#if !defined(ABSL_HAVE_THREAD_LOCAL) 26#error ABSL_HAVE_THREAD_LOCAL should be defined for MacOS / iOS Targets. 27#endif 28 29NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)"; 30NSInteger const kRTCAudioSessionErrorLockRequired = -1; 31NSInteger const kRTCAudioSessionErrorConfiguration = -2; 32NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; 33 34namespace { 35// Since webrtc::Mutex is not a reentrant lock and cannot check if the mutex is locked, 36// we need a separate variable to check that the mutex is locked in the RTCAudioSession. 37ABSL_CONST_INIT thread_local bool mutex_locked = false; 38} // namespace 39 40@interface RTC_OBJC_TYPE (RTCAudioSession) 41() @property(nonatomic, 42 readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates; 43@end 44 45// This class needs to be thread-safe because it is accessed from many threads. 46// TODO(tkchin): Consider more granular locking. We're not expecting a lot of 47// lock contention so coarse locks should be fine for now. 
@implementation RTC_OBJC_TYPE (RTCAudioSession) {
  // Guards audio session configuration; taken via -lockForConfiguration and
  // checked by -checkLock: through the thread-local `mutex_locked` flag.
  webrtc::Mutex _mutex;
  AVAudioSession *_session;
  // Balanced count of activations; used to decide when a deactivate request
  // should actually deactivate the underlying AVAudioSession.
  std::atomic<int> _activationCount;
  std::atomic<int> _webRTCSessionCount;
  BOOL _isActive;
  BOOL _useManualAudio;
  BOOL _isAudioEnabled;
  BOOL _canPlayOrRecord;
  BOOL _isInterrupted;
}

@synthesize session = _session;
@synthesize delegates = _delegates;
@synthesize ignoresPreferredAttributeConfigurationErrors =
    _ignoresPreferredAttributeConfigurationErrors;

// Returns the process-wide singleton instance.
+ (instancetype)sharedInstance {
  static dispatch_once_t onceToken;
  static RTC_OBJC_TYPE(RTCAudioSession) *sharedInstance = nil;
  dispatch_once(&onceToken, ^{
    sharedInstance = [[self alloc] init];
  });
  return sharedInstance;
}

- (instancetype)init {
  return [self initWithAudioSession:[AVAudioSession sharedInstance]];
}

/** This initializer provides a way for unit tests to inject a fake/mock audio session. */
- (instancetype)initWithAudioSession:(id)audioSession {
  if (self = [super init]) {
    _session = audioSession;

    // Subscribe to the AVAudioSession notifications this class forwards to
    // its delegates. Observers are removed in -dealloc.
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(handleInterruptionNotification:)
                   name:AVAudioSessionInterruptionNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleRouteChangeNotification:)
                   name:AVAudioSessionRouteChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleMediaServicesWereLost:)
                   name:AVAudioSessionMediaServicesWereLostNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleMediaServicesWereReset:)
                   name:AVAudioSessionMediaServicesWereResetNotification
                 object:nil];
    // Posted on the main thread when the primary audio from other applications
    // starts and stops. Foreground applications may use this notification as a
    // hint to enable or disable audio that is secondary.
    [center addObserver:self
               selector:@selector(handleSilenceSecondaryAudioHintNotification:)
                   name:AVAudioSessionSilenceSecondaryAudioHintNotification
                 object:nil];
    // Also track foreground event in order to deal with interruption ended situation.
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:nil];
    // KVO-observe output volume changes; the class object is used as the
    // observation context to disambiguate from superclass observations.
    [_session addObserver:self
               forKeyPath:kRTCAudioSessionOutputVolumeSelector
                  options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                  context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];

    RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self);
  }
  return self;
}

- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  [_session removeObserver:self
                forKeyPath:kRTCAudioSessionOutputVolumeSelector
                   context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
  RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self);
}

// Human-readable dump of the current session state, for logging.
- (NSString *)description {
  NSString *format = @"RTC_OBJC_TYPE(RTCAudioSession): {\n"
                      "  category: %@\n"
                      "  categoryOptions: %ld\n"
                      "  mode: %@\n"
                      "  isActive: %d\n"
                      "  sampleRate: %.2f\n"
                      "  IOBufferDuration: %f\n"
                      "  outputNumberOfChannels: %ld\n"
                      "  inputNumberOfChannels: %ld\n"
                      "  outputLatency: %f\n"
                      "  inputLatency: %f\n"
                      "  outputVolume: %f\n"
                      "}";
  // NSInteger arguments are cast to long to match the %ld specifiers on all
  // architectures (NSInteger is int on 32-bit targets).
  NSString *description = [NSString stringWithFormat:format,
                           self.category, (long)self.categoryOptions, self.mode,
                           self.isActive, self.sampleRate, self.IOBufferDuration,
                           (long)self.outputNumberOfChannels, (long)self.inputNumberOfChannels,
                           self.outputLatency, self.inputLatency, self.outputVolume];
  return description;
}

- (void)setIsActive:(BOOL)isActive {
  @synchronized(self) {
    _isActive = isActive;
  }
}

- (BOOL)isActive {
  @synchronized(self) {
    return _isActive;
  }
}

- (void)setUseManualAudio:(BOOL)useManualAudio {
  @synchronized(self) {
    if (_useManualAudio == useManualAudio) {
      return;
    }
    _useManualAudio = useManualAudio;
  }
  // Notify outside the synchronized block to avoid holding the monitor while
  // delegates run.
  [self updateCanPlayOrRecord];
}

- (BOOL)useManualAudio {
  @synchronized(self) {
    return _useManualAudio;
  }
}

- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
  @synchronized(self) {
    if (_isAudioEnabled == isAudioEnabled) {
      return;
    }
    _isAudioEnabled = isAudioEnabled;
  }
  [self updateCanPlayOrRecord];
}

- (BOOL)isAudioEnabled {
  @synchronized(self) {
    return _isAudioEnabled;
  }
}

- (void)setIgnoresPreferredAttributeConfigurationErrors:
    (BOOL)ignoresPreferredAttributeConfigurationErrors {
  @synchronized(self) {
    if (_ignoresPreferredAttributeConfigurationErrors ==
        ignoresPreferredAttributeConfigurationErrors) {
      return;
    }
    _ignoresPreferredAttributeConfigurationErrors = ignoresPreferredAttributeConfigurationErrors;
  }
}

- (BOOL)ignoresPreferredAttributeConfigurationErrors {
  @synchronized(self) {
    return _ignoresPreferredAttributeConfigurationErrors;
  }
}

// TODO(tkchin): Check for duplicates.
// Adds a delegate to receive audio session callbacks. Delegates are held
// weakly (see the class extension); zeroed entries are compacted on mutation.
- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
  RTCLog(@"Adding delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.push_back(delegate);
    [self removeZeroedDelegates];
  }
}

// Removes a previously added delegate. No-op for nil or unknown delegates.
- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
  RTCLog(@"Removing delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.erase(std::remove(_delegates.begin(),
                                 _delegates.end(),
                                 delegate),
                     _delegates.end());
    [self removeZeroedDelegates];
  }
}

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wthread-safety-analysis"

// Acquires the configuration mutex. webrtc::Mutex is non-reentrant, so the
// thread-local `mutex_locked` flag both CHECK-guards against recursive
// locking and lets -checkLock: verify the caller holds the lock.
- (void)lockForConfiguration {
  RTC_CHECK(!mutex_locked);
  _mutex.Lock();
  mutex_locked = true;
}

// Releases the configuration mutex; must balance -lockForConfiguration.
- (void)unlockForConfiguration {
  mutex_locked = false;
  _mutex.Unlock();
}

#pragma clang diagnostic pop

#pragma mark - AVAudioSession proxy methods

// The following read-only accessors simply forward to the underlying
// AVAudioSession (or the injected fake in tests).

- (NSString *)category {
  return self.session.category;
}

- (AVAudioSessionCategoryOptions)categoryOptions {
  return self.session.categoryOptions;
}

- (NSString *)mode {
  return self.session.mode;
}

- (BOOL)secondaryAudioShouldBeSilencedHint {
  return self.session.secondaryAudioShouldBeSilencedHint;
}

- (AVAudioSessionRouteDescription *)currentRoute {
  return self.session.currentRoute;
}

- (NSInteger)maximumInputNumberOfChannels {
  return self.session.maximumInputNumberOfChannels;
}

- (NSInteger)maximumOutputNumberOfChannels {
  return self.session.maximumOutputNumberOfChannels;
}

- (float)inputGain {
  return self.session.inputGain;
}

- (BOOL)inputGainSettable {
  return self.session.inputGainSettable;
}

- (BOOL)inputAvailable {
  return self.session.inputAvailable;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)inputDataSources {
  return self.session.inputDataSources;
}

- (AVAudioSessionDataSourceDescription *)inputDataSource {
  return self.session.inputDataSource;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)outputDataSources {
  return self.session.outputDataSources;
}

- (AVAudioSessionDataSourceDescription *)outputDataSource {
  return self.session.outputDataSource;
}

- (double)sampleRate {
  return self.session.sampleRate;
}

- (double)preferredSampleRate {
  return self.session.preferredSampleRate;
}

- (NSInteger)inputNumberOfChannels {
  return self.session.inputNumberOfChannels;
}

- (NSInteger)outputNumberOfChannels {
  return self.session.outputNumberOfChannels;
}

- (float)outputVolume {
  return self.session.outputVolume;
}

- (NSTimeInterval)inputLatency {
  return self.session.inputLatency;
}

- (NSTimeInterval)outputLatency {
  return self.session.outputLatency;
}

- (NSTimeInterval)IOBufferDuration {
  return self.session.IOBufferDuration;
}

- (NSTimeInterval)preferredIOBufferDuration {
  return self.session.preferredIOBufferDuration;
}

// Activates or deactivates the session, balancing nested requests via
// _activationCount: the AVAudioSession is only touched on the first
// unbalanced activate and the last unbalanced deactivate. Requires
// -lockForConfiguration to have been called. Returns NO (with *outError
// set, if provided) on failure.
- (BOOL)setActive:(BOOL)active
            error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  int activationCount = _activationCount.load();
  if (!active && activationCount == 0) {
    RTCLogWarning(@"Attempting to deactivate without prior activation.");
  }
  [self notifyWillSetActive:active];
  BOOL success = YES;
  BOOL isActive = self.isActive;
  // Keep a local error so we can log it.
  NSError *error = nil;
  BOOL shouldSetActive =
      (active && !isActive) || (!active && isActive && activationCount == 1);
  // Attempt to activate if we're not active.
  // Attempt to deactivate if we're active and it's the last unbalanced call.
  if (shouldSetActive) {
    AVAudioSession *session = self.session;
    // AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to ensure
    // that other audio sessions that were interrupted by our session can return
    // to their active state. It is recommended for VoIP apps to use this
    // option.
    AVAudioSessionSetActiveOptions options =
        active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
    success = [session setActive:active
                     withOptions:options
                           error:&error];
    if (outError) {
      *outError = error;
    }
  }
  if (success) {
    if (active) {
      if (shouldSetActive) {
        self.isActive = active;
        // A successful activation implicitly ends any pending interruption.
        if (self.isInterrupted) {
          self.isInterrupted = NO;
          [self notifyDidEndInterruptionWithShouldResumeSession:YES];
        }
      }
      [self incrementActivationCount];
      [self notifyDidSetActive:active];
    }
  } else {
    RTCLogError(@"Failed to setActive:%d. Error: %@",
                active, error.localizedDescription);
    [self notifyFailedToSetActive:active error:error];
  }
  // Set isActive and decrement activation count on deactivation
  // whether or not it succeeded.
  if (!active) {
    self.isActive = active;
    [self notifyDidSetActive:active];
    [self decrementActivationCount];
  }
  RTCLog(@"Number of current activations: %d", _activationCount.load());
  return success;
}

// The following setters forward to AVAudioSession after verifying that the
// caller holds the configuration lock (-checkLock:).

- (BOOL)setCategory:(NSString *)category
        withOptions:(AVAudioSessionCategoryOptions)options
              error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setCategory:category withOptions:options error:outError];
}

- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setMode:mode error:outError];
}

- (BOOL)setInputGain:(float)gain error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setInputGain:gain error:outError];
}

- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredSampleRate:sampleRate error:outError];
}

- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
                               error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredIOBufferDuration:duration error:outError];
}

- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
                                    error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredInputNumberOfChannels:count error:outError];
}

- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
                                     error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredOutputNumberOfChannels:count error:outError];
}

- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
                          error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session overrideOutputAudioPort:portOverride error:outError];
}

- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
                    error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredInput:inPort error:outError];
}

- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                     error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setInputDataSource:dataSource error:outError];
}

- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                      error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setOutputDataSource:dataSource error:outError];
}

#pragma mark - Notifications

// Maps AVAudioSession interruption begin/end events onto this class's
// isActive/isInterrupted state and forwards them to delegates.
- (void)handleInterruptionNotification:(NSNotification *)notification {
  NSNumber* typeNumber =
      notification.userInfo[AVAudioSessionInterruptionTypeKey];
  AVAudioSessionInterruptionType type =
      (AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionInterruptionTypeBegan:
      RTCLog(@"Audio session interruption began.");
      self.isActive = NO;
      self.isInterrupted = YES;
      [self notifyDidBeginInterruption];
      break;
    case AVAudioSessionInterruptionTypeEnded: {
      RTCLog(@"Audio session interruption ended.");
      self.isInterrupted = NO;
      [self updateAudioSessionAfterEvent];
      NSNumber *optionsNumber =
          notification.userInfo[AVAudioSessionInterruptionOptionKey];
      AVAudioSessionInterruptionOptions options =
          optionsNumber.unsignedIntegerValue;
      BOOL shouldResume =
          options & AVAudioSessionInterruptionOptionShouldResume;
      [self notifyDidEndInterruptionWithShouldResumeSession:shouldResume];
      break;
    }
  }
}

// Logs the route-change reason and forwards the event (with previous route)
// to delegates.
- (void)handleRouteChangeNotification:(NSNotification *)notification {
  // Get reason for current route change.
  NSNumber* reasonNumber =
      notification.userInfo[AVAudioSessionRouteChangeReasonKey];
  AVAudioSessionRouteChangeReason reason =
      (AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
  RTCLog(@"Audio route changed:");
  switch (reason) {
    case AVAudioSessionRouteChangeReasonUnknown:
      RTCLog(@"Audio route changed: ReasonUnknown");
      break;
    case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
      RTCLog(@"Audio route changed: NewDeviceAvailable");
      break;
    case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
      RTCLog(@"Audio route changed: OldDeviceUnavailable");
      break;
    case AVAudioSessionRouteChangeReasonCategoryChange:
      RTCLog(@"Audio route changed: CategoryChange to :%@",
             self.session.category);
      break;
    case AVAudioSessionRouteChangeReasonOverride:
      RTCLog(@"Audio route changed: Override");
      break;
    case AVAudioSessionRouteChangeReasonWakeFromSleep:
      RTCLog(@"Audio route changed: WakeFromSleep");
      break;
    case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
      RTCLog(@"Audio route changed: NoSuitableRouteForCategory");
      break;
    case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
      RTCLog(@"Audio route changed: RouteConfigurationChange");
      break;
  }
  AVAudioSessionRouteDescription* previousRoute =
      notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
  // Log previous route configuration.
  RTCLog(@"Previous route: %@\nCurrent route:%@",
         previousRoute, self.session.currentRoute);
  [self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
}

- (void)handleMediaServicesWereLost:(NSNotification *)notification {
  RTCLog(@"Media services were lost.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereLost];
}

- (void)handleMediaServicesWereReset:(NSNotification *)notification {
  RTCLog(@"Media services were reset.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereReset];
}

- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
  // TODO(henrika): just adding logs here for now until we know if we are ever
  // see this notification and might be affected by it or if further actions
  // are required.
  NSNumber *typeNumber =
      notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
  AVAudioSessionSilenceSecondaryAudioHintType type =
      (AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
      RTCLog(@"Another application's primary audio has started.");
      break;
    case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
      RTCLog(@"Another application's primary audio has stopped.");
      break;
  }
}

// Foregrounding can end an interruption without a matching "ended"
// notification; clear interrupted state and notify delegates accordingly.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  BOOL isInterrupted = self.isInterrupted;
  RTCLog(@"Application became active after an interruption. Treating as interruption "
          "end. isInterrupted changed from %d to 0.",
         isInterrupted);
  if (isInterrupted) {
    self.isInterrupted = NO;
    [self updateAudioSessionAfterEvent];
  }
  // Always treat application becoming active as an interruption end event.
  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
}

#pragma mark - Private

// Error returned by -checkLock: when a configuration method is called
// without holding the configuration lock.
+ (NSError *)lockError {
  NSDictionary *userInfo =
      @{NSLocalizedDescriptionKey : @"Must call lockForConfiguration before calling this method."};
  NSError *error = [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
                                              code:kRTCAudioSessionErrorLockRequired
                                          userInfo:userInfo];
  return error;
}

- (std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> >)delegates {
  @synchronized(self) {
    // Note: this returns a copy.
    return _delegates;
  }
}

// Like -addDelegate:, but inserts at the front so the delegate is notified
// first.
// TODO(tkchin): check for duplicates.
- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
  @synchronized(self) {
    _delegates.insert(_delegates.begin(), delegate);
  }
}

// Drops weak delegate references that have been zeroed out by ARC.
- (void)removeZeroedDelegates {
  @synchronized(self) {
    _delegates.erase(
        std::remove_if(_delegates.begin(),
                       _delegates.end(),
                       [](id delegate) -> bool { return delegate == nil; }),
        _delegates.end());
  }
}

- (int)activationCount {
  return _activationCount.load();
}

// Returns the post-increment count.
- (int)incrementActivationCount {
  RTCLog(@"Incrementing activation count.");
  return _activationCount.fetch_add(1) + 1;
}

// Returns the post-decrement count.
- (NSInteger)decrementActivationCount {
  RTCLog(@"Decrementing activation count.");
  return _activationCount.fetch_sub(1) - 1;
}

- (int)webRTCSessionCount {
  return _webRTCSessionCount.load();
}

// Audio may play or record unless manual audio mode is on and audio has not
// been explicitly enabled.
- (BOOL)canPlayOrRecord {
  return !self.useManualAudio || self.isAudioEnabled;
}

- (BOOL)isInterrupted {
  @synchronized(self) {
    return _isInterrupted;
  }
}

- (void)setIsInterrupted:(BOOL)isInterrupted {
  @synchronized(self) {
    if (_isInterrupted == isInterrupted) {
      return;
    }
    _isInterrupted = isInterrupted;
  }
}

// Verifies the configuration lock is held by this thread (via the
// thread-local flag set in -lockForConfiguration). On failure, populates
// *outError with +lockError and returns NO.
- (BOOL)checkLock:(NSError **)outError {
  if (!mutex_locked) {
    if (outError) {
      *outError = [RTC_OBJC_TYPE(RTCAudioSession) lockError];
    }
    return NO;
  }
  return YES;
}

// Tracks the start of a WebRTC session and notifies delegates that
// playout/recording started. Always succeeds.
- (BOOL)beginWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  _webRTCSessionCount.fetch_add(1);
  [self notifyDidStartPlayOrRecord];
  return YES;
}

// Counterpart to -beginWebRTCSession:. Always succeeds.
- (BOOL)endWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  _webRTCSessionCount.fetch_sub(1);
  [self notifyDidStopPlayOrRecord];
  return YES;
}

// Applies the WebRTC audio configuration and activates the session.
// Returns NO (with *outError set, if provided) when configuration fails or
// no audio input is available.
- (BOOL)configureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  RTCLog(@"Configuring audio session for WebRTC.");

  // Configure the AVAudioSession and activate it.
  // Provide an error even if there isn't one so we can log it.
  NSError *error = nil;
  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
  if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
    RTCLogError(@"Failed to set WebRTC audio configuration: %@",
                error.localizedDescription);
    // Do not call setActive:NO if setActive:YES failed.
    if (outError) {
      *outError = error;
    }
    return NO;
  }

  // Ensure that the device currently supports audio input.
  // TODO(tkchin): Figure out if this is really necessary.
  if (!self.inputAvailable) {
    RTCLogError(@"No audio input path is available!");
    [self unconfigureWebRTCSession:nil];
    if (outError) {
      *outError = [self configurationErrorWithDescription:@"No input path."];
    }
    return NO;
  }

  // It can happen (e.g. in combination with BT devices) that the attempt to set
  // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
  // configuration attempt using the sample rate that worked using the active
  // audio session. A typical case is that only 8 or 16kHz can be set, e.g. in
  // combination with BT headsets. Using this "trick" seems to avoid a state
  // where Core Audio asks for a different number of audio frames than what the
  // session's I/O buffer duration corresponds to.
  // TODO(henrika): this fix resolves bugs.webrtc.org/6004 but it has only been
  // tested on a limited set of iOS devices and BT devices.
  double sessionSampleRate = self.sampleRate;
  double preferredSampleRate = webRTCConfig.sampleRate;
  if (sessionSampleRate != preferredSampleRate) {
    RTCLogWarning(
        @"Current sample rate (%.2f) is not the preferred rate (%.2f)",
        sessionSampleRate, preferredSampleRate);
    if (![self setPreferredSampleRate:sessionSampleRate
                                error:&error]) {
      RTCLogError(@"Failed to set preferred sample rate: %@",
                  error.localizedDescription);
      // NOTE: failure to set the fallback rate is reported but not fatal.
      if (outError) {
        *outError = error;
      }
    }
  }

  return YES;
}

// Deactivates the session. Note: always returns YES; any setActive: failure
// is only surfaced through *outError.
- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  RTCLog(@"Unconfiguring audio session for WebRTC.");
  [self setActive:NO error:outError];

  return YES;
}

- (NSError *)configurationErrorWithDescription:(NSString *)description {
  NSDictionary* userInfo = @{
    NSLocalizedDescriptionKey: description,
  };
  return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
                                    code:kRTCAudioSessionErrorConfiguration
                                userInfo:userInfo];
}

// After an external event (interruption end, media services lost/reset),
// re-syncs the AVAudioSession active state with our activation count.
- (void)updateAudioSessionAfterEvent {
  BOOL shouldActivate = self.activationCount > 0;
  AVAudioSessionSetActiveOptions options = shouldActivate ?
      0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
  NSError *error = nil;
  if ([self.session setActive:shouldActivate
                  withOptions:options
                        error:&error]) {
    self.isActive = shouldActivate;
  } else {
    RTCLogError(@"Failed to set session active to %d. Error:%@",
                shouldActivate, error.localizedDescription);
  }
}

// Recomputes canPlayOrRecord and notifies delegates only when it changed.
- (void)updateCanPlayOrRecord {
  BOOL canPlayOrRecord = NO;
  BOOL shouldNotify = NO;
  @synchronized(self) {
    canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
    if (_canPlayOrRecord == canPlayOrRecord) {
      return;
    }
    _canPlayOrRecord = canPlayOrRecord;
    shouldNotify = YES;
  }
  // Notify outside the synchronized block.
  if (shouldNotify) {
    [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
  }
}

// Called when an external entity (e.g. CallKit) activated the session on
// our behalf; mirrors the bookkeeping done in -setActive:error:.
- (void)audioSessionDidActivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidActivate called on different AVAudioSession");
  }
  RTCLog(@"Audio session was externally activated.");
  [self incrementActivationCount];
  self.isActive = YES;
  // When a CallKit call begins, it's possible that we receive an interruption
  // begin without a corresponding end. Since we know that we have an activated
  // audio session at this point, just clear any saved interruption flag since
  // the app may never be foregrounded during the duration of the call.
  if (self.isInterrupted) {
    RTCLog(@"Clearing interrupted state due to external activation.");
    self.isInterrupted = NO;
  }
  // Treat external audio session activation as an end interruption event.
  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
}

// Counterpart to -audioSessionDidActivate:.
- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
  }
  RTCLog(@"Audio session was externally deactivated.");
  self.isActive = NO;
  [self decrementActivationCount];
}

// KVO callback for the outputVolume key path registered in init. Events
// with any other context are forwarded to super.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
  if (context == (__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class) {
    if (object == _session) {
      NSNumber *newVolume = change[NSKeyValueChangeNewKey];
      RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
      [self notifyDidChangeOutputVolume:newVolume.floatValue];
    }
  } else {
    [super observeValueForKeyPath:keyPath
                         ofObject:object
                           change:change
                          context:context];
  }
}

// The notify* methods below fan an event out to every registered delegate
// that implements the corresponding optional selector. Each iterates over a
// snapshot copy of the delegate list (see -delegates).

- (void)notifyAudioUnitStartFailedWithError:(OSStatus)error {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:audioUnitStartFailedWithError:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self
          audioUnitStartFailedWithError:[NSError errorWithDomain:kRTCAudioSessionErrorDomain
                                                            code:error
                                                        userInfo:nil]];
    }
  }
}

- (void)notifyDidBeginInterruption {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidBeginInterruption:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidBeginInterruption:self];
    }
  }
}

- (void)notifyDidEndInterruptionWithShouldResumeSession:
    (BOOL)shouldResumeSession {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidEndInterruption:shouldResumeSession:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidEndInterruption:self
                           shouldResumeSession:shouldResumeSession];
    }
  }
}

- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
                         previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidChangeRoute:self
                                    reason:reason
                             previousRoute:previousRoute];
    }
  }
}

- (void)notifyMediaServicesWereLost {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionMediaServerTerminated:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerTerminated:self];
    }
  }
}

- (void)notifyMediaServicesWereReset {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionMediaServerReset:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerReset:self];
    }
  }
}

- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
    }
  }
}

- (void)notifyDidStartPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStartPlayOrRecord:self];
    }
  }
}

- (void)notifyDidStopPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStopPlayOrRecord:self];
    }
  }
}

- (void)notifyDidChangeOutputVolume:(float)volume {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didChangeOutputVolume:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeOutputVolume:volume];
    }
  }
}

- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didDetectPlayoutGlitch:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didDetectPlayoutGlitch:totalNumberOfGlitches];
    }
  }
}

- (void)notifyWillSetActive:(BOOL)active {
  for (id delegate : self.delegates) {
    SEL sel = @selector(audioSession:willSetActive:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self willSetActive:active];
    }
  }
}

- (void)notifyDidSetActive:(BOOL)active {
  for (id delegate : self.delegates) {
    SEL sel = @selector(audioSession:didSetActive:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didSetActive:active];
    }
  }
}

- (void)notifyFailedToSetActive:(BOOL)active error:(NSError *)error {
  for (id delegate : self.delegates) {
    SEL sel = @selector(audioSession:failedToSetActive:error:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self failedToSetActive:active error:error];
    }
  }
}

@end