/*
 * Copyright 2016 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"

#import <UIKit/UIKit.h>

#include "webrtc/base/atomicops.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/audio_device/ios/audio_device_ios.h"

#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"

NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession";
NSInteger const kRTCAudioSessionErrorLockRequired = -1;
NSInteger const kRTCAudioSessionErrorConfiguration = -2;
NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";

// This class needs to be thread-safe because it is accessed from many threads.
// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
// lock contention so coarse locks should be fine for now.
@implementation RTCAudioSession {
  rtc::CriticalSection _crit;
  AVAudioSession *_session;
  volatile int _activationCount;
  volatile int _lockRecursionCount;
  volatile int _webRTCSessionCount;
  BOOL _isActive;
  BOOL _useManualAudio;
  BOOL _isAudioEnabled;
  BOOL _canPlayOrRecord;
  BOOL _isInterrupted;
}

@synthesize session = _session;
@synthesize delegates = _delegates;

+ (instancetype)sharedInstance {
  static dispatch_once_t onceToken;
  static RTCAudioSession *sharedInstance = nil;
  dispatch_once(&onceToken, ^{
    sharedInstance = [[self alloc] init];
  });
  return sharedInstance;
}

- (instancetype)init {
  if (self = [super init]) {
    _session = [AVAudioSession sharedInstance];

    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(handleInterruptionNotification:)
                   name:AVAudioSessionInterruptionNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleRouteChangeNotification:)
                   name:AVAudioSessionRouteChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleMediaServicesWereLost:)
                   name:AVAudioSessionMediaServicesWereLostNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleMediaServicesWereReset:)
                   name:AVAudioSessionMediaServicesWereResetNotification
                 object:nil];
    // Posted on the main thread when the primary audio from other applications
    // starts and stops. Foreground applications may use this notification as a
    // hint to enable or disable audio that is secondary.
    [center addObserver:self
               selector:@selector(handleSilenceSecondaryAudioHintNotification:)
                   name:AVAudioSessionSilenceSecondaryAudioHintNotification
                 object:nil];
    // Also track foreground event in order to deal with interruption ended
    // situation.
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:nil];
    [_session addObserver:self
               forKeyPath:kRTCAudioSessionOutputVolumeSelector
                  options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                  context:nil];

    RTCLog(@"RTCAudioSession (%p): init.", self);
  }
  return self;
}

- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  [_session removeObserver:self
                forKeyPath:kRTCAudioSessionOutputVolumeSelector
                   context:nil];
  RTCLog(@"RTCAudioSession (%p): dealloc.", self);
}

- (NSString *)description {
  NSString *format =
      @"RTCAudioSession: {\n"
       "  category: %@\n"
       "  categoryOptions: %ld\n"
       "  mode: %@\n"
       "  isActive: %d\n"
       "  sampleRate: %.2f\n"
       "  IOBufferDuration: %f\n"
       "  outputNumberOfChannels: %ld\n"
       "  inputNumberOfChannels: %ld\n"
       "  outputLatency: %f\n"
       "  inputLatency: %f\n"
       "  outputVolume: %f\n"
       "}";
  NSString *description = [NSString stringWithFormat:format,
      self.category, (long)self.categoryOptions, self.mode,
      self.isActive, self.sampleRate, self.IOBufferDuration,
      self.outputNumberOfChannels, self.inputNumberOfChannels,
      self.outputLatency, self.inputLatency, self.outputVolume];
  return description;
}

- (void)setIsActive:(BOOL)isActive {
  @synchronized(self) {
    _isActive = isActive;
  }
}

- (BOOL)isActive {
  @synchronized(self) {
    return _isActive;
  }
}

- (BOOL)isLocked {
  return _lockRecursionCount > 0;
}

- (void)setUseManualAudio:(BOOL)useManualAudio {
  @synchronized(self) {
    if (_useManualAudio == useManualAudio) {
      return;
    }
    _useManualAudio = useManualAudio;
  }
  [self updateCanPlayOrRecord];
}

- (BOOL)useManualAudio {
  @synchronized(self) {
    return _useManualAudio;
  }
}

- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
  @synchronized(self) {
    if (_isAudioEnabled == isAudioEnabled) {
      return;
    }
    _isAudioEnabled = isAudioEnabled;
  }
  [self updateCanPlayOrRecord];
}

- (BOOL)isAudioEnabled {
  @synchronized(self) {
    return _isAudioEnabled;
  }
}

// TODO(tkchin): Check for duplicates.
- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate {
  RTCLog(@"Adding delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.push_back(delegate);
    [self removeZeroedDelegates];
  }
}

- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate {
  RTCLog(@"Removing delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.erase(std::remove(_delegates.begin(),
                                 _delegates.end(),
                                 delegate),
                     _delegates.end());
    [self removeZeroedDelegates];
  }
}

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wthread-safety-analysis"

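// Acquires the configuration lock. Calls may nest on the same thread;
// _lockRecursionCount tracks the depth so that checkLock: can verify that
// configuration methods are only used while the lock is held.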
- (void)lockForConfiguration {
  _crit.Enter();
  rtc::AtomicOps::Increment(&_lockRecursionCount);
}

- (void)unlockForConfiguration {
  // Don't let threads other than the one that called lockForConfiguration
  // unlock.
  if (_crit.TryEnter()) {
    rtc::AtomicOps::Decrement(&_lockRecursionCount);
    // One unlock for the tryLock, and another one to actually unlock. If this
    // was called without anyone calling lock, we will hit an assertion.
    _crit.Leave();
    _crit.Leave();
  }
}

#pragma clang diagnostic pop

#pragma mark - AVAudioSession proxy methods

- (NSString *)category {
  return self.session.category;
}

- (AVAudioSessionCategoryOptions)categoryOptions {
  return self.session.categoryOptions;
}

- (NSString *)mode {
  return self.session.mode;
}

- (BOOL)secondaryAudioShouldBeSilencedHint {
  return self.session.secondaryAudioShouldBeSilencedHint;
}

- (AVAudioSessionRouteDescription *)currentRoute {
  return self.session.currentRoute;
}

- (NSInteger)maximumInputNumberOfChannels {
  return self.session.maximumInputNumberOfChannels;
}

- (NSInteger)maximumOutputNumberOfChannels {
  return self.session.maximumOutputNumberOfChannels;
}

- (float)inputGain {
  return self.session.inputGain;
}

- (BOOL)inputGainSettable {
  return self.session.inputGainSettable;
}

- (BOOL)inputAvailable {
  return self.session.inputAvailable;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)inputDataSources {
  return self.session.inputDataSources;
}

- (AVAudioSessionDataSourceDescription *)inputDataSource {
  return self.session.inputDataSource;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)outputDataSources {
  return self.session.outputDataSources;
}

- (AVAudioSessionDataSourceDescription *)outputDataSource {
  return self.session.outputDataSource;
}

- (double)sampleRate {
  return self.session.sampleRate;
}

- (double)preferredSampleRate {
  return self.session.preferredSampleRate;
}

- (NSInteger)inputNumberOfChannels {
  return self.session.inputNumberOfChannels;
}

- (NSInteger)outputNumberOfChannels {
  return self.session.outputNumberOfChannels;
}

- (float)outputVolume {
  return self.session.outputVolume;
}

- (NSTimeInterval)inputLatency {
  return self.session.inputLatency;
}

- (NSTimeInterval)outputLatency {
  return self.session.outputLatency;
}

- (NSTimeInterval)IOBufferDuration {
  return self.session.IOBufferDuration;
}

- (NSTimeInterval)preferredIOBufferDuration {
  return self.session.preferredIOBufferDuration;
}

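// Activation is reference counted: each successful setActive:YES increments
// _activationCount and every setActive:NO decrements it, so the underlying
// AVAudioSession is only deactivated by the last unbalanced deactivation call.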
// TODO(tkchin): Simplify the amount of locking happening here. Likely that we
// can just do atomic increments / decrements.
- (BOOL)setActive:(BOOL)active
            error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  int activationCount = _activationCount;
  if (!active && activationCount == 0) {
    RTCLogWarning(@"Attempting to deactivate without prior activation.");
  }
  BOOL success = YES;
  BOOL isActive = self.isActive;
  // Keep a local error so we can log it.
  NSError *error = nil;
  BOOL shouldSetActive =
      (active && !isActive) || (!active && isActive && activationCount == 1);
  // Attempt to activate if we're not active.
  // Attempt to deactivate if we're active and it's the last unbalanced call.
  if (shouldSetActive) {
    AVAudioSession *session = self.session;
    // AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to ensure
    // that other audio sessions that were interrupted by our session can return
    // to their active state. It is recommended for VoIP apps to use this
    // option.
    AVAudioSessionSetActiveOptions options =
        active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
    success = [session setActive:active
                     withOptions:options
                           error:&error];
    if (outError) {
      *outError = error;
    }
  }
  if (success) {
    if (shouldSetActive) {
      self.isActive = active;
    }
    if (active) {
      [self incrementActivationCount];
    }
  } else {
    RTCLogError(@"Failed to setActive:%d. Error: %@",
                active, error.localizedDescription);
  }
  // Decrement activation count on deactivation whether or not it succeeded.
  if (!active) {
    [self decrementActivationCount];
  }
  RTCLog(@"Number of current activations: %d", _activationCount);
  return success;
}

- (BOOL)setCategory:(NSString *)category
        withOptions:(AVAudioSessionCategoryOptions)options
              error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setCategory:category withOptions:options error:outError];
}

- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setMode:mode error:outError];
}

- (BOOL)setInputGain:(float)gain error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setInputGain:gain error:outError];
}

- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredSampleRate:sampleRate error:outError];
}

- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
                               error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredIOBufferDuration:duration error:outError];
}

- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
                                    error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredInputNumberOfChannels:count error:outError];
}

- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
                                     error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredOutputNumberOfChannels:count error:outError];
}

- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
                          error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session overrideOutputAudioPort:portOverride error:outError];
}

- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
                    error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredInput:inPort error:outError];
}

- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                     error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setInputDataSource:dataSource error:outError];
}

- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                      error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setOutputDataSource:dataSource error:outError];
}

#pragma mark - Notifications

- (void)handleInterruptionNotification:(NSNotification *)notification {
  NSNumber* typeNumber =
      notification.userInfo[AVAudioSessionInterruptionTypeKey];
  AVAudioSessionInterruptionType type =
      (AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionInterruptionTypeBegan:
      RTCLog(@"Audio session interruption began.");
      self.isActive = NO;
      self.isInterrupted = YES;
      [self notifyDidBeginInterruption];
      break;
    case AVAudioSessionInterruptionTypeEnded: {
      RTCLog(@"Audio session interruption ended.");
      self.isInterrupted = NO;
      [self updateAudioSessionAfterEvent];
      NSNumber *optionsNumber =
          notification.userInfo[AVAudioSessionInterruptionOptionKey];
      AVAudioSessionInterruptionOptions options =
          optionsNumber.unsignedIntegerValue;
      BOOL shouldResume =
          options & AVAudioSessionInterruptionOptionShouldResume;
      [self notifyDidEndInterruptionWithShouldResumeSession:shouldResume];
      break;
    }
  }
}

- (void)handleRouteChangeNotification:(NSNotification *)notification {
  // Get reason for current route change.
  NSNumber* reasonNumber =
      notification.userInfo[AVAudioSessionRouteChangeReasonKey];
  AVAudioSessionRouteChangeReason reason =
      (AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
  RTCLog(@"Audio route changed:");
  switch (reason) {
    case AVAudioSessionRouteChangeReasonUnknown:
      RTCLog(@"Audio route changed: ReasonUnknown");
      break;
    case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
      RTCLog(@"Audio route changed: NewDeviceAvailable");
      break;
    case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
      RTCLog(@"Audio route changed: OldDeviceUnavailable");
      break;
    case AVAudioSessionRouteChangeReasonCategoryChange:
      RTCLog(@"Audio route changed: CategoryChange to :%@",
             self.session.category);
      break;
    case AVAudioSessionRouteChangeReasonOverride:
      RTCLog(@"Audio route changed: Override");
      break;
    case AVAudioSessionRouteChangeReasonWakeFromSleep:
      RTCLog(@"Audio route changed: WakeFromSleep");
      break;
    case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
      RTCLog(@"Audio route changed: NoSuitableRouteForCategory");
      break;
    case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
      RTCLog(@"Audio route changed: RouteConfigurationChange");
      break;
  }
  AVAudioSessionRouteDescription* previousRoute =
      notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
  // Log previous route configuration.
  RTCLog(@"Previous route: %@\nCurrent route:%@",
         previousRoute, self.session.currentRoute);
  [self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
}

- (void)handleMediaServicesWereLost:(NSNotification *)notification {
  RTCLog(@"Media services were lost.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereLost];
}

- (void)handleMediaServicesWereReset:(NSNotification *)notification {
  RTCLog(@"Media services were reset.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereReset];
}

- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
  // TODO(henrika): Just adding logs here for now until we know whether we ever
  // see this notification and might be affected by it, or whether further
  // actions are required.
  NSNumber *typeNumber =
      notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
  AVAudioSessionSilenceSecondaryAudioHintType type =
      (AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
      RTCLog(@"Another application's primary audio has started.");
      break;
    case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
      RTCLog(@"Another application's primary audio has stopped.");
      break;
  }
}

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  RTCLog(@"Application became active after an interruption. Treating as "
         "interruption end. isInterrupted changed from %d to 0.",
         self.isInterrupted);
  if (self.isInterrupted) {
    self.isInterrupted = NO;
    [self updateAudioSessionAfterEvent];
  }
  // Always treat application becoming active as an interruption end event.
  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
}

#pragma mark - Private

+ (NSError *)lockError {
  NSDictionary *userInfo = @{
    NSLocalizedDescriptionKey:
        @"Must call lockForConfiguration before calling this method."
  };
  NSError *error =
      [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
                                 code:kRTCAudioSessionErrorLockRequired
                             userInfo:userInfo];
  return error;
}

- (std::vector<__weak id<RTCAudioSessionDelegate> >)delegates {
  @synchronized(self) {
    // Note: this returns a copy.
    return _delegates;
  }
}

// TODO(tkchin): check for duplicates.
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
  @synchronized(self) {
    _delegates.insert(_delegates.begin(), delegate);
  }
}

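// Removes entries for delegates that have been deallocated (their __weak
// references have been zeroed to nil).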
- (void)removeZeroedDelegates {
  @synchronized(self) {
    _delegates.erase(
        std::remove_if(_delegates.begin(),
                       _delegates.end(),
                       [](id delegate) -> bool { return delegate == nil; }),
        _delegates.end());
  }
}

- (int)activationCount {
  return _activationCount;
}

- (int)incrementActivationCount {
  RTCLog(@"Incrementing activation count.");
  return rtc::AtomicOps::Increment(&_activationCount);
}

- (NSInteger)decrementActivationCount {
  RTCLog(@"Decrementing activation count.");
  return rtc::AtomicOps::Decrement(&_activationCount);
}

- (int)webRTCSessionCount {
  return _webRTCSessionCount;
}

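// Playout/recording is allowed unless manual audio is in use and the app has
// not (yet) enabled audio.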
- (BOOL)canPlayOrRecord {
  return !self.useManualAudio || self.isAudioEnabled;
}

- (BOOL)isInterrupted {
  @synchronized(self) {
    return _isInterrupted;
  }
}

- (void)setIsInterrupted:(BOOL)isInterrupted {
  @synchronized(self) {
    if (_isInterrupted == isInterrupted) {
      return;
    }
    _isInterrupted = isInterrupted;
  }
}

- (BOOL)checkLock:(NSError **)outError {
  // Check the ivar instead of trying to acquire the lock so that we won't
  // accidentally acquire it if lockForConfiguration hasn't been called.
  if (!self.isLocked) {
    if (outError) {
      *outError = [RTCAudioSession lockError];
    }
    return NO;
  }
  return YES;
}

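// Increments the count of WebRTC users of the session and notifies delegates
// that playout/recording started. endWebRTCSession: is the balancing call.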
- (BOOL)beginWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  rtc::AtomicOps::Increment(&_webRTCSessionCount);
  [self notifyDidStartPlayOrRecord];
  return YES;
}

- (BOOL)endWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  rtc::AtomicOps::Decrement(&_webRTCSessionCount);
  [self notifyDidStopPlayOrRecord];
  return YES;
}

- (BOOL)configureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  RTCLog(@"Configuring audio session for WebRTC.");

  // Configure the AVAudioSession and activate it.
  // Provide an error even if there isn't one so we can log it.
  NSError *error = nil;
  RTCAudioSessionConfiguration *webRTCConfig =
      [RTCAudioSessionConfiguration webRTCConfiguration];
  if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
    RTCLogError(@"Failed to set WebRTC audio configuration: %@",
                error.localizedDescription);
    // Do not call setActive:NO if setActive:YES failed.
    if (outError) {
      *outError = error;
    }
    return NO;
  }

  // Ensure that the device currently supports audio input.
  // TODO(tkchin): Figure out if this is really necessary.
  if (!self.inputAvailable) {
    RTCLogError(@"No audio input path is available!");
    [self unconfigureWebRTCSession:nil];
    if (outError) {
      *outError = [self configurationErrorWithDescription:@"No input path."];
    }
    return NO;
  }

  // It can happen (e.g. in combination with BT devices) that the attempt to set
  // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
  // configuration attempt using the sample rate that worked using the active
  // audio session. A typical case is that only 8 or 16kHz can be set, e.g. in
  // combination with BT headsets. Using this "trick" seems to avoid a state
  // where Core Audio asks for a different number of audio frames than what the
  // session's I/O buffer duration corresponds to.
  // TODO(henrika): this fix resolves bugs.webrtc.org/6004 but it has only been
  // tested on a limited set of iOS devices and BT devices.
  double sessionSampleRate = self.sampleRate;
  double preferredSampleRate = webRTCConfig.sampleRate;
  if (sessionSampleRate != preferredSampleRate) {
    RTCLogWarning(
        @"Current sample rate (%.2f) is not the preferred rate (%.2f)",
        sessionSampleRate, preferredSampleRate);
    if (![self setPreferredSampleRate:sessionSampleRate
                                error:&error]) {
      RTCLogError(@"Failed to set preferred sample rate: %@",
                  error.localizedDescription);
      if (outError) {
        *outError = error;
      }
    }
  }

  return YES;
}

- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  RTCLog(@"Unconfiguring audio session for WebRTC.");
  [self setActive:NO error:outError];

  return YES;
}

- (NSError *)configurationErrorWithDescription:(NSString *)description {
  NSDictionary* userInfo = @{
    NSLocalizedDescriptionKey: description,
  };
  return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
                                    code:kRTCAudioSessionErrorConfiguration
                                userInfo:userInfo];
}

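// Re-applies the expected active state after an external event (interruption
// end, media services lost/reset): the session is reactivated if there are
// outstanding activations, otherwise it is deactivated while notifying other
// audio sessions.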
- (void)updateAudioSessionAfterEvent {
  BOOL shouldActivate = self.activationCount > 0;
  AVAudioSessionSetActiveOptions options = shouldActivate ?
      0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
  NSError *error = nil;
  if ([self.session setActive:shouldActivate
                  withOptions:options
                        error:&error]) {
    self.isActive = shouldActivate;
  } else {
    RTCLogError(@"Failed to set session active to %d. Error:%@",
                shouldActivate, error.localizedDescription);
  }
}

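// Recomputes canPlayOrRecord and, if its value changed, notifies delegates
// outside of the synchronized block.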
- (void)updateCanPlayOrRecord {
  BOOL canPlayOrRecord = NO;
  BOOL shouldNotify = NO;
  @synchronized(self) {
    canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
    if (_canPlayOrRecord == canPlayOrRecord) {
      return;
    }
    _canPlayOrRecord = canPlayOrRecord;
    shouldNotify = YES;
  }
  if (shouldNotify) {
    [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
  }
}

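// Called when the AVAudioSession has been activated outside of this class
// (e.g. by CallKit); mirrors the external activation into our state and
// activation count. audioSessionDidDeactivate: is the counterpart.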
- (void)audioSessionDidActivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidActivate called on different AVAudioSession");
  }
  [self incrementActivationCount];
  self.isActive = YES;
}

- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
  }
  self.isActive = NO;
  [self decrementActivationCount];
}

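// KVO callback for the "outputVolume" key path registered in init; forwards
// output volume changes to delegates.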
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
  if (object == _session) {
    NSNumber *newVolume = change[NSKeyValueChangeNewKey];
    RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
    [self notifyDidChangeOutputVolume:newVolume.floatValue];
  } else {
    [super observeValueForKeyPath:keyPath
                         ofObject:object
                           change:change
                          context:context];
  }
}

- (void)notifyDidBeginInterruption {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidBeginInterruption:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidBeginInterruption:self];
    }
  }
}

- (void)notifyDidEndInterruptionWithShouldResumeSession:
    (BOOL)shouldResumeSession {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidEndInterruption:shouldResumeSession:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidEndInterruption:self
                           shouldResumeSession:shouldResumeSession];
    }
  }
}

- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
    previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidChangeRoute:self
                                    reason:reason
                             previousRoute:previousRoute];
    }
  }
}

- (void)notifyMediaServicesWereLost {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionMediaServerTerminated:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerTerminated:self];
    }
  }
}

- (void)notifyMediaServicesWereReset {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionMediaServerReset:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerReset:self];
    }
  }
}

- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
    }
  }
}

- (void)notifyDidStartPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStartPlayOrRecord:self];
    }
  }
}

- (void)notifyDidStopPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStopPlayOrRecord:self];
    }
  }
}

- (void)notifyDidChangeOutputVolume:(float)volume {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didChangeOutputVolume:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeOutputVolume:volume];
    }
  }
}

@end