Index: webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
diff --git a/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm b/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
index 87fa5b6cdd3dae8762822702f480c831353dcfc9..bffc659de2e57b9c0d5ac564fc43c229699af340 100644
--- a/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
+++ b/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
@@ -66,7 +66,6 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
selector:@selector(handleRouteChangeNotification:)
name:AVAudioSessionRouteChangeNotification
object:nil];
- // TODO(tkchin): Maybe listen to SilenceSecondaryAudioHintNotification.
[center addObserver:self
selector:@selector(handleMediaServicesWereLost:)
name:AVAudioSessionMediaServicesWereLostNotification
@@ -75,6 +74,13 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
selector:@selector(handleMediaServicesWereReset:)
name:AVAudioSessionMediaServicesWereResetNotification
object:nil];
+ // Posted on the main thread when the primary audio from other applications
+ // starts and stops. Foreground applications may use this notification as a
+ // hint to enable or disable audio that is secondary.
+ [center addObserver:self
+ selector:@selector(handleSilenceSecondaryAudioHintNotification:)
+ name:AVAudioSessionSilenceSecondaryAudioHintNotification
+ object:nil];
// Also track foreground event in order to deal with interruption ended situation.
[center addObserver:self
selector:@selector(handleApplicationDidBecomeActive:)
@@ -516,6 +522,24 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
[self notifyMediaServicesWereReset];
}
+- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification*)notification {
tkchin_webrtc 2016/09/26 09:54:30: NSNotification *)
henrika_webrtc 2016/09/27 11:39:35: Done.
+ // TODO(henrika): just adding logs here for now until we know if we will ever
+ // see this notification and might be affected by it, or if further actions
+ // are required.
+ NSNumber* typeNumber =
tkchin_webrtc 2016/09/26 09:54:29: NSNumber *typeNumber
henrika_webrtc 2016/09/27 11:39:35: Done.
+ notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
+ AVAudioSessionSilenceSecondaryAudioHintType type =
+ (AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
+ switch (type) {
+ case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
+ RTCLog(@"Another application's primary audio has started.");
+ break;
+ case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
+ RTCLog(@"Another application's primary audio has stopped.");
+ break;
+ }
+}
+
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
if (self.isInterrupted) {
RTCLog(@"Application became active after an interruption. Treating as interruption end.");