Chromium Code Reviews

Side by Side Diff: webrtc/modules/audio_device/ios/audio_device_ios.mm

Issue 1796983004: Use RTCAudioSessionDelegate in AudioDeviceIOS. (Closed)
Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Update a comment | Created 4 years, 9 months ago
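This patch set replaces the NSNotificationCenter block observers that AudioDeviceIOS previously registered in RegisterNotificationObservers() (removed below) with an RTCAudioSessionDelegateAdapter created in the constructor ([[RTCAudioSessionDelegateAdapter alloc] initWithObserver:this]) and attached with [session pushDelegate:audio_session_observer_]. Session events reach this class through the C++ AudioSessionObserver callbacks (OnInterruptionBegin(), OnInterruptionEnd(), OnValidRouteChange()), which hop back to the thread captured at construction (thread_) via rtc::AsyncInvoker before the HandleXxx() methods touch the voice-processing audio unit. The adapter itself lives in RTCAudioSessionDelegateAdapter.mm and is not part of this file's diff; the sketch below only illustrates the Objective-C-to-C++ bridging pattern, with a simplified observer interface and an illustrative callback selector rather than the actual WebRTC declarations.

// Sketch only (not part of this CL): a stripped-down delegate -> observer
// bridge. The real adapter is RTCAudioSessionDelegateAdapter.mm; the
// interface below is simplified and the selector name is illustrative.
#import <Foundation/Foundation.h>

// C++ side: the interface AudioDeviceIOS implements (simplified here).
class AudioSessionObserver {
 public:
  virtual void OnInterruptionBegin() = 0;
  virtual void OnInterruptionEnd() = 0;
  virtual void OnValidRouteChange() = 0;

 protected:
  virtual ~AudioSessionObserver() {}
};

// Objective-C side: holds a non-owning pointer to the C++ observer and
// forwards each audio session event across the language boundary.
@interface SessionDelegateAdapterSketch : NSObject
- (instancetype)initWithObserver:(AudioSessionObserver*)observer;
- (void)audioSessionDidBeginInterruption;  // illustrative callback name
@end

@implementation SessionDelegateAdapterSketch {
  AudioSessionObserver* _observer;  // must be cleared before the observer dies
}

- (instancetype)initWithObserver:(AudioSessionObserver*)observer {
  self = [super init];
  if (self) {
    _observer = observer;
  }
  return self;
}

- (void)audioSessionDidBeginInterruption {
  if (_observer) {
    _observer->OnInterruptionBegin();
  }
}

@end

In this file, each OnXxx() entry point either calls its HandleXxx() directly when thread_->IsCurrent() is true or posts it with async_invoker_->AsyncInvoke<void>(thread_, rtc::Bind(...)), so interruption and route-change handling always runs on the thread that created the ADM.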
1 /* 1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 #if !defined(__has_feature) || !__has_feature(objc_arc) 11 #if !defined(__has_feature) || !__has_feature(objc_arc)
12 #error "This file requires ARC support." 12 #error "This file requires ARC support."
13 #endif 13 #endif
14 14
15 #import <AVFoundation/AVFoundation.h> 15 #import <AVFoundation/AVFoundation.h>
16 #import <Foundation/Foundation.h> 16 #import <Foundation/Foundation.h>
17 17
18 #include "webrtc/modules/audio_device/ios/audio_device_ios.h" 18 #include "webrtc/modules/audio_device/ios/audio_device_ios.h"
19 19
20 #include "webrtc/base/atomicops.h" 20 #include "webrtc/base/atomicops.h"
21 #include "webrtc/base/bind.h"
21 #include "webrtc/base/checks.h" 22 #include "webrtc/base/checks.h"
22 #include "webrtc/base/criticalsection.h" 23 #include "webrtc/base/criticalsection.h"
23 #include "webrtc/base/logging.h" 24 #include "webrtc/base/logging.h"
25 #include "webrtc/base/thread.h"
24 #include "webrtc/base/thread_annotations.h" 26 #include "webrtc/base/thread_annotations.h"
25 #include "webrtc/modules/audio_device/fine_audio_buffer.h" 27 #include "webrtc/modules/audio_device/fine_audio_buffer.h"
26 #include "webrtc/modules/utility/include/helpers_ios.h" 28 #include "webrtc/modules/utility/include/helpers_ios.h"
27 29
28 #import "webrtc/base/objc/RTCLogging.h" 30 #import "webrtc/base/objc/RTCLogging.h"
29 #import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h" 31 #import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
32 #import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
30 #import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h" 33 #import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
34 #import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.h"
31 35
32 namespace webrtc { 36 namespace webrtc {
33 37
34 #define LOGI() LOG(LS_INFO) << "AudioDeviceIOS::" 38 #define LOGI() LOG(LS_INFO) << "AudioDeviceIOS::"
35 39
36 #define LOG_AND_RETURN_IF_ERROR(error, message) \ 40 #define LOG_AND_RETURN_IF_ERROR(error, message) \
37 do { \ 41 do { \
38 OSStatus err = error; \ 42 OSStatus err = error; \
39 if (err) { \ 43 if (err) { \
40 LOG(LS_ERROR) << message << ": " << err; \ 44 LOG(LS_ERROR) << message << ": " << err; \
(...skipping 58 matching lines...)
99 LOG(LS_INFO) << " OS version: " << ios::GetOSVersionString(); 103 LOG(LS_INFO) << " OS version: " << ios::GetOSVersionString();
100 LOG(LS_INFO) << " processing cores: " << ios::GetProcessorCount(); 104 LOG(LS_INFO) << " processing cores: " << ios::GetProcessorCount();
101 #if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 105 #if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
102 LOG(LS_INFO) << " low power mode: " << ios::GetLowPowerModeEnabled(); 106 LOG(LS_INFO) << " low power mode: " << ios::GetLowPowerModeEnabled();
103 #endif 107 #endif
104 } 108 }
105 } 109 }
106 #endif // !defined(NDEBUG) 110 #endif // !defined(NDEBUG)
107 111
108 AudioDeviceIOS::AudioDeviceIOS() 112 AudioDeviceIOS::AudioDeviceIOS()
109 : audio_device_buffer_(nullptr), 113 : async_invoker_(new rtc::AsyncInvoker()),
110 vpio_unit_(nullptr), 114 audio_device_buffer_(nullptr),
111 recording_(0), 115 vpio_unit_(nullptr),
112 playing_(0), 116 recording_(0),
113 initialized_(false), 117 playing_(0),
114 rec_is_initialized_(false), 118 initialized_(false),
115 play_is_initialized_(false), 119 rec_is_initialized_(false),
116 audio_interruption_observer_(nullptr), 120 play_is_initialized_(false),
117 route_change_observer_(nullptr) { 121 is_interrupted_(false) {
118 LOGI() << "ctor" << ios::GetCurrentThreadDescription(); 122 LOGI() << "ctor" << ios::GetCurrentThreadDescription();
123 thread_ = rtc::Thread::Current();
124 audio_session_observer_ =
125 [[RTCAudioSessionDelegateAdapter alloc] initWithObserver:this];
119 } 126 }
120 127
121 AudioDeviceIOS::~AudioDeviceIOS() { 128 AudioDeviceIOS::~AudioDeviceIOS() {
122 LOGI() << "~dtor" << ios::GetCurrentThreadDescription(); 129 LOGI() << "~dtor" << ios::GetCurrentThreadDescription();
130 audio_session_observer_ = nil;
123 RTC_DCHECK(thread_checker_.CalledOnValidThread()); 131 RTC_DCHECK(thread_checker_.CalledOnValidThread());
124 Terminate(); 132 Terminate();
125 } 133 }
126 134
127 void AudioDeviceIOS::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { 135 void AudioDeviceIOS::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
128 LOGI() << "AttachAudioBuffer"; 136 LOGI() << "AttachAudioBuffer";
129 RTC_DCHECK(audioBuffer); 137 RTC_DCHECK(audioBuffer);
130 RTC_DCHECK(thread_checker_.CalledOnValidThread()); 138 RTC_DCHECK(thread_checker_.CalledOnValidThread());
131 audio_device_buffer_ = audioBuffer; 139 audio_device_buffer_ = audioBuffer;
132 } 140 }
(...skipping 192 matching lines...)
325 } 333 }
326 334
327 int AudioDeviceIOS::GetRecordAudioParameters(AudioParameters* params) const { 335 int AudioDeviceIOS::GetRecordAudioParameters(AudioParameters* params) const {
328 LOGI() << "GetRecordAudioParameters"; 336 LOGI() << "GetRecordAudioParameters";
329 RTC_DCHECK(record_parameters_.is_valid()); 337 RTC_DCHECK(record_parameters_.is_valid());
330 RTC_DCHECK(thread_checker_.CalledOnValidThread()); 338 RTC_DCHECK(thread_checker_.CalledOnValidThread());
331 *params = record_parameters_; 339 *params = record_parameters_;
332 return 0; 340 return 0;
333 } 341 }
334 342
343 void AudioDeviceIOS::OnInterruptionBegin() {
344 RTC_DCHECK(thread_);
345 if (thread_->IsCurrent()) {
346 HandleInterruptionBegin();
347 return;
348 }
349 async_invoker_->AsyncInvoke<void>(
350 thread_,
351 rtc::Bind(&webrtc::AudioDeviceIOS::HandleInterruptionBegin, this));
352 }
353
354 void AudioDeviceIOS::OnInterruptionEnd() {
355 RTC_DCHECK(thread_);
356 if (thread_->IsCurrent()) {
357 HandleInterruptionEnd();
358 return;
359 }
360 async_invoker_->AsyncInvoke<void>(
361 thread_,
362 rtc::Bind(&webrtc::AudioDeviceIOS::HandleInterruptionEnd, this));
363 }
364
365 void AudioDeviceIOS::OnValidRouteChange() {
366 RTC_DCHECK(thread_);
367 if (thread_->IsCurrent()) {
368 HandleValidRouteChange();
369 return;
370 }
371 async_invoker_->AsyncInvoke<void>(
372 thread_,
373 rtc::Bind(&webrtc::AudioDeviceIOS::HandleValidRouteChange, this));
374 }
375
376 void AudioDeviceIOS::HandleInterruptionBegin() {
377 RTC_DCHECK(thread_checker_.CalledOnValidThread());
378 RTCLog(@"Stopping the audio unit due to interruption begin.");
379 LOG_IF_ERROR(AudioOutputUnitStop(vpio_unit_),
380 "Failed to stop the the Voice-Processing I/O unit");
381 is_interrupted_ = true;
382
Chuck 2016/03/15 20:55:49 nit: extra line
tkchin_webrtc 2016/03/15 21:10:53 Done.
383 }
384
385 void AudioDeviceIOS::HandleInterruptionEnd() {
386 RTC_DCHECK(thread_checker_.CalledOnValidThread());
387 RTCLog(@"Starting the audio unit due to interruption end.");
388 LOG_IF_ERROR(AudioOutputUnitStart(vpio_unit_),
389 "Failed to start the the Voice-Processing I/O unit");
390 is_interrupted_ = false;
391
392 }
393
394 void AudioDeviceIOS::HandleValidRouteChange() {
395 RTC_DCHECK(thread_checker_.CalledOnValidThread());
396
397 // Don't do anything if we're interrupted.
398 if (is_interrupted_) {
399 return;
400 }
401
402 // Only restart audio for a valid route change if the session sample rate
403 // has changed.
404 RTCAudioSession* session = [RTCAudioSession sharedInstance];
405 const double current_sample_rate = playout_parameters_.sample_rate();
406 const double session_sample_rate = session.sampleRate;
407 if (current_sample_rate != session_sample_rate) {
408 RTCLog(@"Route changed caused sample rate to change from %f to %f. "
409 "Restarting audio unit.", current_sample_rate, session_sample_rate);
410 if (!RestartAudioUnitWithNewFormat(session_sample_rate)) {
411 RTCLogError(@"Audio restart failed.");
412 }
413 }
414 }
415
335 void AudioDeviceIOS::UpdateAudioDeviceBuffer() { 416 void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
336 LOGI() << "UpdateAudioDeviceBuffer"; 417 LOGI() << "UpdateAudioDeviceBuffer";
337 // AttachAudioBuffer() is called at construction by the main class but check 418 // AttachAudioBuffer() is called at construction by the main class but check
338 // just in case. 419 // just in case.
339 RTC_DCHECK(audio_device_buffer_) << "AttachAudioBuffer must be called first"; 420 RTC_DCHECK(audio_device_buffer_) << "AttachAudioBuffer must be called first";
340 // Inform the audio device buffer (ADB) about the new audio format. 421 // Inform the audio device buffer (ADB) about the new audio format.
341 audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate()); 422 audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate());
342 audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels()); 423 audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels());
343 audio_device_buffer_->SetRecordingSampleRate( 424 audio_device_buffer_->SetRecordingSampleRate(
344 record_parameters_.sample_rate()); 425 record_parameters_.sample_rate());
345 audio_device_buffer_->SetRecordingChannels(record_parameters_.channels()); 426 audio_device_buffer_->SetRecordingChannels(record_parameters_.channels());
346 } 427 }
347 428
348 void AudioDeviceIOS::RegisterNotificationObservers() {
349 LOGI() << "RegisterNotificationObservers";
350 // This code block will be called when AVAudioSessionInterruptionNotification
351 // is observed.
352 void (^interrupt_block)(NSNotification*) = ^(NSNotification* notification) {
353 NSNumber* type_number =
354 notification.userInfo[AVAudioSessionInterruptionTypeKey];
355 AVAudioSessionInterruptionType type =
356 (AVAudioSessionInterruptionType)type_number.unsignedIntegerValue;
357 LOG(LS_INFO) << "Audio session interruption:";
358 switch (type) {
359 case AVAudioSessionInterruptionTypeBegan:
360 // The system has deactivated our audio session.
361 // Stop the active audio unit.
362 LOG(LS_INFO) << " Began => stopping the audio unit";
363 LOG_IF_ERROR(AudioOutputUnitStop(vpio_unit_),
364 "Failed to stop the the Voice-Processing I/O unit");
365 break;
366 case AVAudioSessionInterruptionTypeEnded:
367 // The interruption has ended. Restart the audio session and start the
368 // initialized audio unit again.
369 LOG(LS_INFO) << " Ended => restarting audio session and audio unit";
370 NSError* error = nil;
371 BOOL success = NO;
372 AVAudioSession* session = [AVAudioSession sharedInstance];
373 success = [session setActive:YES error:&error];
374 if (CheckAndLogError(success, error)) {
375 LOG_IF_ERROR(AudioOutputUnitStart(vpio_unit_),
376 "Failed to start the the Voice-Processing I/O unit");
377 }
378 break;
379 }
380 };
381
382 // This code block will be called when AVAudioSessionRouteChangeNotification
383 // is observed.
384 void (^route_change_block)(NSNotification*) =
385 ^(NSNotification* notification) {
386 // Get reason for current route change.
387 NSNumber* reason_number =
388 notification.userInfo[AVAudioSessionRouteChangeReasonKey];
389 AVAudioSessionRouteChangeReason reason =
390 (AVAudioSessionRouteChangeReason)reason_number.unsignedIntegerValue;
391 bool valid_route_change = true;
392 LOG(LS_INFO) << "Route change:";
393 switch (reason) {
394 case AVAudioSessionRouteChangeReasonUnknown:
395 LOG(LS_INFO) << " ReasonUnknown";
396 break;
397 case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
398 LOG(LS_INFO) << " NewDeviceAvailable";
399 break;
400 case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
401 LOG(LS_INFO) << " OldDeviceUnavailable";
402 break;
403 case AVAudioSessionRouteChangeReasonCategoryChange:
404 // It turns out that we see this notification (at least in iOS 9.2)
405 // when making a switch from a BT device to e.g. Speaker using the
406 // iOS Control Center and that we therefore must check if the sample
407 // rate has changed. And if so is the case, restart the audio unit.
408 LOG(LS_INFO) << " CategoryChange";
409 LOG(LS_INFO) << " New category: " << ios::GetAudioSessionCategory();
410 break;
411 case AVAudioSessionRouteChangeReasonOverride:
412 LOG(LS_INFO) << " Override";
413 break;
414 case AVAudioSessionRouteChangeReasonWakeFromSleep:
415 LOG(LS_INFO) << " WakeFromSleep";
416 break;
417 case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
418 LOG(LS_INFO) << " NoSuitableRouteForCategory";
419 break;
420 case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
421 // The set of input and output ports has not changed, but their
422 // configuration has, e.g., a port’s selected data source has
423 // changed. Ignore this type of route change since we are focusing
424 // on detecting headset changes.
425 LOG(LS_INFO) << " RouteConfigurationChange (ignored)";
426 valid_route_change = false;
427 break;
428 }
429
430 if (valid_route_change) {
431 // Log previous route configuration.
432 AVAudioSessionRouteDescription* prev_route =
433 notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
434 LOG(LS_INFO) << "Previous route:";
435 LOG(LS_INFO) << ios::StdStringFromNSString(
436 [NSString stringWithFormat:@"%@", prev_route]);
437
438 // Only restart audio for a valid route change and if the
439 // session sample rate has changed.
440 RTCAudioSession* session = [RTCAudioSession sharedInstance];
441 const double session_sample_rate = session.sampleRate;
442 LOG(LS_INFO) << "session sample rate: " << session_sample_rate;
443 if (playout_parameters_.sample_rate() != session_sample_rate) {
444 if (!RestartAudioUnitWithNewFormat(session_sample_rate)) {
445 LOG(LS_ERROR) << "Audio restart failed";
446 }
447 }
448 }
449 };
450
451 // Get the default notification center of the current process.
452 NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
453
454 // Add AVAudioSessionInterruptionNotification observer.
455 id interruption_observer =
456 [center addObserverForName:AVAudioSessionInterruptionNotification
457 object:nil
458 queue:[NSOperationQueue mainQueue]
459 usingBlock:interrupt_block];
460 // Add AVAudioSessionRouteChangeNotification observer.
461 id route_change_observer =
462 [center addObserverForName:AVAudioSessionRouteChangeNotification
463 object:nil
464 queue:[NSOperationQueue mainQueue]
465 usingBlock:route_change_block];
466
467 // Increment refcount on observers using ARC bridge. Instance variable is a
468 // void* instead of an id because header is included in other pure C++
469 // files.
470 audio_interruption_observer_ = (__bridge_retained void*)interruption_observer;
471 route_change_observer_ = (__bridge_retained void*)route_change_observer;
472 }
473
474 void AudioDeviceIOS::UnregisterNotificationObservers() {
475 LOGI() << "UnregisterNotificationObservers";
476 // Transfer ownership of observer back to ARC, which will deallocate the
477 // observer once it exits this scope.
478 NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
479 if (audio_interruption_observer_ != nullptr) {
480 id observer = (__bridge_transfer id)audio_interruption_observer_;
481 [center removeObserver:observer];
482 audio_interruption_observer_ = nullptr;
483 }
484 if (route_change_observer_ != nullptr) {
485 id observer = (__bridge_transfer id)route_change_observer_;
486 [center removeObserver:observer];
487 route_change_observer_ = nullptr;
488 }
489 }
490
491 void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() { 429 void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
492 LOGI() << "SetupAudioBuffersForActiveAudioSession"; 430 LOGI() << "SetupAudioBuffersForActiveAudioSession";
493 // Verify the current values once the audio session has been activated. 431 // Verify the current values once the audio session has been activated.
494 RTCAudioSession* session = [RTCAudioSession sharedInstance]; 432 RTCAudioSession* session = [RTCAudioSession sharedInstance];
495 LOG(LS_INFO) << " sample rate: " << session.sampleRate; 433 double sample_rate = session.sampleRate;
496 LOG(LS_INFO) << " IO buffer duration: " << session.IOBufferDuration; 434 NSTimeInterval io_buffer_duration = session.IOBufferDuration;
435 LOG(LS_INFO) << " sample rate: " << sample_rate;
436 LOG(LS_INFO) << " IO buffer duration: " << io_buffer_duration;
497 LOG(LS_INFO) << " output channels: " << session.outputNumberOfChannels; 437 LOG(LS_INFO) << " output channels: " << session.outputNumberOfChannels;
498 LOG(LS_INFO) << " input channels: " << session.inputNumberOfChannels; 438 LOG(LS_INFO) << " input channels: " << session.inputNumberOfChannels;
499 LOG(LS_INFO) << " output latency: " << session.outputLatency; 439 LOG(LS_INFO) << " output latency: " << session.outputLatency;
500 LOG(LS_INFO) << " input latency: " << session.inputLatency; 440 LOG(LS_INFO) << " input latency: " << session.inputLatency;
501 441
502 // Log a warning message for the case when we are unable to set the preferred 442 // Log a warning message for the case when we are unable to set the preferred
503 // hardware sample rate but continue and use the non-ideal sample rate after 443 // hardware sample rate but continue and use the non-ideal sample rate after
504 // reinitializing the audio parameters. Most BT headsets only support 8kHz or 444 // reinitializing the audio parameters. Most BT headsets only support 8kHz or
505 // 16kHz. 445 // 16kHz.
506 RTCAudioSessionConfiguration* webRTCConfig = 446 RTCAudioSessionConfiguration* webRTCConfig =
507 [RTCAudioSessionConfiguration webRTCConfiguration]; 447 [RTCAudioSessionConfiguration webRTCConfiguration];
508 if (session.sampleRate != webRTCConfig.sampleRate) { 448 if (sample_rate != webRTCConfig.sampleRate) {
509 LOG(LS_WARNING) << "Unable to set the preferred sample rate"; 449 LOG(LS_WARNING) << "Unable to set the preferred sample rate";
510 } 450 }
511 451
512 // At this stage, we also know the exact IO buffer duration and can add 452 // At this stage, we also know the exact IO buffer duration and can add
513 // that info to the existing audio parameters where it is converted into 453 // that info to the existing audio parameters where it is converted into
514 // number of audio frames. 454 // number of audio frames.
515 // Example: IO buffer size = 0.008 seconds <=> 128 audio frames at 16kHz. 455 // Example: IO buffer size = 0.008 seconds <=> 128 audio frames at 16kHz.
516 // Hence, 128 is the size we expect to see in upcoming render callbacks. 456 // Hence, 128 is the size we expect to see in upcoming render callbacks.
517 playout_parameters_.reset(session.sampleRate, playout_parameters_.channels(), 457 playout_parameters_.reset(sample_rate, playout_parameters_.channels(),
518 session.IOBufferDuration); 458 io_buffer_duration);
519 RTC_DCHECK(playout_parameters_.is_complete()); 459 RTC_DCHECK(playout_parameters_.is_complete());
520 record_parameters_.reset(session.sampleRate, record_parameters_.channels(), 460 record_parameters_.reset(sample_rate, record_parameters_.channels(),
521 session.IOBufferDuration); 461 io_buffer_duration);
522 RTC_DCHECK(record_parameters_.is_complete()); 462 RTC_DCHECK(record_parameters_.is_complete());
523 LOG(LS_INFO) << " frames per I/O buffer: " 463 LOG(LS_INFO) << " frames per I/O buffer: "
524 << playout_parameters_.frames_per_buffer(); 464 << playout_parameters_.frames_per_buffer();
525 LOG(LS_INFO) << " bytes per I/O buffer: " 465 LOG(LS_INFO) << " bytes per I/O buffer: "
526 << playout_parameters_.GetBytesPerBuffer(); 466 << playout_parameters_.GetBytesPerBuffer();
527 RTC_DCHECK_EQ(playout_parameters_.GetBytesPerBuffer(), 467 RTC_DCHECK_EQ(playout_parameters_.GetBytesPerBuffer(),
528 record_parameters_.GetBytesPerBuffer()); 468 record_parameters_.GetBytesPerBuffer());
529 469
530 // Update the ADB parameters since the sample rate might have changed. 470 // Update the ADB parameters since the sample rate might have changed.
531 UpdateAudioDeviceBuffer(); 471 UpdateAudioDeviceBuffer();
(...skipping 245 matching lines...)
777 [session lockForConfiguration]; 717 [session lockForConfiguration];
778 NSError* error = nil; 718 NSError* error = nil;
779 if (![session configureWebRTCSession:&error]) { 719 if (![session configureWebRTCSession:&error]) {
780 RTCLogError(@"Failed to configure WebRTC session: %@", 720 RTCLogError(@"Failed to configure WebRTC session: %@",
781 error.localizedDescription); 721 error.localizedDescription);
782 [session unlockForConfiguration]; 722 [session unlockForConfiguration];
783 return false; 723 return false;
784 } 724 }
785 725
786 // Start observing audio session interruptions and route changes. 726 // Start observing audio session interruptions and route changes.
787 RegisterNotificationObservers(); 727 [session pushDelegate:audio_session_observer_];
788 728
789 // Ensure that we got what we asked for in our active audio session. 729 // Ensure that we got what we asked for in our active audio session.
790 SetupAudioBuffersForActiveAudioSession(); 730 SetupAudioBuffersForActiveAudioSession();
791 731
792 // Create, setup and initialize a new Voice-Processing I/O unit. 732 // Create, setup and initialize a new Voice-Processing I/O unit.
793 if (!SetupAndInitializeVoiceProcessingAudioUnit()) { 733 if (!SetupAndInitializeVoiceProcessingAudioUnit()) {
794 [session setActive:NO error:nil]; 734 [session setActive:NO error:nil];
795 [session unlockForConfiguration]; 735 [session unlockForConfiguration];
796 return false; 736 return false;
797 } 737 }
(...skipping 11 matching lines...)
809 LOG_F(LS_ERROR) << "AudioOutputUnitStop failed: " << result; 749 LOG_F(LS_ERROR) << "AudioOutputUnitStop failed: " << result;
810 } 750 }
811 result = AudioUnitUninitialize(vpio_unit_); 751 result = AudioUnitUninitialize(vpio_unit_);
812 if (result != noErr) { 752 if (result != noErr) {
813 LOG_F(LS_ERROR) << "AudioUnitUninitialize failed: " << result; 753 LOG_F(LS_ERROR) << "AudioUnitUninitialize failed: " << result;
814 } 754 }
815 DisposeAudioUnit(); 755 DisposeAudioUnit();
816 } 756 }
817 757
818 // Remove audio session notification observers. 758 // Remove audio session notification observers.
819 UnregisterNotificationObservers(); 759 RTCAudioSession* session = [RTCAudioSession sharedInstance];
760 [session removeDelegate:audio_session_observer_];
820 761
821 // All I/O should be stopped or paused prior to deactivating the audio 762 // All I/O should be stopped or paused prior to deactivating the audio
822 // session, hence we deactivate as last action. 763 // session, hence we deactivate as last action.
823 RTCAudioSession* session = [RTCAudioSession sharedInstance];
824 [session lockForConfiguration]; 764 [session lockForConfiguration];
825 [session setActive:NO error:nil]; 765 [session setActive:NO error:nil];
826 [session unlockForConfiguration]; 766 [session unlockForConfiguration];
827 } 767 }
828 768
829 void AudioDeviceIOS::DisposeAudioUnit() { 769 void AudioDeviceIOS::DisposeAudioUnit() {
830 if (nullptr == vpio_unit_) 770 if (nullptr == vpio_unit_)
831 return; 771 return;
832 OSStatus result = AudioComponentInstanceDispose(vpio_unit_); 772 OSStatus result = AudioComponentInstanceDispose(vpio_unit_);
833 if (result != noErr) { 773 if (result != noErr) {
(...skipping 94 matching lines...)
928 // Read decoded 16-bit PCM samples from WebRTC (using a size that matches 868 // Read decoded 16-bit PCM samples from WebRTC (using a size that matches
929 // the native I/O audio unit) to a preallocated intermediate buffer and 869 // the native I/O audio unit) to a preallocated intermediate buffer and
930 // copy the result to the audio buffer in the |io_data| destination. 870 // copy the result to the audio buffer in the |io_data| destination.
931 SInt8* source = playout_audio_buffer_.get(); 871 SInt8* source = playout_audio_buffer_.get();
932 fine_audio_buffer_->GetPlayoutData(source); 872 fine_audio_buffer_->GetPlayoutData(source);
933 memcpy(destination, source, dataSizeInBytes); 873 memcpy(destination, source, dataSizeInBytes);
934 return noErr; 874 return noErr;
935 } 875 }
936 876
937 } // namespace webrtc 877 } // namespace webrtc