OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/base/checks.h" | 11 #include "webrtc/base/checks.h" |
12 #include "webrtc/base/logging.h" | 12 #include "webrtc/base/logging.h" |
| 13 #include "webrtc/base/checks.h" |
13 #include "webrtc/base/refcount.h" | 14 #include "webrtc/base/refcount.h" |
14 #include "webrtc/base/timeutils.h" | 15 #include "webrtc/base/timeutils.h" |
15 #include "webrtc/common_audio/signal_processing/include/signal_processing_librar
y.h" | 16 #include "webrtc/common_audio/signal_processing/include/signal_processing_librar
y.h" |
16 #include "webrtc/modules/audio_device/audio_device_config.h" | 17 #include "webrtc/modules/audio_device/audio_device_config.h" |
17 #include "webrtc/modules/audio_device/audio_device_generic.h" | 18 #include "webrtc/modules/audio_device/audio_device_generic.h" |
18 #include "webrtc/modules/audio_device/audio_device_impl.h" | 19 #include "webrtc/modules/audio_device/audio_device_impl.h" |
19 #include "webrtc/system_wrappers/include/metrics.h" | 20 #include "webrtc/system_wrappers/include/metrics.h" |
20 | 21 |
21 #include <assert.h> | 22 #include <assert.h> |
22 #include <string.h> | 23 #include <string.h> |
23 | 24 |
24 #if defined(_WIN32) | 25 #if defined(_WIN32) |
25 #include "audio_device_wave_win.h" | 26 #include "audio_device_wave_win.h" |
26 #if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD) | 27 #if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD) |
27 #include "audio_device_core_win.h" | 28 #include "audio_device_core_win.h" |
28 #endif | 29 #endif |
29 #elif defined(WEBRTC_ANDROID) | 30 #elif defined(WEBRTC_ANDROID) |
30 #include <stdlib.h> | 31 #include <stdlib.h> |
31 #include "webrtc/modules/audio_device/android/audio_device_template.h" | 32 #include "webrtc/modules/audio_device/android/audio_device_template.h" |
32 #include "webrtc/modules/audio_device/android/audio_manager.h" | 33 #include "webrtc/modules/audio_device/android/audio_manager.h" |
33 #include "webrtc/modules/audio_device/android/audio_record_jni.h" | 34 #include "webrtc/modules/audio_device/android/audio_record_jni.h" |
34 #include "webrtc/modules/audio_device/android/audio_track_jni.h" | 35 #include "webrtc/modules/audio_device/android/audio_track_jni.h" |
35 #include "webrtc/modules/audio_device/android/opensles_player.h" | 36 #include "webrtc/modules/audio_device/android/opensles_player.h" |
| 37 #include "webrtc/modules/audio_device/android/opensles_recorder.h" |
36 #elif defined(WEBRTC_LINUX) | 38 #elif defined(WEBRTC_LINUX) |
37 #if defined(LINUX_ALSA) | 39 #if defined(LINUX_ALSA) |
38 #include "audio_device_alsa_linux.h" | 40 #include "audio_device_alsa_linux.h" |
39 #endif | 41 #endif |
40 #if defined(LINUX_PULSE) | 42 #if defined(LINUX_PULSE) |
41 #include "audio_device_pulse_linux.h" | 43 #include "audio_device_pulse_linux.h" |
42 #endif | 44 #endif |
43 #elif defined(WEBRTC_IOS) | 45 #elif defined(WEBRTC_IOS) |
44 #include "audio_device_ios.h" | 46 #include "audio_device_ios.h" |
45 #elif defined(WEBRTC_MAC) | 47 #elif defined(WEBRTC_MAC) |
(...skipping 185 matching lines...) |
231 } | 233 } |
232 } | 234 } |
233 #endif // defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD) | 235 #endif // defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD) |
234 #endif // #if defined(_WIN32) | 236 #endif // #if defined(_WIN32) |
235 | 237 |
236 #if defined(WEBRTC_ANDROID) | 238 #if defined(WEBRTC_ANDROID) |
237 // Create an Android audio manager. | 239 // Create an Android audio manager. |
238 _audioManagerAndroid.reset(new AudioManager()); | 240 _audioManagerAndroid.reset(new AudioManager()); |
239 // Select best possible combination of audio layers. | 241 // Select best possible combination of audio layers. |
240 if (audioLayer == kPlatformDefaultAudio) { | 242 if (audioLayer == kPlatformDefaultAudio) { |
241 if (_audioManagerAndroid->IsLowLatencyPlayoutSupported()) { | 243 if (_audioManagerAndroid->IsLowLatencyPlayoutSupported() && |
242 // Always use OpenSL ES for output on devices that supports the | 244 _audioManagerAndroid->IsLowLatencyRecordSupported()) { |
| 245 // Use OpenSL ES for both playout and recording. |
| 246 audioLayer = kAndroidOpenSLESAudio; |
| 247 } else if (_audioManagerAndroid->IsLowLatencyPlayoutSupported() && |
| 248 !_audioManagerAndroid->IsLowLatencyRecordSupported()) { |
| 249 // Use OpenSL ES for output on devices that only support the |
243 // low-latency output audio path. | 250 // low-latency output audio path. |
244 audioLayer = kAndroidJavaInputAndOpenSLESOutputAudio; | 251 audioLayer = kAndroidJavaInputAndOpenSLESOutputAudio; |
245 } else { | 252 } else { |
246 // Use Java-based audio in both directions when low-latency output | 253 // Use Java-based audio in both directions when low-latency output is |
247 // is not supported. | 254 // not supported. |
248 audioLayer = kAndroidJavaAudio; | 255 audioLayer = kAndroidJavaAudio; |
249 } | 256 } |
250 } | 257 } |
251 AudioManager* audio_manager = _audioManagerAndroid.get(); | 258 AudioManager* audio_manager = _audioManagerAndroid.get(); |
252 if (audioLayer == kAndroidJavaAudio) { | 259 if (audioLayer == kAndroidJavaAudio) { |
253 // Java audio for both input and output audio. | 260 // Java audio for both input and output audio. |
254 ptrAudioDevice = new AudioDeviceTemplate<AudioRecordJni, AudioTrackJni>( | 261 ptrAudioDevice = new AudioDeviceTemplate<AudioRecordJni, AudioTrackJni>( |
255 audioLayer, audio_manager); | 262 audioLayer, audio_manager); |
| 263 } else if (audioLayer == kAndroidOpenSLESAudio) { |
| 264 // OpenSL ES based audio for both input and output audio. |
| 265 ptrAudioDevice = new AudioDeviceTemplate<OpenSLESRecorder, OpenSLESPlayer>( |
| 266 audioLayer, audio_manager); |
256 } else if (audioLayer == kAndroidJavaInputAndOpenSLESOutputAudio) { | 267 } else if (audioLayer == kAndroidJavaInputAndOpenSLESOutputAudio) { |
257 // Java audio for input and OpenSL ES for output audio (i.e. mixed APIs). | 268 // Java audio for input and OpenSL ES for output audio (i.e. mixed APIs). |
258 // This combination provides low-latency output audio and at the same | 269 // This combination provides low-latency output audio and at the same |
259 // time support for HW AEC using the AudioRecord Java API. | 270 // time support for HW AEC using the AudioRecord Java API. |
260 ptrAudioDevice = new AudioDeviceTemplate<AudioRecordJni, OpenSLESPlayer>( | 271 ptrAudioDevice = new AudioDeviceTemplate<AudioRecordJni, OpenSLESPlayer>( |
261 audioLayer, audio_manager); | 272 audioLayer, audio_manager); |
262 } else { | 273 } else { |
263 // Invalid audio layer. | 274 // Invalid audio layer. |
264 ptrAudioDevice = NULL; | 275 ptrAudioDevice = nullptr; |
265 } | 276 } |
266 // END #if defined(WEBRTC_ANDROID) | 277 // END #if defined(WEBRTC_ANDROID) |
267 | 278 |
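The Android branch above boils down to a small decision table: an explicitly requested layer is kept, otherwise OpenSL ES is used wherever low-latency support is reported, with Java-based audio as the fallback. A minimal sketch of that logic as a standalone helper, assuming a hypothetical function name SelectAndroidAudioLayer and include paths not shown in this diff; AudioManager, its IsLowLatencyPlayoutSupported()/IsLowLatencyRecordSupported() queries, and the AudioLayer values are taken from the patch itself:

#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/include/audio_device.h"

namespace webrtc {

// Illustrative sketch only, not part of the patch: restates the layer
// selection performed in the Android branch above. Helper name and include
// paths are assumptions.
AudioDeviceModule::AudioLayer SelectAndroidAudioLayer(
    AudioManager* audio_manager,
    AudioDeviceModule::AudioLayer requested) {
  if (requested != AudioDeviceModule::kPlatformDefaultAudio) {
    // An explicitly requested layer is used as-is.
    return requested;
  }
  const bool low_latency_out = audio_manager->IsLowLatencyPlayoutSupported();
  const bool low_latency_in = audio_manager->IsLowLatencyRecordSupported();
  if (low_latency_out && low_latency_in) {
    // OpenSL ES for both playout and recording.
    return AudioDeviceModule::kAndroidOpenSLESAudio;
  }
  if (low_latency_out) {
    // Low-latency OpenSL ES output combined with Java-based (AudioRecord)
    // input, which keeps support for the HW AEC.
    return AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio;
  }
  // Java-based audio in both directions.
  return AudioDeviceModule::kAndroidJavaAudio;
}

}  // namespace webrtc

The mixed kAndroidJavaInputAndOpenSLESOutputAudio path is kept for devices that only report low-latency output, since AudioRecord-based input is what carries the HW AEC support mentioned in the comments.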
268 // Create the *Linux* implementation of the Audio Device | 279 // Create the *Linux* implementation of the Audio Device |
269 // | 280 // |
270 #elif defined(WEBRTC_LINUX) | 281 #elif defined(WEBRTC_LINUX) |
271 if ((audioLayer == kLinuxPulseAudio) || | 282 if ((audioLayer == kLinuxPulseAudio) || |
272 (audioLayer == kPlatformDefaultAudio)) { | 283 (audioLayer == kPlatformDefaultAudio)) { |
273 #if defined(LINUX_PULSE) | 284 #if defined(LINUX_PULSE) |
274 LOG(INFO) << "attempting to use the Linux PulseAudio APIs..."; | 285 LOG(INFO) << "attempting to use the Linux PulseAudio APIs..."; |
(...skipping 1590 matching lines...) |
1865 // PlatformAudioLayer | 1876 // PlatformAudioLayer |
1866 // ---------------------------------------------------------------------------- | 1877 // ---------------------------------------------------------------------------- |
1867 | 1878 |
1868 AudioDeviceModule::AudioLayer AudioDeviceModuleImpl::PlatformAudioLayer() | 1879 AudioDeviceModule::AudioLayer AudioDeviceModuleImpl::PlatformAudioLayer() |
1869 const { | 1880 const { |
1870 LOG(INFO) << __FUNCTION__; | 1881 LOG(INFO) << __FUNCTION__; |
1871 return _platformAudioLayer; | 1882 return _platformAudioLayer; |
1872 } | 1883 } |
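PlatformAudioLayer() is a plain accessor for the layer the module was constructed with; it can still be kPlatformDefaultAudio if no explicit layer was requested. A minimal usage sketch, assuming a hypothetical helper and an AudioDeviceModuleImpl instance created elsewhere; the enum values and LOG(INFO) usage follow the file itself:

#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_device/audio_device_impl.h"

namespace webrtc {

// Hypothetical helper, not part of the patch: logs the audio layer the module
// was created with, using the accessor shown above.
void LogRequestedAudioLayer(const AudioDeviceModuleImpl& adm) {
  switch (adm.PlatformAudioLayer()) {
    case AudioDeviceModule::kAndroidOpenSLESAudio:
      LOG(INFO) << "OpenSL ES requested for both capture and playout.";
      break;
    case AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio:
      LOG(INFO) << "Java-based capture with OpenSL ES playout requested.";
      break;
    case AudioDeviceModule::kAndroidJavaAudio:
      LOG(INFO) << "Java-based audio requested in both directions.";
      break;
    case AudioDeviceModule::kPlatformDefaultAudio:
      LOG(INFO) << "Platform default; the concrete path is chosen internally.";
      break;
    default:
      LOG(INFO) << "Non-Android audio layer.";
      break;
  }
}

}  // namespace webrtc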
1873 | 1884 |
1874 } // namespace webrtc | 1885 } // namespace webrtc |