Index: webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
index 64c9c7c6f8aeaec27548dae5b32169c2a138209e..19ee09a46678bef7a29161d344a8b0135230140d 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
@@ -45,8 +45,7 @@ public class WebRtcAudioManager {
   // specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
   // Allows an app to take control over which devices to exlude from using
   // the OpenSL ES audio output path
-  public static synchronized void setBlacklistDeviceForOpenSLESUsage(
-      boolean enable) {
+  public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
     blacklistDeviceForOpenSLESUsageIsOverridden = true;
     blacklistDeviceForOpenSLESUsage = enable;
   }
@@ -62,10 +61,7 @@ public class WebRtcAudioManager {
 
   // List of possible audio modes.
   private static final String[] AUDIO_MODES = new String[] {
-      "MODE_NORMAL",
-      "MODE_RINGTONE",
-      "MODE_IN_CALL",
-      "MODE_IN_COMMUNICATION",
+      "MODE_NORMAL", "MODE_RINGTONE", "MODE_IN_CALL", "MODE_IN_COMMUNICATION",
   };
 
   // Private utility class that periodically checks and logs the volume level
@@ -85,9 +81,8 @@ public class WebRtcAudioManager {
 
     public void start() {
       timer = new Timer(THREAD_NAME);
-      timer.schedule(new LogVolumeTask(
-          audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
-          audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
+      timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
+          audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
           0, TIMER_PERIOD_IN_SECONDS * 1000);
     }
 
@@ -104,12 +99,12 @@
         final int mode = audioManager.getMode();
         if (mode == AudioManager.MODE_RINGTONE) {
           Logging.d(TAG, "STREAM_RING stream volume: "
-              + audioManager.getStreamVolume(AudioManager.STREAM_RING)
-              + " (max=" + maxRingVolume + ")");
+              + audioManager.getStreamVolume(AudioManager.STREAM_RING) + " (max="
+              + maxRingVolume + ")");
         } else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
           Logging.d(TAG, "VOICE_CALL stream volume: "
-              + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL)
-              + " (max=" + maxVoiceCallVolume + ")");
+              + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL) + " (max="
+              + maxVoiceCallVolume + ")");
         }
       }
     }
@@ -147,16 +142,15 @@
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.context = context;
     this.nativeAudioManager = nativeAudioManager;
-    audioManager = (AudioManager) context.getSystemService(
-        Context.AUDIO_SERVICE);
+    audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
     if (DEBUG) {
       WebRtcAudioUtils.logDeviceInfo(TAG);
     }
     volumeLogger = new VolumeLogger(audioManager);
     storeAudioParameters();
     nativeCacheAudioParameters(sampleRate, channels, hardwareAEC, hardwareAGC, hardwareNS,
-      lowLatencyOutput, lowLatencyInput, proAudio, outputBufferSize, inputBufferSize,
-      nativeAudioManager);
+        lowLatencyOutput, lowLatencyInput, proAudio, outputBufferSize, inputBufferSize,
+        nativeAudioManager);
   }
 
   private boolean init() {
@@ -183,9 +177,9 @@
   }
 
   private boolean isDeviceBlacklistedForOpenSLESUsage() {
-    boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden ?
-        blacklistDeviceForOpenSLESUsage :
-        WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
+    boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
+        ? blacklistDeviceForOpenSLESUsage
+        : WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
     if (blacklisted) {
       Logging.e(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!");
     }
@@ -203,24 +197,21 @@
     lowLatencyOutput = isLowLatencyOutputSupported();
     lowLatencyInput = isLowLatencyInputSupported();
     proAudio = isProAudioSupported();
-    outputBufferSize = lowLatencyOutput ?
-        getLowLatencyOutputFramesPerBuffer() :
-        getMinOutputFrameSize(sampleRate, channels);
+    outputBufferSize = lowLatencyOutput ? getLowLatencyOutputFramesPerBuffer()
+                                        : getMinOutputFrameSize(sampleRate, channels);
     inputBufferSize = lowLatencyInput ? getLowLatencyInputFramesPerBuffer()
                                       : getMinInputFrameSize(sampleRate, channels);
   }
 
   // Gets the current earpiece state.
   private boolean hasEarpiece() {
-    return context.getPackageManager().hasSystemFeature(
-        PackageManager.FEATURE_TELEPHONY);
+    return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
   }
 
   // Returns true if low-latency audio output is supported.
   private boolean isLowLatencyOutputSupported() {
-    return isOpenSLESSupported() &&
-        context.getPackageManager().hasSystemFeature(
-            PackageManager.FEATURE_AUDIO_LOW_LATENCY);
+    return isOpenSLESSupported()
+        && context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
   }
 
   // Returns true if low-latency audio input is supported.
@@ -231,16 +222,14 @@
     // as well. The NDK doc states that: "As of API level 21, lower latency
    // audio input is supported on select devices. To take advantage of this
    // feature, first confirm that lower latency output is available".
-    return WebRtcAudioUtils.runningOnLollipopOrHigher() &&
-        isLowLatencyOutputSupported();
+    return WebRtcAudioUtils.runningOnLollipopOrHigher() && isLowLatencyOutputSupported();
   }
 
   // Returns true if the device has professional audio level of functionality
   // and therefore supports the lowest possible round-trip latency.
   private boolean isProAudioSupported() {
     return WebRtcAudioUtils.runningOnMarshmallowOrHigher()
-        && context.getPackageManager().hasSystemFeature(
-            PackageManager.FEATURE_AUDIO_PRO);
+        && context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_PRO);
   }
 
   // Returns the native output sample rate for this device's output stream.
@@ -254,8 +243,8 @@
     // Default can be overriden by WebRtcAudioUtils.setDefaultSampleRateHz().
     // If so, use that value and return here.
     if (WebRtcAudioUtils.isDefaultSampleRateOverridden()) {
-      Logging.d(TAG, "Default sample rate is overriden to " +
-          WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
+      Logging.d(TAG, "Default sample rate is overriden to "
+              + WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
       return WebRtcAudioUtils.getDefaultSampleRateHz();
     }
     // No overrides available. Deliver best possible estimate based on default
@@ -272,11 +261,9 @@
 
   @TargetApi(17)
   private int getSampleRateOnJellyBeanMR10OrHigher() {
-    String sampleRateString = audioManager.getProperty(
-        AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
-    return (sampleRateString == null)
-        ? WebRtcAudioUtils.getDefaultSampleRateHz()
-        : Integer.parseInt(sampleRateString);
+    String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+    return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
+                                      : Integer.parseInt(sampleRateString);
   }
 
   // Returns the native output buffer size for low-latency output streams.
@@ -286,10 +273,9 @@
     if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
       return DEFAULT_FRAME_PER_BUFFER;
     }
-    String framesPerBuffer = audioManager.getProperty(
-        AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
-    return framesPerBuffer == null ?
-        DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
+    String framesPerBuffer =
+        audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+    return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
   }
 
   // Returns true if the device supports an audio effect (AEC, AGC or NS).
@@ -322,8 +308,8 @@
       return -1;
     }
     return AudioTrack.getMinBufferSize(
-        sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) /
-        bytesPerFrame;
+               sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
   }
 
   // Returns the native input buffer size for input streams.
@@ -338,9 +324,9 @@
   private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
     final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
     assertTrue(numChannels == CHANNELS);
-    return AudioRecord.getMinBufferSize(sampleRateInHz,
-        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) /
-        bytesPerFrame;
+    return AudioRecord.getMinBufferSize(
+               sampleRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
   }
 
   // Returns true if OpenSL ES audio is supported.
@@ -357,7 +343,6 @@
   }
 
   private native void nativeCacheAudioParameters(int sampleRate, int channels, boolean hardwareAEC,
-      boolean hardwareAGC, boolean hardwareNS, boolean lowLatencyOutput,
-      boolean lowLatencyInput, boolean proAudio, int outputBufferSize, int inputBufferSize,
-      long nativeAudioManager);
+      boolean hardwareAGC, boolean hardwareNS, boolean lowLatencyOutput, boolean lowLatencyInput,
+      boolean proAudio, int outputBufferSize, int inputBufferSize, long nativeAudioManager);
 }