Index: webrtc/modules/audio_device/android/audio_manager.h
diff --git a/webrtc/modules/audio_device/android/audio_manager.h b/webrtc/modules/audio_device/android/audio_manager.h
index 9cceaacfca7236f64bd4662a68ac5910018c8ba3..5f23147b8a6fa8bc767d15f74e4dc91d56a90f6a 100644
--- a/webrtc/modules/audio_device/android/audio_manager.h
+++ b/webrtc/modules/audio_device/android/audio_manager.h
@@ -93,6 +93,8 @@ class AudioManager {
// webrtc::kHighLatencyModeDelayEstimateInMilliseconds.
int GetDelayEstimateInMilliseconds() const;
+ int OutputStreamType() const { return output_stream_type_; }
+
private:
// Called from Java side so we can cache the native audio parameters.
// This method will be called by the WebRtcAudioManager constructor, i.e.
@@ -107,6 +109,7 @@ class AudioManager {
jboolean low_latency_output,
jint output_buffer_size,
jint input_buffer_size,
+ jint output_stream_type,
jlong native_audio_manager);
void OnCacheAudioParameters(JNIEnv* env,
jint sample_rate,
@@ -116,7 +119,8 @@ class AudioManager {
jboolean hardware_ns,
jboolean low_latency_output,
jint output_buffer_size,
- jint input_buffer_size);
+ jint input_buffer_size,
+ jint output_stream_type);
// Stores thread ID in the constructor.
// We can then use ThreadChecker::CalledOnValidThread() to ensure that
@@ -155,6 +159,13 @@ class AudioManager {
// device supports low-latency output or not.
int delay_estimate_in_milliseconds_;
+ // Contains the output stream type provided to this class at construction by
+ // the audio manager on the Java side. Possible values are:
+ // - AudioManager.STREAM_VOICE_CALL = 0
+ // - AudioManager.STREAM_RING = 2
+ // - AudioManager.STREAM_MUSIC = 3
+ int output_stream_type_;
+
// Contains native parameters (e.g. sample rate, channel configuration).
// Set at construction in OnCacheAudioParameters() which is called from
// Java on the same thread as this object is created on.