Index: webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index e99b9d74930abde62d94e3ce8e209b5a12feb4b0..43c1a19c463bc65189fc7cb43971bceb3d1a5067 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -61,7 +61,7 @@ class WebRtcAudioTrack {
     @Override
     public void run() {
       Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
-      Logd("AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
+      Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());

       try {
         // In MODE_STREAM mode we can optionally prime the output buffer by
@@ -71,7 +71,7 @@
         audioTrack.play();
         assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
       } catch (IllegalStateException e) {
-        Loge("AudioTrack.play failed: " + e.getMessage());
+        Logging.e(TAG, "AudioTrack.play failed: " + e.getMessage());
         return;
       }

@@ -99,7 +99,7 @@
                                          sizeInBytes);
         }
         if (bytesWritten != sizeInBytes) {
-          Loge("AudioTrack.write failed: " + bytesWritten);
+          Logging.e(TAG, "AudioTrack.write failed: " + bytesWritten);
           if (bytesWritten == AudioTrack.ERROR_INVALID_OPERATION) {
             keepAlive = false;
           }
@@ -117,7 +117,7 @@
       try {
         audioTrack.stop();
       } catch (IllegalStateException e) {
-        Loge("AudioTrack.stop failed: " + e.getMessage());
+        Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
       }
       assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED);
       audioTrack.flush();
@@ -136,7 +136,7 @@
   }

   WebRtcAudioTrack(Context context, long nativeAudioTrack) {
-    Logd("ctor" + WebRtcAudioUtils.getThreadInfo());
+    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.context = context;
     this.nativeAudioTrack = nativeAudioTrack;
     audioManager = (AudioManager) context.getSystemService(
@@ -147,12 +147,12 @@
   }

   private void initPlayout(int sampleRate, int channels) {
-    Logd("initPlayout(sampleRate=" + sampleRate + ", channels=" +
-        channels + ")");
+    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels="
+        + channels + ")");
     final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
     byteBuffer = byteBuffer.allocateDirect(
         bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
-    Logd("byteBuffer.capacity: " + byteBuffer.capacity());
+    Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
     // Rather than passing the ByteBuffer with every callback (requiring
     // the potentially expensive GetDirectBufferAddress) we simply have the
     // native class cache the address to the memory once.
@@ -166,7 +166,7 @@
         sampleRate,
         AudioFormat.CHANNEL_OUT_MONO,
         AudioFormat.ENCODING_PCM_16BIT);
-    Logd("AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
+    Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
     assertTrue(audioTrack == null);

     // For the streaming mode, data must be written to the audio sink in
@@ -184,7 +184,7 @@
           minBufferSizeInBytes,
           AudioTrack.MODE_STREAM);
     } catch (IllegalArgumentException e) {
-      Logd(e.getMessage());
+      Logging.d(TAG, e.getMessage());
       return;
     }
     assertTrue(audioTrack.getState() == AudioTrack.STATE_INITIALIZED);
@@ -193,7 +193,7 @@
   }

   private boolean startPlayout() {
-    Logd("startPlayout");
+    Logging.d(TAG, "startPlayout");
     assertTrue(audioTrack != null);
     assertTrue(audioThread == null);
     audioThread = new AudioTrackThread("AudioTrackJavaThread");
@@ -202,7 +202,7 @@
   }

   private boolean stopPlayout() {
-    Logd("stopPlayout");
+    Logging.d(TAG, "stopPlayout");
     assertTrue(audioThread != null);
     audioThread.joinThread();
     audioThread = null;
@@ -215,18 +215,18 @@

   /** Get max possible volume index for a phone call audio stream. */
   private int getStreamMaxVolume() {
-    Logd("getStreamMaxVolume");
+    Logging.d(TAG, "getStreamMaxVolume");
     assertTrue(audioManager != null);
     return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
   }

   /** Set current volume level for a phone call audio stream. */
   private boolean setStreamVolume(int volume) {
-    Logd("setStreamVolume(" + volume + ")");
+    Logging.d(TAG, "setStreamVolume(" + volume + ")");
     assertTrue(audioManager != null);
     if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
       if (audioManager.isVolumeFixed()) {
-        Loge("The device implements a fixed volume policy.");
+        Logging.e(TAG, "The device implements a fixed volume policy.");
         return false;
       }
     }
@@ -236,7 +236,7 @@

   /** Get current volume level for a phone call audio stream. */
   private int getStreamVolume() {
-    Logd("getStreamVolume");
+    Logging.d(TAG, "getStreamVolume");
     assertTrue(audioManager != null);
     return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
   }
@@ -248,14 +248,6 @@
     }
   }

-  private static void Logd(String msg) {
-    Logging.d(TAG, msg);
-  }
-
-  private static void Loge(String msg) {
-    Logging.e(TAG, msg);
-  }
-
   private native void nativeCacheDirectBufferAddress(
       ByteBuffer byteBuffer, long nativeAudioRecord);
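
Note on the pattern: every hunk above makes the same mechanical substitution. The file-private Logd()/Loge() wrappers are deleted, and each call site invokes the shared org.webrtc.Logging helper directly with the class TAG. A minimal before/after sketch follows; the class name is hypothetical and the TAG value is an assumption inferred from the file name, not taken from this diff.

    import org.webrtc.Logging;

    // Hypothetical class, for illustration only.
    class LoggingStyleSketch {
      private static final String TAG = "WebRtcAudioTrack";  // Assumed tag value.

      // Before this change: a thin static wrapper baked TAG into a short name.
      private static void Logd(String msg) {
        Logging.d(TAG, msg);
      }

      // After this change: the call site passes TAG itself, so the
      // wrapper above can be deleted.
      private boolean startPlayout() {
        Logging.d(TAG, "startPlayout");
        return true;
      }
    }

The direct form costs a few characters per call, but it removes a per-class indirection and makes the tag explicit at every logging statement; Logging.d(TAG, msg) is exactly the body the wrappers contained, as the removed lines in the final hunk show.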