| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 58 matching lines...) | (...skipping 58 matching lines...) |
| 69 AudioTrackJni::AudioTrackJni(AudioManager* audio_manager) | 69 AudioTrackJni::AudioTrackJni(AudioManager* audio_manager) |
| 70 : j_environment_(JVM::GetInstance()->environment()), | 70 : j_environment_(JVM::GetInstance()->environment()), |
| 71 audio_parameters_(audio_manager->GetPlayoutAudioParameters()), | 71 audio_parameters_(audio_manager->GetPlayoutAudioParameters()), |
| 72 direct_buffer_address_(nullptr), | 72 direct_buffer_address_(nullptr), |
| 73 direct_buffer_capacity_in_bytes_(0), | 73 direct_buffer_capacity_in_bytes_(0), |
| 74 frames_per_buffer_(0), | 74 frames_per_buffer_(0), |
| 75 initialized_(false), | 75 initialized_(false), |
| 76 playing_(false), | 76 playing_(false), |
| 77 audio_device_buffer_(nullptr) { | 77 audio_device_buffer_(nullptr) { |
| 78 ALOGD("ctor%s", GetThreadInfo().c_str()); | 78 ALOGD("ctor%s", GetThreadInfo().c_str()); |
| 79 DCHECK(audio_parameters_.is_valid()); | 79 RTC_DCHECK(audio_parameters_.is_valid()); |
| 80 CHECK(j_environment_); | 80 RTC_CHECK(j_environment_); |
| 81 JNINativeMethod native_methods[] = { | 81 JNINativeMethod native_methods[] = { |
| 82 {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V", | 82 {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V", |
| 83 reinterpret_cast<void*>( | 83 reinterpret_cast<void*>( |
| 84 &webrtc::AudioTrackJni::CacheDirectBufferAddress)}, | 84 &webrtc::AudioTrackJni::CacheDirectBufferAddress)}, |
| 85 {"nativeGetPlayoutData", "(IJ)V", | 85 {"nativeGetPlayoutData", "(IJ)V", |
| 86 reinterpret_cast<void*>(&webrtc::AudioTrackJni::GetPlayoutData)}}; | 86 reinterpret_cast<void*>(&webrtc::AudioTrackJni::GetPlayoutData)}}; |
| 87 j_native_registration_ = j_environment_->RegisterNatives( | 87 j_native_registration_ = j_environment_->RegisterNatives( |
| 88 "org/webrtc/voiceengine/WebRtcAudioTrack", | 88 "org/webrtc/voiceengine/WebRtcAudioTrack", |
| 89 native_methods, arraysize(native_methods)); | 89 native_methods, arraysize(native_methods)); |
| 90 j_audio_track_.reset(new JavaAudioTrack( | 90 j_audio_track_.reset(new JavaAudioTrack( |
| 91 j_native_registration_.get(), | 91 j_native_registration_.get(), |
| 92 j_native_registration_->NewObject( | 92 j_native_registration_->NewObject( |
| 93 "<init>", "(Landroid/content/Context;J)V", | 93 "<init>", "(Landroid/content/Context;J)V", |
| 94 JVM::GetInstance()->context(), PointerTojlong(this)))); | 94 JVM::GetInstance()->context(), PointerTojlong(this)))); |
| 95 // Detach from this thread since we want to use the checker to verify calls | 95 // Detach from this thread since we want to use the checker to verify calls |
| 96 // from the Java based audio thread. | 96 // from the Java based audio thread. |
| 97 thread_checker_java_.DetachFromThread(); | 97 thread_checker_java_.DetachFromThread(); |
| 98 } | 98 } |
| 99 | 99 |
| 100 AudioTrackJni::~AudioTrackJni() { | 100 AudioTrackJni::~AudioTrackJni() { |
| 101 ALOGD("~dtor%s", GetThreadInfo().c_str()); | 101 ALOGD("~dtor%s", GetThreadInfo().c_str()); |
| 102 DCHECK(thread_checker_.CalledOnValidThread()); | 102 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 103 Terminate(); | 103 Terminate(); |
| 104 } | 104 } |
| 105 | 105 |
| 106 int32_t AudioTrackJni::Init() { | 106 int32_t AudioTrackJni::Init() { |
| 107 ALOGD("Init%s", GetThreadInfo().c_str()); | 107 ALOGD("Init%s", GetThreadInfo().c_str()); |
| 108 DCHECK(thread_checker_.CalledOnValidThread()); | 108 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 109 return 0; | 109 return 0; |
| 110 } | 110 } |
| 111 | 111 |
| 112 int32_t AudioTrackJni::Terminate() { | 112 int32_t AudioTrackJni::Terminate() { |
| 113 ALOGD("Terminate%s", GetThreadInfo().c_str()); | 113 ALOGD("Terminate%s", GetThreadInfo().c_str()); |
| 114 DCHECK(thread_checker_.CalledOnValidThread()); | 114 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 115 StopPlayout(); | 115 StopPlayout(); |
| 116 return 0; | 116 return 0; |
| 117 } | 117 } |
| 118 | 118 |
| 119 int32_t AudioTrackJni::InitPlayout() { | 119 int32_t AudioTrackJni::InitPlayout() { |
| 120 ALOGD("InitPlayout%s", GetThreadInfo().c_str()); | 120 ALOGD("InitPlayout%s", GetThreadInfo().c_str()); |
| 121 DCHECK(thread_checker_.CalledOnValidThread()); | 121 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 122 DCHECK(!initialized_); | 122 RTC_DCHECK(!initialized_); |
| 123 DCHECK(!playing_); | 123 RTC_DCHECK(!playing_); |
| 124 j_audio_track_->InitPlayout( | 124 j_audio_track_->InitPlayout( |
| 125 audio_parameters_.sample_rate(), audio_parameters_.channels()); | 125 audio_parameters_.sample_rate(), audio_parameters_.channels()); |
| 126 initialized_ = true; | 126 initialized_ = true; |
| 127 return 0; | 127 return 0; |
| 128 } | 128 } |
| 129 | 129 |
| 130 int32_t AudioTrackJni::StartPlayout() { | 130 int32_t AudioTrackJni::StartPlayout() { |
| 131 ALOGD("StartPlayout%s", GetThreadInfo().c_str()); | 131 ALOGD("StartPlayout%s", GetThreadInfo().c_str()); |
| 132 DCHECK(thread_checker_.CalledOnValidThread()); | 132 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 133 DCHECK(initialized_); | 133 RTC_DCHECK(initialized_); |
| 134 DCHECK(!playing_); | 134 RTC_DCHECK(!playing_); |
| 135 if (!j_audio_track_->StartPlayout()) { | 135 if (!j_audio_track_->StartPlayout()) { |
| 136 ALOGE("StartPlayout failed!"); | 136 ALOGE("StartPlayout failed!"); |
| 137 return -1; | 137 return -1; |
| 138 } | 138 } |
| 139 playing_ = true; | 139 playing_ = true; |
| 140 return 0; | 140 return 0; |
| 141 } | 141 } |
| 142 | 142 |
| 143 int32_t AudioTrackJni::StopPlayout() { | 143 int32_t AudioTrackJni::StopPlayout() { |
| 144 ALOGD("StopPlayout%s", GetThreadInfo().c_str()); | 144 ALOGD("StopPlayout%s", GetThreadInfo().c_str()); |
| 145 DCHECK(thread_checker_.CalledOnValidThread()); | 145 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 146 if (!initialized_ || !playing_) { | 146 if (!initialized_ || !playing_) { |
| 147 return 0; | 147 return 0; |
| 148 } | 148 } |
| 149 if (!j_audio_track_->StopPlayout()) { | 149 if (!j_audio_track_->StopPlayout()) { |
| 150 ALOGE("StopPlayout failed!"); | 150 ALOGE("StopPlayout failed!"); |
| 151 return -1; | 151 return -1; |
| 152 } | 152 } |
| 153 // If we don't detach here, we will hit a DCHECK in OnDataIsRecorded() next | 153 // If we don't detach here, we will hit a RTC_DCHECK in OnDataIsRecorded() |
| 154 // time StartRecording() is called since it will create a new Java thread. | 154 // next time StartRecording() is called since it will create a new Java |
| | 155 // thread. |
| 155 thread_checker_java_.DetachFromThread(); | 156 thread_checker_java_.DetachFromThread(); |
| 156 initialized_ = false; | 157 initialized_ = false; |
| 157 playing_ = false; | 158 playing_ = false; |
| 158 return 0; | 159 return 0; |
| 159 } | 160 } |
| 160 | 161 |
| 161 int AudioTrackJni::SpeakerVolumeIsAvailable(bool& available) { | 162 int AudioTrackJni::SpeakerVolumeIsAvailable(bool& available) { |
| 162 available = true; | 163 available = true; |
| 163 return 0; | 164 return 0; |
| 164 } | 165 } |
| 165 | 166 |
| 166 int AudioTrackJni::SetSpeakerVolume(uint32_t volume) { | 167 int AudioTrackJni::SetSpeakerVolume(uint32_t volume) { |
| 167 ALOGD("SetSpeakerVolume(%d)%s", volume, GetThreadInfo().c_str()); | 168 ALOGD("SetSpeakerVolume(%d)%s", volume, GetThreadInfo().c_str()); |
| 168 DCHECK(thread_checker_.CalledOnValidThread()); | 169 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 169 return j_audio_track_->SetStreamVolume(volume) ? 0 : -1; | 170 return j_audio_track_->SetStreamVolume(volume) ? 0 : -1; |
| 170 } | 171 } |
| 171 | 172 |
| 172 int AudioTrackJni::MaxSpeakerVolume(uint32_t& max_volume) const { | 173 int AudioTrackJni::MaxSpeakerVolume(uint32_t& max_volume) const { |
| 173 ALOGD("MaxSpeakerVolume%s", GetThreadInfo().c_str()); | 174 ALOGD("MaxSpeakerVolume%s", GetThreadInfo().c_str()); |
| 174 DCHECK(thread_checker_.CalledOnValidThread()); | 175 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 175 max_volume = j_audio_track_->GetStreamMaxVolume(); | 176 max_volume = j_audio_track_->GetStreamMaxVolume(); |
| 176 return 0; | 177 return 0; |
| 177 } | 178 } |
| 178 | 179 |
| 179 int AudioTrackJni::MinSpeakerVolume(uint32_t& min_volume) const { | 180 int AudioTrackJni::MinSpeakerVolume(uint32_t& min_volume) const { |
| 180 ALOGD("MaxSpeakerVolume%s", GetThreadInfo().c_str()); | 181 ALOGD("MaxSpeakerVolume%s", GetThreadInfo().c_str()); |
| 181 DCHECK(thread_checker_.CalledOnValidThread()); | 182 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 182 min_volume = 0; | 183 min_volume = 0; |
| 183 return 0; | 184 return 0; |
| 184 } | 185 } |
| 185 | 186 |
| 186 int AudioTrackJni::SpeakerVolume(uint32_t& volume) const { | 187 int AudioTrackJni::SpeakerVolume(uint32_t& volume) const { |
| 187 ALOGD("SpeakerVolume%s", GetThreadInfo().c_str()); | 188 ALOGD("SpeakerVolume%s", GetThreadInfo().c_str()); |
| 188 DCHECK(thread_checker_.CalledOnValidThread()); | 189 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 189 volume = j_audio_track_->GetStreamVolume(); | 190 volume = j_audio_track_->GetStreamVolume(); |
| 190 return 0; | 191 return 0; |
| 191 } | 192 } |
| 192 | 193 |
| 193 // TODO(henrika): possibly add stereo support. | 194 // TODO(henrika): possibly add stereo support. |
| 194 void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { | 195 void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { |
| 195 ALOGD("AttachAudioBuffer%s", GetThreadInfo().c_str()); | 196 ALOGD("AttachAudioBuffer%s", GetThreadInfo().c_str()); |
| 196 DCHECK(thread_checker_.CalledOnValidThread()); | 197 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 197 audio_device_buffer_ = audioBuffer; | 198 audio_device_buffer_ = audioBuffer; |
| 198 const int sample_rate_hz = audio_parameters_.sample_rate(); | 199 const int sample_rate_hz = audio_parameters_.sample_rate(); |
| 199 ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz); | 200 ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz); |
| 200 audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz); | 201 audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz); |
| 201 const int channels = audio_parameters_.channels(); | 202 const int channels = audio_parameters_.channels(); |
| 202 ALOGD("SetPlayoutChannels(%d)", channels); | 203 ALOGD("SetPlayoutChannels(%d)", channels); |
| 203 audio_device_buffer_->SetPlayoutChannels(channels); | 204 audio_device_buffer_->SetPlayoutChannels(channels); |
| 204 } | 205 } |
| 205 | 206 |
| 206 void JNICALL AudioTrackJni::CacheDirectBufferAddress( | 207 void JNICALL AudioTrackJni::CacheDirectBufferAddress( |
| 207 JNIEnv* env, jobject obj, jobject byte_buffer, jlong nativeAudioTrack) { | 208 JNIEnv* env, jobject obj, jobject byte_buffer, jlong nativeAudioTrack) { |
| 208 webrtc::AudioTrackJni* this_object = | 209 webrtc::AudioTrackJni* this_object = |
| 209 reinterpret_cast<webrtc::AudioTrackJni*> (nativeAudioTrack); | 210 reinterpret_cast<webrtc::AudioTrackJni*> (nativeAudioTrack); |
| 210 this_object->OnCacheDirectBufferAddress(env, byte_buffer); | 211 this_object->OnCacheDirectBufferAddress(env, byte_buffer); |
| 211 } | 212 } |
| 212 | 213 |
| 213 void AudioTrackJni::OnCacheDirectBufferAddress( | 214 void AudioTrackJni::OnCacheDirectBufferAddress( |
| 214 JNIEnv* env, jobject byte_buffer) { | 215 JNIEnv* env, jobject byte_buffer) { |
| 215 ALOGD("OnCacheDirectBufferAddress"); | 216 ALOGD("OnCacheDirectBufferAddress"); |
| 216 DCHECK(thread_checker_.CalledOnValidThread()); | 217 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 217 direct_buffer_address_ = | 218 direct_buffer_address_ = |
| 218 env->GetDirectBufferAddress(byte_buffer); | 219 env->GetDirectBufferAddress(byte_buffer); |
| 219 jlong capacity = env->GetDirectBufferCapacity(byte_buffer); | 220 jlong capacity = env->GetDirectBufferCapacity(byte_buffer); |
| 220 ALOGD("direct buffer capacity: %lld", capacity); | 221 ALOGD("direct buffer capacity: %lld", capacity); |
| 221 direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity); | 222 direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity); |
| 222 frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / kBytesPerFrame; | 223 frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / kBytesPerFrame; |
| 223 ALOGD("frames_per_buffer: %" PRIuS, frames_per_buffer_); | 224 ALOGD("frames_per_buffer: %" PRIuS, frames_per_buffer_); |
| 224 } | 225 } |
| 225 | 226 |
| 226 void JNICALL AudioTrackJni::GetPlayoutData( | 227 void JNICALL AudioTrackJni::GetPlayoutData( |
| 227 JNIEnv* env, jobject obj, jint length, jlong nativeAudioTrack) { | 228 JNIEnv* env, jobject obj, jint length, jlong nativeAudioTrack) { |
| 228 webrtc::AudioTrackJni* this_object = | 229 webrtc::AudioTrackJni* this_object = |
| 229 reinterpret_cast<webrtc::AudioTrackJni*> (nativeAudioTrack); | 230 reinterpret_cast<webrtc::AudioTrackJni*> (nativeAudioTrack); |
| 230 this_object->OnGetPlayoutData(static_cast<size_t>(length)); | 231 this_object->OnGetPlayoutData(static_cast<size_t>(length)); |
| 231 } | 232 } |
| 232 | 233 |
| 233 // This method is called on a high-priority thread from Java. The name of | 234 // This method is called on a high-priority thread from Java. The name of |
| 234 // the thread is 'AudioRecordTrack'. | 235 // the thread is 'AudioRecordTrack'. |
| 235 void AudioTrackJni::OnGetPlayoutData(size_t length) { | 236 void AudioTrackJni::OnGetPlayoutData(size_t length) { |
| 236 DCHECK(thread_checker_java_.CalledOnValidThread()); | 237 RTC_DCHECK(thread_checker_java_.CalledOnValidThread()); |
| 237 DCHECK_EQ(frames_per_buffer_, length / kBytesPerFrame); | 238 RTC_DCHECK_EQ(frames_per_buffer_, length / kBytesPerFrame); |
| 238 if (!audio_device_buffer_) { | 239 if (!audio_device_buffer_) { |
| 239 ALOGE("AttachAudioBuffer has not been called!"); | 240 ALOGE("AttachAudioBuffer has not been called!"); |
| 240 return; | 241 return; |
| 241 } | 242 } |
| 242 // Pull decoded data (in 16-bit PCM format) from jitter buffer. | 243 // Pull decoded data (in 16-bit PCM format) from jitter buffer. |
| 243 int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_); | 244 int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_); |
| 244 if (samples <= 0) { | 245 if (samples <= 0) { |
| 245 ALOGE("AudioDeviceBuffer::RequestPlayoutData failed!"); | 246 ALOGE("AudioDeviceBuffer::RequestPlayoutData failed!"); |
| 246 return; | 247 return; |
| 247 } | 248 } |
| 248 DCHECK_EQ(static_cast<size_t>(samples), frames_per_buffer_); | 249 RTC_DCHECK_EQ(static_cast<size_t>(samples), frames_per_buffer_); |
| 249 // Copy decoded data into common byte buffer to ensure that it can be | 250 // Copy decoded data into common byte buffer to ensure that it can be |
| 250 // written to the Java based audio track. | 251 // written to the Java based audio track. |
| 251 samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_); | 252 samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_); |
| 252 DCHECK_EQ(length, kBytesPerFrame * samples); | 253 RTC_DCHECK_EQ(length, kBytesPerFrame * samples); |
| 253 } | 254 } |
| 254 | 255 |
| 255 } // namespace webrtc | 256 } // namespace webrtc |
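
The static JNI entry points in the diff above (CacheDirectBufferAddress, GetPlayoutData) rely on a common pattern: the native AudioTrackJni pointer is handed to Java as a jlong at construction time (PointerTojlong(this)) and recovered inside the static callback with a reinterpret_cast before the call is forwarded to the instance method. The sketch below shows that round trip in isolation. It is a minimal standalone example, not WebRTC code; the names (FakePlayer, OnPlayoutRequested) and the local jlong/jint typedefs are illustrative stand-ins chosen so the snippet compiles without <jni.h>.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Stand-ins for the JNI typedefs so the sketch builds without <jni.h>.
typedef int64_t jlong;
typedef int32_t jint;

class FakePlayer {
 public:
  // Mirrors PointerTojlong(this): widen the object pointer to a 64-bit value
  // that can travel through Java untouched.
  jlong AsJlong() { return reinterpret_cast<jlong>(this); }

  // Instance method invoked once the static callback has recovered |this|.
  void OnPlayoutRequested(size_t length) {
    std::printf("asked for %zu bytes\n", length);
  }

  // Mirrors AudioTrackJni::GetPlayoutData: a static, C-callable entry point
  // that turns the jlong back into an object pointer and forwards the call.
  static void GetPlayoutData(jint length, jlong native_handle) {
    FakePlayer* self = reinterpret_cast<FakePlayer*>(native_handle);
    self->OnPlayoutRequested(static_cast<size_t>(length));
  }
};

int main() {
  FakePlayer player;
  jlong handle = player.AsJlong();          // value that would pass through Java
  FakePlayer::GetPlayoutData(480, handle);  // what the Java audio thread calls
  return 0;
}
```

In the real code the jlong travels through the Java WebRtcAudioTrack object and comes back on the Java-created audio thread, which is why the callbacks must be static and registered via RegisterNatives rather than being ordinary member functions.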
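
The substance of this change is mechanical: every CHECK/DCHECK/DCHECK_EQ in the file becomes RTC_CHECK/RTC_DCHECK/RTC_DCHECK_EQ, giving the assertion macros a WebRTC-specific prefix so they cannot collide with identically named macros from embedding projects. The snippet below sketches the general shape of such a macro pair. It is a simplified illustration under assumed semantics (the MY_ prefix is hypothetical), not the actual rtc/checks.h implementation, which additionally supports streamed error messages and build-flag overrides.

```cpp
#include <cstdio>
#include <cstdlib>

// Always-on fatal assertion: prints the failed condition and aborts.
#define MY_CHECK(condition)                                           \
  do {                                                                \
    if (!(condition)) {                                               \
      std::fprintf(stderr, "Check failed: %s (%s:%d)\n", #condition,  \
                   __FILE__, __LINE__);                               \
      std::abort();                                                   \
    }                                                                 \
  } while (0)

// Debug-only assertion: identical to MY_CHECK in debug builds,
// compiled out entirely when NDEBUG is defined.
#if !defined(NDEBUG)
#define MY_DCHECK(condition) MY_CHECK(condition)
#else
#define MY_DCHECK(condition) \
  do {                       \
  } while (0)
#endif

int main() {
  int frames_per_buffer = 480;
  MY_CHECK(frames_per_buffer > 0);          // enforced in all builds
  MY_DCHECK(frames_per_buffer % 10 == 0);   // debug-only sanity check
  return 0;
}
```

Keeping the fatal check unconditional while letting the debug check compile away in release builds is what makes it cheap to assert liberally on hot paths such as OnGetPlayoutData(), which runs on the high-priority Java audio thread.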