| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 54 matching lines...) |
| 65 audio_manager_(audio_manager), | 65 audio_manager_(audio_manager), |
| 66 audio_parameters_(audio_manager->GetRecordAudioParameters()), | 66 audio_parameters_(audio_manager->GetRecordAudioParameters()), |
| 67 total_delay_in_milliseconds_(0), | 67 total_delay_in_milliseconds_(0), |
| 68 direct_buffer_address_(nullptr), | 68 direct_buffer_address_(nullptr), |
| 69 direct_buffer_capacity_in_bytes_(0), | 69 direct_buffer_capacity_in_bytes_(0), |
| 70 frames_per_buffer_(0), | 70 frames_per_buffer_(0), |
| 71 initialized_(false), | 71 initialized_(false), |
| 72 recording_(false), | 72 recording_(false), |
| 73 audio_device_buffer_(nullptr) { | 73 audio_device_buffer_(nullptr) { |
| 74 ALOGD("ctor%s", GetThreadInfo().c_str()); | 74 ALOGD("ctor%s", GetThreadInfo().c_str()); |
| 75 DCHECK(audio_parameters_.is_valid()); | 75 RTC_DCHECK(audio_parameters_.is_valid()); |
| 76 CHECK(j_environment_); | 76 RTC_CHECK(j_environment_); |
| 77 JNINativeMethod native_methods[] = { | 77 JNINativeMethod native_methods[] = { |
| 78 {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V", | 78 {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V", |
| 79 reinterpret_cast<void*>( | 79 reinterpret_cast<void*>( |
| 80 &webrtc::AudioRecordJni::CacheDirectBufferAddress)}, | 80 &webrtc::AudioRecordJni::CacheDirectBufferAddress)}, |
| 81 {"nativeDataIsRecorded", "(IJ)V", | 81 {"nativeDataIsRecorded", "(IJ)V", |
| 82 reinterpret_cast<void*>(&webrtc::AudioRecordJni::DataIsRecorded)}}; | 82 reinterpret_cast<void*>(&webrtc::AudioRecordJni::DataIsRecorded)}}; |
| 83 j_native_registration_ = j_environment_->RegisterNatives( | 83 j_native_registration_ = j_environment_->RegisterNatives( |
| 84 "org/webrtc/voiceengine/WebRtcAudioRecord", | 84 "org/webrtc/voiceengine/WebRtcAudioRecord", |
| 85 native_methods, arraysize(native_methods)); | 85 native_methods, arraysize(native_methods)); |
| 86 j_audio_record_.reset(new JavaAudioRecord( | 86 j_audio_record_.reset(new JavaAudioRecord( |
| 87 j_native_registration_.get(), | 87 j_native_registration_.get(), |
| 88 j_native_registration_->NewObject( | 88 j_native_registration_->NewObject( |
| 89 "<init>", "(Landroid/content/Context;J)V", | 89 "<init>", "(Landroid/content/Context;J)V", |
| 90 JVM::GetInstance()->context(), PointerTojlong(this)))); | 90 JVM::GetInstance()->context(), PointerTojlong(this)))); |
| 91 // Detach from this thread since we want to use the checker to verify calls | 91 // Detach from this thread since we want to use the checker to verify calls |
| 92 // from the Java-based audio thread. | 92 // from the Java-based audio thread. |
| 93 thread_checker_java_.DetachFromThread(); | 93 thread_checker_java_.DetachFromThread(); |
| 94 } | 94 } |
| 95 | 95 |
| 96 AudioRecordJni::~AudioRecordJni() { | 96 AudioRecordJni::~AudioRecordJni() { |
| 97 ALOGD("~dtor%s", GetThreadInfo().c_str()); | 97 ALOGD("~dtor%s", GetThreadInfo().c_str()); |
| 98 DCHECK(thread_checker_.CalledOnValidThread()); | 98 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 99 Terminate(); | 99 Terminate(); |
| 100 } | 100 } |
| 101 | 101 |
| 102 int32_t AudioRecordJni::Init() { | 102 int32_t AudioRecordJni::Init() { |
| 103 ALOGD("Init%s", GetThreadInfo().c_str()); | 103 ALOGD("Init%s", GetThreadInfo().c_str()); |
| 104 DCHECK(thread_checker_.CalledOnValidThread()); | 104 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 105 return 0; | 105 return 0; |
| 106 } | 106 } |
| 107 | 107 |
| 108 int32_t AudioRecordJni::Terminate() { | 108 int32_t AudioRecordJni::Terminate() { |
| 109 ALOGD("Terminate%s", GetThreadInfo().c_str()); | 109 ALOGD("Terminate%s", GetThreadInfo().c_str()); |
| 110 DCHECK(thread_checker_.CalledOnValidThread()); | 110 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 111 StopRecording(); | 111 StopRecording(); |
| 112 return 0; | 112 return 0; |
| 113 } | 113 } |
| 114 | 114 |
| 115 int32_t AudioRecordJni::InitRecording() { | 115 int32_t AudioRecordJni::InitRecording() { |
| 116 ALOGD("InitRecording%s", GetThreadInfo().c_str()); | 116 ALOGD("InitRecording%s", GetThreadInfo().c_str()); |
| 117 DCHECK(thread_checker_.CalledOnValidThread()); | 117 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 118 DCHECK(!initialized_); | 118 RTC_DCHECK(!initialized_); |
| 119 DCHECK(!recording_); | 119 RTC_DCHECK(!recording_); |
| 120 int frames_per_buffer = j_audio_record_->InitRecording( | 120 int frames_per_buffer = j_audio_record_->InitRecording( |
| 121 audio_parameters_.sample_rate(), audio_parameters_.channels()); | 121 audio_parameters_.sample_rate(), audio_parameters_.channels()); |
| 122 if (frames_per_buffer < 0) { | 122 if (frames_per_buffer < 0) { |
| 123 ALOGE("InitRecording failed!"); | 123 ALOGE("InitRecording failed!"); |
| 124 return -1; | 124 return -1; |
| 125 } | 125 } |
| 126 frames_per_buffer_ = static_cast<size_t>(frames_per_buffer); | 126 frames_per_buffer_ = static_cast<size_t>(frames_per_buffer); |
| 127 ALOGD("frames_per_buffer: %" PRIuS, frames_per_buffer_); | 127 ALOGD("frames_per_buffer: %" PRIuS, frames_per_buffer_); |
| 128 CHECK_EQ(direct_buffer_capacity_in_bytes_, | 128 RTC_CHECK_EQ(direct_buffer_capacity_in_bytes_, |
| 129 frames_per_buffer_ * kBytesPerFrame); | 129 frames_per_buffer_ * kBytesPerFrame); |
| 130 CHECK_EQ(frames_per_buffer_, audio_parameters_.frames_per_10ms_buffer()); | 130 RTC_CHECK_EQ(frames_per_buffer_, audio_parameters_.frames_per_10ms_buffer()); |
| 131 initialized_ = true; | 131 initialized_ = true; |
| 132 return 0; | 132 return 0; |
| 133 } | 133 } |
| 134 | 134 |
| 135 int32_t AudioRecordJni::StartRecording() { | 135 int32_t AudioRecordJni::StartRecording() { |
| 136 ALOGD("StartRecording%s", GetThreadInfo().c_str()); | 136 ALOGD("StartRecording%s", GetThreadInfo().c_str()); |
| 137 DCHECK(thread_checker_.CalledOnValidThread()); | 137 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 138 DCHECK(initialized_); | 138 RTC_DCHECK(initialized_); |
| 139 DCHECK(!recording_); | 139 RTC_DCHECK(!recording_); |
| 140 if (!j_audio_record_->StartRecording()) { | 140 if (!j_audio_record_->StartRecording()) { |
| 141 ALOGE("StartRecording failed!"); | 141 ALOGE("StartRecording failed!"); |
| 142 return -1; | 142 return -1; |
| 143 } | 143 } |
| 144 recording_ = true; | 144 recording_ = true; |
| 145 return 0; | 145 return 0; |
| 146 } | 146 } |
| 147 | 147 |
| 148 int32_t AudioRecordJni::StopRecording() { | 148 int32_t AudioRecordJni::StopRecording() { |
| 149 ALOGD("StopRecording%s", GetThreadInfo().c_str()); | 149 ALOGD("StopRecording%s", GetThreadInfo().c_str()); |
| 150 DCHECK(thread_checker_.CalledOnValidThread()); | 150 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 151 if (!initialized_ || !recording_) { | 151 if (!initialized_ || !recording_) { |
| 152 return 0; | 152 return 0; |
| 153 } | 153 } |
| 154 if (!j_audio_record_->StopRecording()) { | 154 if (!j_audio_record_->StopRecording()) { |
| 155 ALOGE("StopRecording failed!"); | 155 ALOGE("StopRecording failed!"); |
| 156 return -1; | 156 return -1; |
| 157 } | 157 } |
| 158 // If we don't detach here, we will hit a DCHECK in OnDataIsRecorded() next | 158 // If we don't detach here, we will hit a RTC_DCHECK in OnDataIsRecorded() |
| 159 // time StartRecording() is called since it will create a new Java thread. | 159 // next time StartRecording() is called since it will create a new Java |
| 160 // thread. |
| 160 thread_checker_java_.DetachFromThread(); | 161 thread_checker_java_.DetachFromThread(); |
| 161 initialized_ = false; | 162 initialized_ = false; |
| 162 recording_ = false; | 163 recording_ = false; |
| 163 return 0; | 164 return 0; |
| 164 } | 165 } |
| 165 | 166 |
| 166 void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { | 167 void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { |
| 167 ALOGD("AttachAudioBuffer"); | 168 ALOGD("AttachAudioBuffer"); |
| 168 DCHECK(thread_checker_.CalledOnValidThread()); | 169 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 169 audio_device_buffer_ = audioBuffer; | 170 audio_device_buffer_ = audioBuffer; |
| 170 const int sample_rate_hz = audio_parameters_.sample_rate(); | 171 const int sample_rate_hz = audio_parameters_.sample_rate(); |
| 171 ALOGD("SetRecordingSampleRate(%d)", sample_rate_hz); | 172 ALOGD("SetRecordingSampleRate(%d)", sample_rate_hz); |
| 172 audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz); | 173 audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz); |
| 173 const int channels = audio_parameters_.channels(); | 174 const int channels = audio_parameters_.channels(); |
| 174 ALOGD("SetRecordingChannels(%d)", channels); | 175 ALOGD("SetRecordingChannels(%d)", channels); |
| 175 audio_device_buffer_->SetRecordingChannels(channels); | 176 audio_device_buffer_->SetRecordingChannels(channels); |
| 176 total_delay_in_milliseconds_ = | 177 total_delay_in_milliseconds_ = |
| 177 audio_manager_->GetDelayEstimateInMilliseconds(); | 178 audio_manager_->GetDelayEstimateInMilliseconds(); |
| 178 DCHECK_GT(total_delay_in_milliseconds_, 0); | 179 RTC_DCHECK_GT(total_delay_in_milliseconds_, 0); |
| 179 ALOGD("total_delay_in_milliseconds: %d", total_delay_in_milliseconds_); | 180 ALOGD("total_delay_in_milliseconds: %d", total_delay_in_milliseconds_); |
| 180 } | 181 } |
| 181 | 182 |
| 182 int32_t AudioRecordJni::EnableBuiltInAEC(bool enable) { | 183 int32_t AudioRecordJni::EnableBuiltInAEC(bool enable) { |
| 183 ALOGD("EnableBuiltInAEC%s", GetThreadInfo().c_str()); | 184 ALOGD("EnableBuiltInAEC%s", GetThreadInfo().c_str()); |
| 184 DCHECK(thread_checker_.CalledOnValidThread()); | 185 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 185 return j_audio_record_->EnableBuiltInAEC(enable) ? 0 : -1; | 186 return j_audio_record_->EnableBuiltInAEC(enable) ? 0 : -1; |
| 186 } | 187 } |
| 187 | 188 |
| 188 void JNICALL AudioRecordJni::CacheDirectBufferAddress( | 189 void JNICALL AudioRecordJni::CacheDirectBufferAddress( |
| 189 JNIEnv* env, jobject obj, jobject byte_buffer, jlong nativeAudioRecord) { | 190 JNIEnv* env, jobject obj, jobject byte_buffer, jlong nativeAudioRecord) { |
| 190 webrtc::AudioRecordJni* this_object = | 191 webrtc::AudioRecordJni* this_object = |
| 191 reinterpret_cast<webrtc::AudioRecordJni*> (nativeAudioRecord); | 192 reinterpret_cast<webrtc::AudioRecordJni*> (nativeAudioRecord); |
| 192 this_object->OnCacheDirectBufferAddress(env, byte_buffer); | 193 this_object->OnCacheDirectBufferAddress(env, byte_buffer); |
| 193 } | 194 } |
| 194 | 195 |
| 195 void AudioRecordJni::OnCacheDirectBufferAddress( | 196 void AudioRecordJni::OnCacheDirectBufferAddress( |
| 196 JNIEnv* env, jobject byte_buffer) { | 197 JNIEnv* env, jobject byte_buffer) { |
| 197 ALOGD("OnCacheDirectBufferAddress"); | 198 ALOGD("OnCacheDirectBufferAddress"); |
| 198 DCHECK(thread_checker_.CalledOnValidThread()); | 199 RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
| 199 DCHECK(!direct_buffer_address_); | 200 RTC_DCHECK(!direct_buffer_address_); |
| 200 direct_buffer_address_ = | 201 direct_buffer_address_ = |
| 201 env->GetDirectBufferAddress(byte_buffer); | 202 env->GetDirectBufferAddress(byte_buffer); |
| 202 jlong capacity = env->GetDirectBufferCapacity(byte_buffer); | 203 jlong capacity = env->GetDirectBufferCapacity(byte_buffer); |
| 203 ALOGD("direct buffer capacity: %lld", capacity); | 204 ALOGD("direct buffer capacity: %lld", capacity); |
| 204 direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity); | 205 direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity); |
| 205 } | 206 } |
| 206 | 207 |
| 207 void JNICALL AudioRecordJni::DataIsRecorded( | 208 void JNICALL AudioRecordJni::DataIsRecorded( |
| 208 JNIEnv* env, jobject obj, jint length, jlong nativeAudioRecord) { | 209 JNIEnv* env, jobject obj, jint length, jlong nativeAudioRecord) { |
| 209 webrtc::AudioRecordJni* this_object = | 210 webrtc::AudioRecordJni* this_object = |
| 210 reinterpret_cast<webrtc::AudioRecordJni*> (nativeAudioRecord); | 211 reinterpret_cast<webrtc::AudioRecordJni*> (nativeAudioRecord); |
| 211 this_object->OnDataIsRecorded(length); | 212 this_object->OnDataIsRecorded(length); |
| 212 } | 213 } |
| 213 | 214 |
| 214 // This method is called on a high-priority thread from Java. The name of | 215 // This method is called on a high-priority thread from Java. The name of |
| 215 // the thread is 'AudioRecordThread'. | 216 // the thread is 'AudioRecordThread'. |
| 216 void AudioRecordJni::OnDataIsRecorded(int length) { | 217 void AudioRecordJni::OnDataIsRecorded(int length) { |
| 217 DCHECK(thread_checker_java_.CalledOnValidThread()); | 218 RTC_DCHECK(thread_checker_java_.CalledOnValidThread()); |
| 218 if (!audio_device_buffer_) { | 219 if (!audio_device_buffer_) { |
| 219 ALOGE("AttachAudioBuffer has not been called!"); | 220 ALOGE("AttachAudioBuffer has not been called!"); |
| 220 return; | 221 return; |
| 221 } | 222 } |
| 222 audio_device_buffer_->SetRecordedBuffer(direct_buffer_address_, | 223 audio_device_buffer_->SetRecordedBuffer(direct_buffer_address_, |
| 223 frames_per_buffer_); | 224 frames_per_buffer_); |
| 224 // We provide one (combined) fixed delay estimate for the APM and use the | 225 // We provide one (combined) fixed delay estimate for the APM and use the |
| 225 // |playDelayMs| parameter only. Components like the AEC only see the sum | 226 // |playDelayMs| parameter only. Components like the AEC only see the sum |
| 226 // of |playDelayMs| and |recDelayMs|, hence the distribution does not matter. | 227 // of |playDelayMs| and |recDelayMs|, hence the distribution does not matter. |
| 227 audio_device_buffer_->SetVQEData(total_delay_in_milliseconds_, | 228 audio_device_buffer_->SetVQEData(total_delay_in_milliseconds_, |
| 228 0, // recDelayMs | 229 0, // recDelayMs |
| 229 0); // clockDrift | 230 0); // clockDrift |
| 230 if (audio_device_buffer_->DeliverRecordedData() == -1) { | 231 if (audio_device_buffer_->DeliverRecordedData() == -1) { |
| 231 ALOGE("AudioDeviceBuffer::DeliverRecordedData failed!"); | 232 ALOGE("AudioDeviceBuffer::DeliverRecordedData failed!"); |
| 232 } | 233 } |
| 233 } | 234 } |
| 234 | 235 |
| 235 } // namespace webrtc | 236 } // namespace webrtc |
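
Aside (not part of the CL above): a minimal, self-contained sketch of the thread-checker detach pattern that the comments in the constructor and in StopRecording() describe. The simplified ThreadChecker below is an illustrative stand-in for WebRTC's rtc::ThreadChecker, assuming its lazy-binding behavior: the checker binds to the first thread that calls CalledOnValidThread() after construction or after DetachFromThread(), so detaching in StopRecording() lets the next Java audio thread re-bind instead of tripping the RTC_DCHECK in OnDataIsRecorded().

    // Hypothetical, simplified stand-in for rtc::ThreadChecker (not the real
    // WebRTC implementation): binds lazily to the first thread that calls
    // CalledOnValidThread(); DetachFromThread() clears the binding so the
    // next caller re-binds.
    #include <cassert>
    #include <thread>

    class ThreadChecker {
     public:
      bool CalledOnValidThread() {
        std::thread::id current = std::this_thread::get_id();
        if (bound_ == std::thread::id())
          bound_ = current;  // First call after (re)attach: bind to caller.
        return bound_ == current;
      }
      void DetachFromThread() { bound_ = std::thread::id(); }

     private:
      std::thread::id bound_;
    };

    int main() {
      ThreadChecker checker_java;
      checker_java.DetachFromThread();  // Constructor thread must not own it.

      // First recording session: the Java audio thread binds the checker.
      std::thread session1([&] { assert(checker_java.CalledOnValidThread()); });
      session1.join();

      // StopRecording(): detach so the *next* audio thread can bind again.
      // Skipping this detach would make the assert in session2 fail, which
      // mirrors the RTC_DCHECK the comment in StopRecording() warns about.
      checker_java.DetachFromThread();

      std::thread session2([&] { assert(checker_java.CalledOnValidThread()); });
      session2.join();
      return 0;
    }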