Index: webrtc/modules/audio_processing/echo_cancellation_impl.cc
diff --git a/webrtc/modules/audio_processing/echo_cancellation_impl.cc b/webrtc/modules/audio_processing/echo_cancellation_impl.cc
index bdcfa2ad075877d6c772b2b6492c781d151150b9..1422cb8d72791e4d5d2c2ee3108cbb9851deb581 100644
--- a/webrtc/modules/audio_processing/echo_cancellation_impl.cc
+++ b/webrtc/modules/audio_processing/echo_cancellation_impl.cc
@@ -55,6 +55,9 @@ AudioProcessing::Error MapError(int err) {
 }
 }  // namespace
+const size_t EchoCancellationImpl::kAllowedValuesOfSamplesPerFrame1;
+const size_t EchoCancellationImpl::kAllowedValuesOfSamplesPerFrame2;
+
 EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
                                            CriticalSectionWrapper* crit)
     : ProcessingComponent(),
@@ -68,7 +71,9 @@ EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
       stream_has_echo_(false),
       delay_logging_enabled_(false),
       extended_filter_enabled_(false),
-      delay_agnostic_enabled_(false) {
+      delay_agnostic_enabled_(false),
+      render_queue_element_max_size_(0) {
+  AllocateRenderQueue();
 }
 EchoCancellationImpl::~EchoCancellationImpl() {}
@@ -85,25 +90,70 @@ int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) {
   // The ordering convention must be followed to pass to the correct AEC.
   size_t handle_index = 0;
+  int buffer_index = 0;
   for (int i = 0; i < apm_->num_output_channels(); i++) {
     for (int j = 0; j < audio->num_channels(); j++) {
       Handle* my_handle = static_cast<Handle*>(handle(handle_index));
-      err = WebRtcAec_BufferFarend(
-          my_handle,
-          audio->split_bands_const_f(j)[kBand0To8kHz],
+      // Retrieve any error code produced by the buffering of the farend
+      // signal
+      err = WebRtcAec_GetBufferFarendError(
+          my_handle, audio->split_bands_const_f(j)[kBand0To8kHz],
           audio->num_frames_per_band());
       if (err != apm_->kNoError) {
         return MapError(err);  // TODO(ajm): warning possible?
       }
+      // Buffer the samples in the render queue.
+      RTC_DCHECK((buffer_index + audio->num_frames_per_band()) <=
+                 render_queue_element_max_size_);
+
+      memcpy(&render_queue_buffer_[buffer_index],
+             audio->split_bands_const_f(j)[kBand0To8kHz],
+             (audio->num_frames_per_band() *
+              sizeof(*audio->split_bands_const_f(j)[kBand0To8kHz])));
+
+      buffer_index += audio->num_frames_per_band();
kwiberg-webrtc 2015/10/27 10:43:07:
Can't you use the normal features of std::vector to …
peah-webrtc 2015/10/29 11:36:56:
Done.
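As an illustration of what the reviewer appears to be suggesting (a sketch only, not the actual follow-up patch set): let the std::vector grow itself instead of memcpy-ing into a preallocated buffer behind a hand-maintained buffer_index. The helper name AppendBand is hypothetical.

  #include <vector>

  // Hypothetical helper: append one band's worth of samples and let the
  // vector track its own size.
  void AppendBand(std::vector<float>* render_queue_buffer,
                  const float* band,  // e.g. split_bands_const_f(j)[kBand0To8kHz]
                  size_t num_frames_per_band) {
    render_queue_buffer->insert(render_queue_buffer->end(), band,
                                band + num_frames_per_band);
  }

With render_queue_buffer_.clear() at the top of ProcessRenderAudio(), the final resize(buffer_index) and the index bookkeeping checked by the RTC_DCHECK above would no longer be needed.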
+
       handle_index++;
     }
   }
+  render_queue_buffer_.resize(buffer_index);
+  render_signal_queue_->Insert(&render_queue_buffer_);
kwiberg-webrtc 2015/10/27 10:43:07:
Doesn't Insert return a status bool that you'd want …
peah-webrtc 2015/10/29 11:36:56:
Done.
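The SwapQueue type itself isn't visible in this hunk, so the pattern the reviewer is asking for is sketched below against a hypothetical BoundedQueue stand-in (assuming, as the comment implies, that Insert() returns false when the queue is full). Only the "check the status instead of dropping it" shape is the point; the recovery strategy chosen in later patch sets is not shown here.

  #include <cassert>
  #include <cstddef>
  #include <deque>
  #include <utility>
  #include <vector>

  // Hypothetical stand-in for the swap queue: Insert() reports failure
  // instead of silently dropping the item when the queue is full.
  class BoundedQueue {
   public:
    explicit BoundedQueue(size_t max_items) : max_items_(max_items) {}
    bool Insert(std::vector<float>* item) {
      if (items_.size() >= max_items_) return false;
      items_.push_back(std::move(*item));
      return true;
    }
    bool Remove(std::vector<float>* item) {
      if (items_.empty()) return false;
      *item = std::move(items_.front());
      items_.pop_front();
      return true;
    }

   private:
    const size_t max_items_;
    std::deque<std::vector<float>> items_;
  };

  int main() {
    BoundedQueue queue(1);
    std::vector<float> frame(160, 0.f);
    std::vector<float> scratch;
    queue.Insert(&frame);
    frame.assign(160, 1.f);
    // Don't ignore the status: one option is to drain the queue (as the
    // capture side would) and then retry the insertion.
    if (!queue.Insert(&frame)) {
      while (queue.Remove(&scratch)) {
      }
      const bool inserted = queue.Insert(&frame);
      assert(inserted);
      (void)inserted;
    }
    return 0;
  }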
+
   return apm_->kNoError;
 }
+// Read chunks of data that were received and queued on the render side from
+// a queue. All the data chunks are buffered into the farend signal of the AEC.
+void EchoCancellationImpl::ReadQueuedRenderData() {
+  if (!is_component_enabled()) {
+    return;
+  }
+
+  bool samples_read = render_signal_queue_->Remove(&capture_queue_buffer_);
+  while (samples_read) {
kwiberg-webrtc 2015/10/27 10:43:07:
Just
  while (render_signal_queue_->Remove(&capture_queue_buffer_)) { …
peah-webrtc 2015/10/29 11:36:56:
Done.
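Spelled out, the loop shape being suggested tests Remove() directly in the loop condition, which removes both the priming read before the loop and the re-read at its bottom. A self-contained sketch with a stand-in Remove() that has the same bool-on-success contract:

  #include <cstdio>
  #include <queue>
  #include <utility>
  #include <vector>

  // Stand-in with the same contract as the swap queue's Remove(): returns
  // true and fills |out| if an item was available, false otherwise.
  bool Remove(std::queue<std::vector<float>>* q, std::vector<float>* out) {
    if (q->empty()) return false;
    *out = std::move(q->front());
    q->pop();
    return true;
  }

  int main() {
    std::queue<std::vector<float>> render_signal_queue;
    render_signal_queue.push(std::vector<float>(160, 0.f));
    render_signal_queue.push(std::vector<float>(160, 1.f));

    std::vector<float> capture_queue_buffer;
    // Test Remove() in the condition; the body runs once per queued chunk.
    while (Remove(&render_signal_queue, &capture_queue_buffer)) {
      std::printf("dequeued %zu samples\n", capture_queue_buffer.size());
    }
    return 0;
  }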
+    size_t handle_index = 0;
+    int buffer_index = 0;
+    const int num_frames_per_band =
+        capture_queue_buffer_.size() /
+        (apm_->num_output_channels() * apm_->num_reverse_channels());
+    for (int i = 0; i < apm_->num_output_channels(); i++) {
+      for (int j = 0; j < apm_->num_reverse_channels(); j++) {
+        Handle* my_handle = static_cast<Handle*>(handle(handle_index));
+        (void)WebRtcAec_BufferFarend(my_handle,
+                                     &capture_queue_buffer_[buffer_index],
+                                     num_frames_per_band);
+
+        buffer_index += num_frames_per_band;
+        handle_index++;
+      }
+    }
+    samples_read = render_signal_queue_->Remove(&capture_queue_buffer_);
+  }
+}
+
 int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
   if (!is_component_enabled()) {
     return apm_->kNoError;
@@ -333,9 +383,36 @@ int EchoCancellationImpl::Initialize() {
     return err;
   }
+  AllocateRenderQueue();
+
   return apm_->kNoError;
 }
+void EchoCancellationImpl::AllocateRenderQueue() {
+  const size_t max_frame_size = std::max(kAllowedValuesOfSamplesPerFrame1,
+                                         kAllowedValuesOfSamplesPerFrame2);
+  const size_t min_frame_size = std::min(kAllowedValuesOfSamplesPerFrame1,
+                                         kAllowedValuesOfSamplesPerFrame2);
+
+  render_queue_element_max_size_ =
+      (max_frame_size * num_handles_required());
kwiberg-webrtc
2015/10/27 10:43:07
Unnecessary parentheses.
peah-webrtc
2015/10/29 11:36:56
Done.
+
+  const size_t render_queue_element_min_size =
+      (min_frame_size * num_handles_required());
kwiberg-webrtc
2015/10/27 10:43:07
Unnecessary parentheses.
peah-webrtc
2015/10/29 11:36:56
Done.
+
+  std::vector<float> template_queue_element(render_queue_element_max_size_);
+
+  render_signal_queue_.reset(
+      new SwapQueue<std::vector<float>, AecRenderQueueItemVerifier>(
+          kMaxNumFramesToBuffer,
+          AecRenderQueueItemVerifier(render_queue_element_min_size,
+                                     render_queue_element_max_size_),
+          template_queue_element));
+
+  render_queue_buffer_.resize(render_queue_element_max_size_);
+  capture_queue_buffer_.resize(render_queue_element_max_size_);
+}
+
 void EchoCancellationImpl::SetExtraOptions(const Config& config) {
   extended_filter_enabled_ = config.Get<ExtendedFilter>().enabled;
   delay_agnostic_enabled_ = config.Get<DelayAgnostic>().enabled;
@@ -368,7 +445,6 @@ int EchoCancellationImpl::ConfigureHandle(void* handle) const {
   config.nlpMode = MapSetting(suppression_level_);
   config.skewMode = drift_compensation_enabled_;
   config.delay_logging = delay_logging_enabled_;
-
   WebRtcAec_enable_extended_filter(
       WebRtcAec_aec_core(static_cast<Handle*>(handle)),
       extended_filter_enabled_ ? 1 : 0);
@@ -387,4 +463,5 @@ int EchoCancellationImpl::GetHandleError(void* handle) const {
   assert(handle != NULL);
   return AudioProcessing::kUnspecifiedError;
 }
+
 }  // namespace webrtc