Chromium Code Reviews

Unified Diff: webrtc/modules/audio_processing/audio_processing_impl.cc

Issue 2778783002: AecDump interface (Closed)
Patch Set: Update to changed CaptureStreamInfo interface. Created 3 years, 7 months ago
Index: webrtc/modules/audio_processing/audio_processing_impl.cc
diff --git a/webrtc/modules/audio_processing/audio_processing_impl.cc b/webrtc/modules/audio_processing/audio_processing_impl.cc
index 816210f34f8738eb7a042adee69f87f5ed1a99d8..8ac40e904e6fea942ab7c68c76b0ffece2b8c2c1 100644
--- a/webrtc/modules/audio_processing/audio_processing_impl.cc
+++ b/webrtc/modules/audio_processing/audio_processing_impl.cc
@@ -152,6 +152,23 @@ class HighPassFilterImpl : public HighPassFilter {
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(HighPassFilterImpl);
};
+webrtc::InternalAPMStreamsConfig ToStreamsConfig(
+ const ProcessingConfig& api_format) {
+ webrtc::InternalAPMStreamsConfig result;
+ result.input_sample_rate = api_format.input_stream().sample_rate_hz();
+ result.input_num_channels = api_format.input_stream().num_channels();
+ result.output_num_channels = api_format.output_stream().num_channels();
+ result.render_input_num_channels =
+ api_format.reverse_input_stream().num_channels();
+ result.render_input_sample_rate =
+ api_format.reverse_input_stream().sample_rate_hz();
+ result.output_sample_rate = api_format.output_stream().sample_rate_hz();
+ result.render_output_sample_rate =
+ api_format.reverse_output_stream().sample_rate_hz();
+ result.render_output_num_channels =
+ api_format.reverse_output_stream().num_channels();
+ return result;
+}
} // namespace
// Throughout webrtc, it's assumed that success is represented by zero.
@@ -526,7 +543,9 @@ int AudioProcessingImpl::InitializeLocked() {
}
}
#endif
-
+ if (aec_dump_) {
+ aec_dump_->WriteInitMessage(ToStreamsConfig(formats_.api_format));
+ }
return kNoError;
}
@@ -824,6 +843,11 @@ int AudioProcessingImpl::ProcessStream(const float* const* src,
}
#endif
+ AecDump::CaptureStreamInfo* stream_info = nullptr;
+ if (aec_dump_) {
+ stream_info = RecordUnprocessedCaptureStream(src);
+ }
+
capture_.capture_audio->CopyFrom(src, formats_.api_format.input_stream());
RETURN_ON_ERR(ProcessCaptureStreamLocked());
capture_.capture_audio->CopyTo(formats_.api_format.output_stream(), dest);
@@ -841,7 +865,9 @@ int AudioProcessingImpl::ProcessStream(const float* const* src,
&crit_debug_, &debug_dump_.capture));
}
#endif
-
+ if (aec_dump_) {
+ RecordProcessedCaptureStream(dest, stream_info);
+ }
return kNoError;
}
@@ -1078,6 +1104,11 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
return kBadDataLengthError;
}
+ AecDump::CaptureStreamInfo* stream_info = nullptr;
+ if (aec_dump_) {
+ stream_info = RecordUnprocessedCaptureStream(*frame);
peah-webrtc 2017/05/15 05:32:51 I think the usage of having stream_info as an output of this call is not that nice.
aleloi 2017/05/15 13:20:51 I've changed it now. It looks a little better.
+ }
+
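Annotation on the thread above: one way to avoid handing stream_info back to the caller is to keep the GetCaptureStreamInfo()/WriteCaptureStreamMessage() round trip entirely inside the recording helpers. A rough sketch, under the assumption that GetCaptureStreamInfo() keeps returning the same pending message until it is flushed (this is not the committed follow-up):

  void AudioProcessingImpl::RecordUnprocessedCaptureStream(
      const AudioFrame& capture_frame) {
    RTC_DCHECK(aec_dump_);
    aec_dump_->WriteConfig(CollectApmConfig(), false);
    AecDump::CaptureStreamInfo* stream_info = aec_dump_->GetCaptureStreamInfo();
    stream_info->AddInput(capture_frame);
    PopulateStreamInfoWithState(stream_info);
  }

  void AudioProcessingImpl::RecordProcessedCaptureStream(
      const AudioFrame& processed_capture_frame) {
    RTC_DCHECK(aec_dump_);
    // Assumes the same pending CaptureStreamInfo is still available here.
    aec_dump_->GetCaptureStreamInfo()->AddOutput(processed_capture_frame);
    aec_dump_->WriteCaptureStreamMessage();
  }

ProcessStream() would then call the two helpers directly, with no local stream_info pointer.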
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
if (debug_dump_.debug_file->is_open()) {
RETURN_ON_ERR(WriteConfigMessage(false));
@@ -1095,6 +1126,9 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
capture_.capture_audio->InterleaveTo(
frame, submodule_states_.CaptureMultiBandProcessingActive());
+ if (aec_dump_) {
+ RecordProcessedCaptureStream(*frame, stream_info);
+ }
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
if (debug_dump_.debug_file->is_open()) {
audioproc::Stream* msg = debug_dump_.capture.event_msg->mutable_stream();
@@ -1376,7 +1410,14 @@ int AudioProcessingImpl::AnalyzeReverseStreamLocked(
&crit_debug_, &debug_dump_.render));
}
#endif
-
+ if (aec_dump_) {
+ const size_t channel_size =
+ formats_.api_format.reverse_input_stream().num_frames();
+ const size_t num_channels =
+ formats_.api_format.reverse_input_stream().num_channels();
+ aec_dump_->WriteRenderStreamMessage(
+ FloatAudioFrame(src, num_channels, channel_size));
+ }
render_.render_audio->CopyFrom(src,
formats_.api_format.reverse_input_stream());
return ProcessRenderStreamLocked();
@@ -1429,6 +1470,10 @@ int AudioProcessingImpl::ProcessReverseStream(AudioFrame* frame) {
&crit_debug_, &debug_dump_.render));
}
#endif
+ if (aec_dump_) {
+ aec_dump_->WriteRenderStreamMessage(*frame);
+ }
+
render_.render_audio->DeinterleaveFrom(frame);
RETURN_ON_ERR(ProcessRenderStreamLocked());
render_.render_audio->InterleaveTo(
@@ -1512,6 +1557,22 @@ int AudioProcessingImpl::delay_offset_ms() const {
return capture_.delay_offset_ms;
}
+void AudioProcessingImpl::AttachAecDump(std::unique_ptr<AecDump> aec_dump) {
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
+ RTC_DCHECK(aec_dump);
+ aec_dump_ = std::move(aec_dump);
+
+ aec_dump_->WriteConfig(CollectApmConfig(), true);
+ aec_dump_->WriteInitMessage(ToStreamsConfig(formats_.api_format));
+}
+
+void AudioProcessingImpl::DetachAecDump() {
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
+ aec_dump_.reset();
+}
+
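For reference, a hypothetical caller-side sketch of the attach/detach API added here, assuming AttachAecDump()/DetachAecDump() are exposed on the AudioProcessing interface in the corresponding header change. FileAecDump is a placeholder for whatever AecDump implementation the embedder provides; it is not part of this CL:

  std::unique_ptr<AudioProcessing> apm(AudioProcessing::Create());
  apm->AttachAecDump(
      std::unique_ptr<AecDump>(new FileAecDump("capture.aecdump")));
  // All subsequent ProcessStream()/ProcessReverseStream() calls are logged.
  apm->DetachAecDump();  // Logging stops; the AecDump object is destroyed.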
int AudioProcessingImpl::StartDebugRecording(
const char filename[AudioProcessing::kMaxFilenameSize],
int64_t max_log_size_bytes) {
@@ -1586,6 +1647,7 @@ int AudioProcessingImpl::StopDebugRecording() {
// Run in a single-threaded manner.
rtc::CritScope cs_render(&crit_render_);
rtc::CritScope cs_capture(&crit_capture_);
+ DetachAecDump();
peah-webrtc 2017/05/15 05:32:51 Please move this to before the locks (as DetachAecDump takes the same locks itself).
aleloi 2017/05/15 13:20:52 Done.
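To illustrate the point above: DetachAecDump() acquires crit_render_ and crit_capture_ on its own, so calling it while already holding them relies on the locks being reentrant. A sketch of the suggested ordering (not the committed code):

  int AudioProcessingImpl::StopDebugRecording() {
    // DetachAecDump() takes crit_render_/crit_capture_ itself, so call it
    // before the scoped locks below rather than while holding them.
    DetachAecDump();

    // Run in a single-threaded manner.
    rtc::CritScope cs_render(&crit_render_);
    rtc::CritScope cs_capture(&crit_capture_);
    // ... existing WEBRTC_AUDIOPROC_DEBUG_DUMP teardown unchanged ...
    return kNoError;
  }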
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
// We just return if recording hasn't started.
@@ -1837,6 +1899,122 @@ void AudioProcessingImpl::UpdateHistogramsOnCallEnd() {
capture_.last_aec_system_delay_ms = 0;
}
+InternalAPMConfig AudioProcessingImpl::CollectApmConfig() const {
+ std::string experiments_description =
+ public_submodules_->echo_cancellation->GetExperimentsDescription();
+ // TODO(peah): Add semicolon-separated concatenations of experiment
+ // descriptions for other submodules.
+ if (capture_nonlocked_.level_controller_enabled) {
+ experiments_description += "LevelController;";
+ }
+ if (constants_.agc_clipped_level_min != kClippedLevelMin) {
+ experiments_description += "AgcClippingLevelExperiment;";
+ }
+ if (capture_nonlocked_.echo_canceller3_enabled) {
+ experiments_description += "EchoCanceller3;";
+ }
+
+ InternalAPMConfig apm_config;
+
+ apm_config.aec_enabled = public_submodules_->echo_cancellation->is_enabled();
+ apm_config.aec_delay_agnostic_enabled =
+ public_submodules_->echo_cancellation->is_delay_agnostic_enabled();
+ apm_config.aec_drift_compensation_enabled =
+ public_submodules_->echo_cancellation->is_drift_compensation_enabled();
+ apm_config.aec_extended_filter_enabled =
+ public_submodules_->echo_cancellation->is_extended_filter_enabled();
+ apm_config.aec_suppression_level = static_cast<int>(
+ public_submodules_->echo_cancellation->suppression_level());
+
+ apm_config.aecm_enabled =
+ public_submodules_->echo_control_mobile->is_enabled();
+ apm_config.aecm_comfort_noise_enabled =
+ public_submodules_->echo_control_mobile->is_comfort_noise_enabled();
+ apm_config.aecm_routing_mode =
+ static_cast<int>(public_submodules_->echo_control_mobile->routing_mode());
+
+ apm_config.agc_enabled = public_submodules_->gain_control->is_enabled();
+ apm_config.agc_mode =
+ static_cast<int>(public_submodules_->gain_control->mode());
+ apm_config.agc_limiter_enabled =
+ public_submodules_->gain_control->is_limiter_enabled();
+ apm_config.noise_robust_agc_enabled = constants_.use_experimental_agc;
+
+ apm_config.hpf_enabled = config_.high_pass_filter.enabled;
+
+ apm_config.ns_enabled = public_submodules_->noise_suppression->is_enabled();
+ apm_config.ns_level =
+ static_cast<int>(public_submodules_->noise_suppression->level());
+
+ apm_config.transient_suppression_enabled =
+ capture_.transient_suppressor_enabled;
+ apm_config.intelligibility_enhancer_enabled =
+ capture_nonlocked_.intelligibility_enabled;
+ apm_config.experiments_description = experiments_description;
+ return apm_config;
+}
+
+AecDump::CaptureStreamInfo* AudioProcessingImpl::RecordUnprocessedCaptureStream(
+ const float* const* src) const {
+ RTC_DCHECK(aec_dump_);
+ aec_dump_->WriteConfig(CollectApmConfig(), false);
peah-webrtc 2017/05/15 05:32:51 How can this method be const? It does writing to aec_dump_.
aleloi 2017/05/15 13:20:51 Done.
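Note on the const question above: the qualifier does compile, because aec_dump_ is a std::unique_ptr member and only the pointer, not the pointed-to AecDump, becomes const inside a const member function; the helpers nevertheless mutate logging state through it. Dropping const amounts to declaring them in audio_processing_impl.h roughly as:

  // audio_processing_impl.h (sketch): the recording helpers write to the
  // attached AecDump, so they are declared without const.
  AecDump::CaptureStreamInfo* RecordUnprocessedCaptureStream(
      const float* const* src);
  void RecordProcessedCaptureStream(
      const float* const* processed_capture_stream,
      AecDump::CaptureStreamInfo* stream_info);
  // (AudioFrame overloads and PopulateStreamInfoWithState() likewise.)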
+ auto* stream_info = aec_dump_->GetCaptureStreamInfo();
+ RTC_DCHECK(stream_info);
+
+ const size_t channel_size = formats_.api_format.input_stream().num_frames();
+ const size_t num_channels = formats_.api_format.input_stream().num_channels();
+ stream_info->AddInput(FloatAudioFrame(src, num_channels, channel_size));
peah-webrtc 2017/05/15 05:32:51 I'm not strongly against it, but I'd suggest dropping the FloatAudioFrame wrapper and passing the channel pointers and sizes directly.
peah-webrtc 2017/05/15 05:57:07 Thinking a bit more about FloatAudioFrame, I guess it is fine to keep it as a light-weight view.
peah-webrtc 2017/05/15 07:25:18 I now saw your comment about this in the upcoming CL.
+ PopulateStreamInfoWithState(stream_info);
+ return stream_info;
+}
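Context for the FloatAudioFrame discussion above: in this CL it acts as a non-owning view over the planar float data (channel pointers plus samples per channel), so the AecDump calls take a single argument instead of three. An illustrative sketch of such a view type (names and layout assumed, not the actual webrtc definition):

  // Illustrative only: a light-weight, non-owning view of planar float audio.
  class FloatAudioFrame {
   public:
    FloatAudioFrame(const float* const* audio,
                    size_t num_channels,
                    size_t channel_size)
        : audio_(audio),
          num_channels_(num_channels),
          channel_size_(channel_size) {}

    size_t num_channels() const { return num_channels_; }
    size_t samples_per_channel() const { return channel_size_; }
    rtc::ArrayView<const float> channel(size_t idx) const {
      RTC_DCHECK_LT(idx, num_channels_);
      return rtc::ArrayView<const float>(audio_[idx], channel_size_);
    }

   private:
    const float* const* audio_;
    const size_t num_channels_;
    const size_t channel_size_;
  };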
+
+AecDump::CaptureStreamInfo* AudioProcessingImpl::RecordUnprocessedCaptureStream(
+ const AudioFrame& capture_frame) const {
+ RTC_DCHECK(aec_dump_);
+ auto* stream_info = aec_dump_->GetCaptureStreamInfo();
+ RTC_DCHECK(stream_info);
+
+ stream_info->AddInput(capture_frame);
+ PopulateStreamInfoWithState(stream_info);
+ aec_dump_->WriteConfig(CollectApmConfig(), false);
+ return stream_info;
+}
+
+void AudioProcessingImpl::RecordProcessedCaptureStream(
+ const float* const* processed_capture_stream,
+ AecDump::CaptureStreamInfo* stream_info) const {
peah-webrtc 2017/05/15 05:32:51 +1, how can it be const? (and elsewhere)
aleloi 2017/05/15 13:20:52 Done.
+ RTC_DCHECK(stream_info);
+ RTC_DCHECK(aec_dump_);
+
+ const size_t channel_size = formats_.api_format.output_stream().num_frames();
+ const size_t num_channels =
+ formats_.api_format.output_stream().num_channels();
+ stream_info->AddOutput(
+ FloatAudioFrame(processed_capture_stream, num_channels, channel_size));
+ aec_dump_->WriteCaptureStreamMessage();
+}
+
+void AudioProcessingImpl::RecordProcessedCaptureStream(
+ const AudioFrame& processed_capture_frame,
+ AecDump::CaptureStreamInfo* stream_info) const {
+ RTC_DCHECK(stream_info);
+ RTC_DCHECK(aec_dump_);
+
+ stream_info->AddOutput(processed_capture_frame);
+ aec_dump_->WriteCaptureStreamMessage();
+}
+
+void AudioProcessingImpl::PopulateStreamInfoWithState(
+ AecDump::CaptureStreamInfo* stream_info) const {
+ RTC_DCHECK(stream_info);
+
+ stream_info->set_delay(capture_nonlocked_.stream_delay_ms);
+ stream_info->set_drift(
+ public_submodules_->echo_cancellation->stream_drift_samples());
+ stream_info->set_level(gain_control()->stream_analog_level());
+ stream_info->set_keypress(capture_.key_pressed);
+}
+
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
int AudioProcessingImpl::WriteMessageToDebugFile(
FileWrapper* debug_file,
