| Index: webrtc/modules/audio_processing/audio_processing_impl.cc |
| diff --git a/webrtc/modules/audio_processing/audio_processing_impl.cc b/webrtc/modules/audio_processing/audio_processing_impl.cc |
| index f4977d4b01f8a507545dc46ccc17a16a1ea60c43..21629c53158a2fc800a735d4baf923a0e1c38003 100644 |
| --- a/webrtc/modules/audio_processing/audio_processing_impl.cc |
| +++ b/webrtc/modules/audio_processing/audio_processing_impl.cc |
| @@ -223,13 +223,20 @@ AudioProcessingImpl::AudioProcessingImpl(const Config& config, |
| array_geometry_(config.Get<Beamforming>().array_geometry), |
| target_direction_(config.Get<Beamforming>().target_direction), |
| intelligibility_enabled_(config.Get<Intelligibility>().enabled) { |
| - echo_cancellation_ = new EchoCancellationImpl(this, crit_); |
| + render_thread_checker_.DetachFromThread(); |
| + signal_thread_checker_.DetachFromThread(); |
| + capture_thread_checker_.DetachFromThread(); |
| + |
| + echo_cancellation_ = |
| + new EchoCancellationImpl(this, crit_, &render_thread_checker_); |
| component_list_.push_back(echo_cancellation_); |
| - echo_control_mobile_ = new EchoControlMobileImpl(this, crit_); |
| + echo_control_mobile_ = |
| + new EchoControlMobileImpl(this, crit_, &render_thread_checker_); |
| component_list_.push_back(echo_control_mobile_); |
| - gain_control_ = new GainControlImpl(this, crit_); |
| + gain_control_ = new GainControlImpl(this, crit_, &render_thread_checker_, |
| + &capture_thread_checker_); |
| component_list_.push_back(gain_control_); |
| high_pass_filter_ = new HighPassFilterImpl(this, crit_); |
| @@ -274,6 +281,7 @@ AudioProcessingImpl::~AudioProcessingImpl() { |
| } |
| int AudioProcessingImpl::Initialize() { |
| + RTC_DCHECK(signal_thread_checker_.CalledOnValidThread()); |
| CriticalSectionScoped crit_scoped(crit_); |
| return InitializeLocked(); |
| } |
| @@ -284,6 +292,7 @@ int AudioProcessingImpl::Initialize(int input_sample_rate_hz, |
| ChannelLayout input_layout, |
| ChannelLayout output_layout, |
| ChannelLayout reverse_layout) { |
| + RTC_DCHECK(signal_thread_checker_.CalledOnValidThread()); |
| const ProcessingConfig processing_config = { |
| {{input_sample_rate_hz, |
| ChannelsFromLayout(input_layout), |
| @@ -302,6 +311,7 @@ int AudioProcessingImpl::Initialize(int input_sample_rate_hz, |
| } |
| int AudioProcessingImpl::Initialize(const ProcessingConfig& processing_config) { |
| + RTC_DCHECK(signal_thread_checker_.CalledOnValidThread()); |
| CriticalSectionScoped crit_scoped(crit_); |
| return InitializeLocked(processing_config); |
| } |
| @@ -310,6 +320,7 @@ int AudioProcessingImpl::Initialize(const ProcessingConfig& processing_config) { |
| // their current values. |
| int AudioProcessingImpl::MaybeInitializeLocked( |
| const ProcessingConfig& processing_config) { |
| + // Called from both threads. Thread check is therefore not possible. |
|
the sun, 2015/11/23 12:46:21:
Uhm, yes it is possible:
RTC_DCHECK(render_thread_

peah-webrtc, 2015/11/23 13:49:40:
Of course! Great! Added that!
Done.
|
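
For context, the pattern discussed above is WebRTC's ThreadChecker: the constructor detaches the checkers (since the object may be created on yet another thread), and each single-threaded API then binds and verifies its thread with RTC_DCHECK(checker.CalledOnValidThread()). A minimal sketch of that pattern, assuming the rtc::ThreadChecker class and the header paths of this era; the class and method names below are illustrative, not the actual AudioProcessingImpl declaration:

    #include "webrtc/base/checks.h"          // RTC_DCHECK (assumed path)
    #include "webrtc/base/thread_checker.h"  // rtc::ThreadChecker (assumed path)

    // Illustrative class showing the checker pattern used in this CL.
    class ThreadedApi {
     public:
      ThreadedApi() {
        // The object may be constructed on an arbitrary thread, so detach the
        // checkers; each one re-binds to the first thread that calls into it.
        render_thread_checker_.DetachFromThread();
        capture_thread_checker_.DetachFromThread();
      }

      void OnRenderAudio() {
        // Binds to the render thread on the first call and asserts (in debug
        // builds) if a later call arrives on a different thread.
        RTC_DCHECK(render_thread_checker_.CalledOnValidThread());
      }

      void OnCaptureAudio() {
        RTC_DCHECK(capture_thread_checker_.CalledOnValidThread());
      }

     private:
      rtc::ThreadChecker render_thread_checker_;
      rtc::ThreadChecker capture_thread_checker_;
    };

Note that RTC_DCHECK compiles away in release builds, so the checks document and enforce the threading contract during testing without replacing the existing crit_ locking.
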
| if (processing_config == shared_state_.api_format_) { |
| return kNoError; |
| } |
| @@ -379,6 +390,8 @@ int AudioProcessingImpl::InitializeLocked() { |
| } |
| int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { |
| + // Called from both render and capture threads; a thread check is therefore |
| + // not possible. |
| for (const auto& stream : config.streams) { |
| if (stream.num_channels() < 0) { |
| return kBadNumberChannelsError; |
| @@ -453,9 +466,9 @@ int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { |
| return InitializeLocked(); |
| } |
| - |
| void AudioProcessingImpl::SetExtraOptions(const Config& config) { |
| CriticalSectionScoped crit_scoped(crit_); |
| + RTC_DCHECK(signal_thread_checker_.CalledOnValidThread()); |
| for (auto item : component_list_) { |
| item->SetExtraOptions(config); |
| } |
| @@ -468,27 +481,33 @@ void AudioProcessingImpl::SetExtraOptions(const Config& config) { |
| int AudioProcessingImpl::proc_sample_rate_hz() const { |
| + // TODO(peah): Add threadchecker when possible. |
| return fwd_proc_format_.sample_rate_hz(); |
| } |
| int AudioProcessingImpl::proc_split_sample_rate_hz() const { |
| + // TODO(peah): Add threadchecker when possible. |
| return split_rate_; |
| } |
| int AudioProcessingImpl::num_reverse_channels() const { |
| + // TODO(peah): Add threadchecker when possible. |
| return rev_proc_format_.num_channels(); |
| } |
| int AudioProcessingImpl::num_input_channels() const { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| return shared_state_.api_format_.input_stream().num_channels(); |
| } |
| int AudioProcessingImpl::num_output_channels() const { |
| + // TODO(peah): Add appropriate thread checker when possible. |
| return shared_state_.api_format_.output_stream().num_channels(); |
| } |
| void AudioProcessingImpl::set_output_will_be_muted(bool muted) { |
| CriticalSectionScoped lock(crit_); |
| + RTC_DCHECK(signal_thread_checker_.CalledOnValidThread()); |
| output_will_be_muted_ = muted; |
| if (agc_manager_.get()) { |
| agc_manager_->SetCaptureMuted(output_will_be_muted_); |
| @@ -504,6 +523,7 @@ int AudioProcessingImpl::ProcessStream(const float* const* src, |
| ChannelLayout output_layout, |
| float* const* dest) { |
| CriticalSectionScoped crit_scoped(crit_); |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| StreamConfig input_stream = shared_state_.api_format_.input_stream(); |
| input_stream.set_sample_rate_hz(input_sample_rate_hz); |
| input_stream.set_num_channels(ChannelsFromLayout(input_layout)); |
| @@ -525,6 +545,7 @@ int AudioProcessingImpl::ProcessStream(const float* const* src, |
| const StreamConfig& output_config, |
| float* const* dest) { |
| CriticalSectionScoped crit_scoped(crit_); |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| if (!src || !dest) { |
| return kNullPointerError; |
| } |
| @@ -576,6 +597,7 @@ int AudioProcessingImpl::ProcessStream(const float* const* src, |
| int AudioProcessingImpl::ProcessStream(AudioFrame* frame) { |
| CriticalSectionScoped crit_scoped(crit_); |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| echo_cancellation_->ReadQueuedRenderData(); |
| echo_control_mobile_->ReadQueuedRenderData(); |
| gain_control_->ReadQueuedRenderData(); |
| @@ -639,6 +661,7 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) { |
| } |
| int AudioProcessingImpl::ProcessStreamLocked() { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP |
| if (debug_file_->Open()) { |
| audioproc::Stream* msg = event_msg_->mutable_stream(); |
| @@ -720,6 +743,7 @@ int AudioProcessingImpl::AnalyzeReverseStream(const float* const* data, |
| size_t samples_per_channel, |
| int rev_sample_rate_hz, |
| ChannelLayout layout) { |
| + RTC_DCHECK(render_thread_checker_.CalledOnValidThread()); |
| const StreamConfig reverse_config = { |
| rev_sample_rate_hz, ChannelsFromLayout(layout), LayoutHasKeyboard(layout), |
| }; |
| @@ -734,6 +758,7 @@ int AudioProcessingImpl::ProcessReverseStream( |
| const StreamConfig& reverse_input_config, |
| const StreamConfig& reverse_output_config, |
| float* const* dest) { |
| + RTC_DCHECK(render_thread_checker_.CalledOnValidThread()); |
| RETURN_ON_ERR( |
| AnalyzeReverseStream(src, reverse_input_config, reverse_output_config)); |
| if (is_rev_processed()) { |
| @@ -755,6 +780,7 @@ int AudioProcessingImpl::AnalyzeReverseStream( |
| const StreamConfig& reverse_input_config, |
| const StreamConfig& reverse_output_config) { |
| CriticalSectionScoped crit_scoped(crit_); |
| + RTC_DCHECK(render_thread_checker_.CalledOnValidThread()); |
| if (src == NULL) { |
| return kNullPointerError; |
| } |
| @@ -792,6 +818,7 @@ int AudioProcessingImpl::AnalyzeReverseStream( |
| } |
| int AudioProcessingImpl::ProcessReverseStream(AudioFrame* frame) { |
| + RTC_DCHECK(render_thread_checker_.CalledOnValidThread()); |
| RETURN_ON_ERR(AnalyzeReverseStream(frame)); |
| if (is_rev_processed()) { |
| render_audio_->InterleaveTo(frame, true); |
| @@ -801,6 +828,7 @@ int AudioProcessingImpl::ProcessReverseStream(AudioFrame* frame) { |
| } |
| int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) { |
| + RTC_DCHECK(render_thread_checker_.CalledOnValidThread()); |
| CriticalSectionScoped crit_scoped(crit_); |
| if (frame == NULL) { |
| return kNullPointerError; |
| @@ -853,6 +881,7 @@ int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) { |
| } |
| int AudioProcessingImpl::ProcessReverseStreamLocked() { |
| + RTC_DCHECK(render_thread_checker_.CalledOnValidThread()); |
| AudioBuffer* ra = render_audio_.get(); // For brevity. |
| if (rev_proc_format_.sample_rate_hz() == kSampleRate32kHz) { |
| ra->SplitIntoFrequencyBands(); |
| @@ -878,6 +907,7 @@ int AudioProcessingImpl::ProcessReverseStreamLocked() { |
| } |
| int AudioProcessingImpl::set_stream_delay_ms(int delay) { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| Error retval = kNoError; |
| was_stream_delay_set_ = true; |
| delay += delay_offset_ms_; |
| @@ -898,29 +928,35 @@ int AudioProcessingImpl::set_stream_delay_ms(int delay) { |
| } |
| int AudioProcessingImpl::stream_delay_ms() const { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| return stream_delay_ms_; |
| } |
| bool AudioProcessingImpl::was_stream_delay_set() const { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| return was_stream_delay_set_; |
| } |
| void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| key_pressed_ = key_pressed; |
| } |
| void AudioProcessingImpl::set_delay_offset_ms(int offset) { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| CriticalSectionScoped crit_scoped(crit_); |
| delay_offset_ms_ = offset; |
| } |
| int AudioProcessingImpl::delay_offset_ms() const { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| return delay_offset_ms_; |
| } |
| int AudioProcessingImpl::StartDebugRecording( |
| const char filename[AudioProcessing::kMaxFilenameSize]) { |
| CriticalSectionScoped crit_scoped(crit_); |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| static_assert(kMaxFilenameSize == FileWrapper::kMaxFileNameSize, ""); |
| if (filename == NULL) { |
| @@ -950,6 +986,7 @@ int AudioProcessingImpl::StartDebugRecording( |
| int AudioProcessingImpl::StartDebugRecording(FILE* handle) { |
| CriticalSectionScoped crit_scoped(crit_); |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| if (handle == NULL) { |
| return kNullPointerError; |
| @@ -977,12 +1014,14 @@ int AudioProcessingImpl::StartDebugRecording(FILE* handle) { |
| int AudioProcessingImpl::StartDebugRecordingForPlatformFile( |
| rtc::PlatformFile handle) { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| FILE* stream = rtc::FdopenPlatformFileForWriting(handle); |
| return StartDebugRecording(stream); |
| } |
| int AudioProcessingImpl::StopDebugRecording() { |
| CriticalSectionScoped crit_scoped(crit_); |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP |
| // We just return if recording hasn't started. |
| @@ -1029,6 +1068,7 @@ VoiceDetection* AudioProcessingImpl::voice_detection() const { |
| } |
| bool AudioProcessingImpl::is_data_processed() const { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| if (beamformer_enabled_) { |
| return true; |
| } |
| @@ -1057,6 +1097,7 @@ bool AudioProcessingImpl::is_data_processed() const { |
| } |
| bool AudioProcessingImpl::output_copy_needed(bool is_data_processed) const { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| // Check if we've upmixed or downmixed the audio. |
| return ((shared_state_.api_format_.output_stream().num_channels() != |
| shared_state_.api_format_.input_stream().num_channels()) || |
| @@ -1064,12 +1105,14 @@ bool AudioProcessingImpl::output_copy_needed(bool is_data_processed) const { |
| } |
| bool AudioProcessingImpl::synthesis_needed(bool is_data_processed) const { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| return (is_data_processed && |
| (fwd_proc_format_.sample_rate_hz() == kSampleRate32kHz || |
| fwd_proc_format_.sample_rate_hz() == kSampleRate48kHz)); |
| } |
| bool AudioProcessingImpl::analysis_needed(bool is_data_processed) const { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| if (!is_data_processed && !voice_detection_->is_enabled() && |
| !transient_suppressor_enabled_) { |
| // Only level_estimator_ is enabled. |
| @@ -1083,15 +1126,18 @@ bool AudioProcessingImpl::analysis_needed(bool is_data_processed) const { |
| } |
| bool AudioProcessingImpl::is_rev_processed() const { |
| + RTC_DCHECK(render_thread_checker_.CalledOnValidThread()); |
| return intelligibility_enabled_ && intelligibility_enhancer_->active(); |
| } |
| bool AudioProcessingImpl::rev_conversion_needed() const { |
| + // Called from several threads, thread check not possible. |
| return (shared_state_.api_format_.reverse_input_stream() != |
| shared_state_.api_format_.reverse_output_stream()); |
| } |
| void AudioProcessingImpl::InitializeExperimentalAgc() { |
| + // Called from several threads, thread check not possible. |
| if (use_new_agc_) { |
| if (!agc_manager_.get()) { |
| agc_manager_.reset(new AgcManagerDirect(gain_control_, |
| @@ -1104,6 +1150,7 @@ void AudioProcessingImpl::InitializeExperimentalAgc() { |
| } |
| void AudioProcessingImpl::InitializeTransient() { |
| + // Called from several threads, thread check not possible. |
| if (transient_suppressor_enabled_) { |
| if (!transient_suppressor_.get()) { |
| transient_suppressor_.reset(new TransientSuppressor()); |
| @@ -1115,6 +1162,7 @@ void AudioProcessingImpl::InitializeTransient() { |
| } |
| void AudioProcessingImpl::InitializeBeamformer() { |
| + // Called from several threads, thread check not possible. |
| if (beamformer_enabled_) { |
| if (!beamformer_) { |
| beamformer_.reset( |
| @@ -1125,6 +1173,7 @@ void AudioProcessingImpl::InitializeBeamformer() { |
| } |
| void AudioProcessingImpl::InitializeIntelligibility() { |
| + // Called from several threads, thread check not possible. |
| if (intelligibility_enabled_) { |
| IntelligibilityEnhancer::Config config; |
| config.sample_rate_hz = split_rate_; |
| @@ -1135,6 +1184,7 @@ void AudioProcessingImpl::InitializeIntelligibility() { |
| } |
| void AudioProcessingImpl::MaybeUpdateHistograms() { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| static const int kMinDiffDelayMs = 60; |
| if (echo_cancellation()->is_enabled()) { |
| @@ -1181,6 +1231,7 @@ void AudioProcessingImpl::MaybeUpdateHistograms() { |
| } |
| void AudioProcessingImpl::UpdateHistogramsOnCallEnd() { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| CriticalSectionScoped crit_scoped(crit_); |
| if (stream_delay_jumps_ > -1) { |
| RTC_HISTOGRAM_ENUMERATION( |
| @@ -1200,6 +1251,7 @@ void AudioProcessingImpl::UpdateHistogramsOnCallEnd() { |
| #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP |
| int AudioProcessingImpl::WriteMessageToDebugFile() { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| int32_t size = event_msg_->ByteSize(); |
| if (size <= 0) { |
| return kUnspecifiedError; |
| @@ -1227,6 +1279,7 @@ int AudioProcessingImpl::WriteMessageToDebugFile() { |
| } |
| int AudioProcessingImpl::WriteInitMessage() { |
| + // Called from both render and capture threads; thread check not possible. |
| event_msg_->set_type(audioproc::Event::INIT); |
| audioproc::Init* msg = event_msg_->mutable_init(); |
| msg->set_sample_rate( |
| @@ -1248,6 +1301,7 @@ int AudioProcessingImpl::WriteInitMessage() { |
| } |
| int AudioProcessingImpl::WriteConfigMessage(bool forced) { |
| + RTC_DCHECK(capture_thread_checker_.CalledOnValidThread()); |
| audioproc::Config config; |
| config.set_aec_enabled(echo_cancellation_->is_enabled()); |
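
The constructor hunk at the top of this diff also passes the checkers into the component constructors (EchoCancellationImpl, EchoControlMobileImpl, GainControlImpl). A sketch of what the receiving side of that pattern could look like; the component name, member name, and method below are assumptions for illustration, not code from this CL:

    #include "webrtc/base/checks.h"          // RTC_DCHECK (assumed path)
    #include "webrtc/base/thread_checker.h"  // rtc::ThreadChecker (assumed path)

    // Hypothetical render-side component that borrows a checker owned by
    // AudioProcessingImpl instead of owning one itself, so both objects
    // agree on which thread counts as the render thread.
    class RenderSideComponent {
     public:
      explicit RenderSideComponent(rtc::ThreadChecker* render_thread_checker)
          : render_thread_checker_(render_thread_checker) {}

      void ProcessRenderAudio() {
        // Verify the call arrives on the thread the owner bound as "render".
        RTC_DCHECK(render_thread_checker_->CalledOnValidThread());
      }

     private:
      rtc::ThreadChecker* const render_thread_checker_;  // Not owned.
    };
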