Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/voice_engine/channel.h" | 11 #include "webrtc/voice_engine/channel.h" |
| 12 | 12 |
| 13 #include <algorithm> | 13 #include <algorithm> |
| 14 #include <utility> | 14 #include <utility> |
| 15 | 15 |
| 16 #include "webrtc/base/checks.h" | 16 #include "webrtc/base/checks.h" |
| 17 #include "webrtc/base/criticalsection.h" | |
| 17 #include "webrtc/base/format_macros.h" | 18 #include "webrtc/base/format_macros.h" |
| 18 #include "webrtc/base/logging.h" | 19 #include "webrtc/base/logging.h" |
| 19 #include "webrtc/base/thread_checker.h" | 20 #include "webrtc/base/thread_checker.h" |
| 20 #include "webrtc/base/timeutils.h" | 21 #include "webrtc/base/timeutils.h" |
| 21 #include "webrtc/common.h" | 22 #include "webrtc/common.h" |
| 22 #include "webrtc/config.h" | 23 #include "webrtc/config.h" |
| 23 #include "webrtc/modules/audio_device/include/audio_device.h" | 24 #include "webrtc/modules/audio_device/include/audio_device.h" |
| 24 #include "webrtc/modules/audio_processing/include/audio_processing.h" | 25 #include "webrtc/modules/audio_processing/include/audio_processing.h" |
| 25 #include "webrtc/modules/include/module_common_types.h" | 26 #include "webrtc/modules/include/module_common_types.h" |
| 26 #include "webrtc/modules/pacing/packet_router.h" | 27 #include "webrtc/modules/pacing/packet_router.h" |
| 27 #include "webrtc/modules/rtp_rtcp/include/receive_statistics.h" | 28 #include "webrtc/modules/rtp_rtcp/include/receive_statistics.h" |
| 28 #include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h" | 29 #include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h" |
| 29 #include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h" | 30 #include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h" |
| 30 #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h" | 31 #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h" |
| 31 #include "webrtc/modules/utility/include/audio_frame_operations.h" | 32 #include "webrtc/modules/utility/include/audio_frame_operations.h" |
| 32 #include "webrtc/modules/utility/include/process_thread.h" | 33 #include "webrtc/modules/utility/include/process_thread.h" |
| 33 #include "webrtc/system_wrappers/include/critical_section_wrapper.h" | |
| 34 #include "webrtc/system_wrappers/include/trace.h" | 34 #include "webrtc/system_wrappers/include/trace.h" |
| 35 #include "webrtc/voice_engine/include/voe_base.h" | 35 #include "webrtc/voice_engine/include/voe_base.h" |
| 36 #include "webrtc/voice_engine/include/voe_external_media.h" | 36 #include "webrtc/voice_engine/include/voe_external_media.h" |
| 37 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h" | 37 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h" |
| 38 #include "webrtc/voice_engine/output_mixer.h" | 38 #include "webrtc/voice_engine/output_mixer.h" |
| 39 #include "webrtc/voice_engine/statistics.h" | 39 #include "webrtc/voice_engine/statistics.h" |
| 40 #include "webrtc/voice_engine/transmit_mixer.h" | 40 #include "webrtc/voice_engine/transmit_mixer.h" |
| 41 #include "webrtc/voice_engine/utility.h" | 41 #include "webrtc/voice_engine/utility.h" |
| 42 | 42 |
| 43 #if defined(_WIN32) | 43 #if defined(_WIN32) |
| (...skipping 106 matching lines...) | |
| 150 struct ChannelStatistics : public RtcpStatistics { | 150 struct ChannelStatistics : public RtcpStatistics { |
| 151 ChannelStatistics() : rtcp(), max_jitter(0) {} | 151 ChannelStatistics() : rtcp(), max_jitter(0) {} |
| 152 | 152 |
| 153 RtcpStatistics rtcp; | 153 RtcpStatistics rtcp; |
| 154 uint32_t max_jitter; | 154 uint32_t max_jitter; |
| 155 }; | 155 }; |
| 156 | 156 |
| 157 // Statistics callback, called at each generation of a new RTCP report block. | 157 // Statistics callback, called at each generation of a new RTCP report block. |
| 158 class StatisticsProxy : public RtcpStatisticsCallback { | 158 class StatisticsProxy : public RtcpStatisticsCallback { |
| 159 public: | 159 public: |
| 160 StatisticsProxy(uint32_t ssrc) | 160 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {} |
| 161 : stats_lock_(CriticalSectionWrapper::CreateCriticalSection()), | |
| 162 ssrc_(ssrc) {} | |
| 163 virtual ~StatisticsProxy() {} | 161 virtual ~StatisticsProxy() {} |
| 164 | 162 |
| 165 void StatisticsUpdated(const RtcpStatistics& statistics, | 163 void StatisticsUpdated(const RtcpStatistics& statistics, |
| 166 uint32_t ssrc) override { | 164 uint32_t ssrc) override { |
| 167 if (ssrc != ssrc_) | 165 if (ssrc != ssrc_) |
| 168 return; | 166 return; |
| 169 | 167 |
| 170 CriticalSectionScoped cs(stats_lock_.get()); | 168 rtc::CritScope cs(&stats_lock_); |
| 171 stats_.rtcp = statistics; | 169 stats_.rtcp = statistics; |
| 172 if (statistics.jitter > stats_.max_jitter) { | 170 if (statistics.jitter > stats_.max_jitter) { |
| 173 stats_.max_jitter = statistics.jitter; | 171 stats_.max_jitter = statistics.jitter; |
| 174 } | 172 } |
| 175 } | 173 } |
| 176 | 174 |
| 177 void CNameChanged(const char* cname, uint32_t ssrc) override {} | 175 void CNameChanged(const char* cname, uint32_t ssrc) override {} |
| 178 | 176 |
| 179 ChannelStatistics GetStats() { | 177 ChannelStatistics GetStats() { |
| 180 CriticalSectionScoped cs(stats_lock_.get()); | 178 rtc::CritScope cs(&stats_lock_); |
| 181 return stats_; | 179 return stats_; |
| 182 } | 180 } |
| 183 | 181 |
| 184 private: | 182 private: |
| 185 // StatisticsUpdated calls are triggered from threads in the RTP module, | 183 // StatisticsUpdated calls are triggered from threads in the RTP module, |
| 186 // while GetStats calls can be triggered from the public voice engine API, | 184 // while GetStats calls can be triggered from the public voice engine API, |
| 187 // hence synchronization is needed. | 185 // hence synchronization is needed. |
| 188 rtc::scoped_ptr<CriticalSectionWrapper> stats_lock_; | 186 mutable rtc::CriticalSection stats_lock_; |
| | the sun (2016/01/21 13:07:28): nit: No need for mutable, unless you make GetStats |
| | tommi (2016/01/21 15:29:22): Done. |
| 189 const uint32_t ssrc_; | 187 const uint32_t ssrc_; |
| 190 ChannelStatistics stats_; | 188 ChannelStatistics stats_; |
| 191 }; | 189 }; |
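
The NEW column above shows the pattern this CL applies throughout the file: the heap-allocated `CriticalSectionWrapper` (created via `CreateCriticalSection()` and guarded with `CriticalSectionScoped`) becomes a plain `rtc::CriticalSection` member guarded by a scoped `rtc::CritScope`. Below is a minimal sketch of that pattern, assuming a build inside the WebRTC tree where `webrtc/base/criticalsection.h` is available; the class and field names are illustrative, not part of this change. It shows the const-getter variant the reviewer alludes to, where `mutable` on the lock is actually needed; in the CL itself `GetStats()` is not const, which is why the reviewer notes the qualifier is unnecessary there.

```cpp
// Minimal sketch of the rtc::CriticalSection / rtc::CritScope pattern used in
// the NEW column. CounterProxy, lock_ and count_ are illustrative names only.
#include "webrtc/base/criticalsection.h"

class CounterProxy {
 public:
  void Increment() {
    rtc::CritScope cs(&lock_);  // scoped lock, released at end of scope.
    ++count_;
  }

  int count() const {
    rtc::CritScope cs(&lock_);  // locking inside a const method is what makes
    return count_;              // the |mutable| qualifier necessary here.
  }

 private:
  mutable rtc::CriticalSection lock_;  // direct member, no scoped_ptr or
                                       // CreateCriticalSection() call.
  int count_ = 0;
};
```

Compared with the OLD column, this drops the heap allocation and the `scoped_ptr` indirection at every lock site, along with the `critical_section_wrapper.h` include.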
| 192 | 190 |
| 193 class VoERtcpObserver : public RtcpBandwidthObserver { | 191 class VoERtcpObserver : public RtcpBandwidthObserver { |
| 194 public: | 192 public: |
| 195 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {} | 193 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {} |
| 196 virtual ~VoERtcpObserver() {} | 194 virtual ~VoERtcpObserver() {} |
| 197 | 195 |
| 198 void OnReceivedEstimatedBitrate(uint32_t bitrate) override { | 196 void OnReceivedEstimatedBitrate(uint32_t bitrate) override { |
| (...skipping 92 matching lines...) | |
| 291 | 289 |
| 292 return 0; | 290 return 0; |
| 293 } | 291 } |
| 294 | 292 |
| 295 int32_t | 293 int32_t |
| 296 Channel::InFrameType(FrameType frame_type) | 294 Channel::InFrameType(FrameType frame_type) |
| 297 { | 295 { |
| 298 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 296 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 299 "Channel::InFrameType(frame_type=%d)", frame_type); | 297 "Channel::InFrameType(frame_type=%d)", frame_type); |
| 300 | 298 |
| 301 CriticalSectionScoped cs(&_callbackCritSect); | 299 rtc::CritScope cs(&_callbackCritSect); |
| 302 _sendFrameType = (frame_type == kAudioFrameSpeech); | 300 _sendFrameType = (frame_type == kAudioFrameSpeech); |
| 303 return 0; | 301 return 0; |
| 304 } | 302 } |
| 305 | 303 |
| 306 int32_t | 304 int32_t |
| 307 Channel::OnRxVadDetected(int vadDecision) | 305 Channel::OnRxVadDetected(int vadDecision) |
| 308 { | 306 { |
| 309 CriticalSectionScoped cs(&_callbackCritSect); | 307 rtc::CritScope cs(&_callbackCritSect); |
| 310 if (_rxVadObserverPtr) | 308 if (_rxVadObserverPtr) |
| 311 { | 309 { |
| 312 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision); | 310 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision); |
| 313 } | 311 } |
| 314 | 312 |
| 315 return 0; | 313 return 0; |
| 316 } | 314 } |
| 317 | 315 |
| 318 bool Channel::SendRtp(const uint8_t* data, | 316 bool Channel::SendRtp(const uint8_t* data, |
| 319 size_t len, | 317 size_t len, |
| 320 const PacketOptions& options) { | 318 const PacketOptions& options) { |
| 321 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), | 319 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), |
| 322 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len); | 320 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len); |
| 323 | 321 |
| 324 CriticalSectionScoped cs(&_callbackCritSect); | 322 rtc::CritScope cs(&_callbackCritSect); |
| 325 | 323 |
| 326 if (_transportPtr == NULL) | 324 if (_transportPtr == NULL) |
| 327 { | 325 { |
| 328 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), | 326 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), |
| 329 "Channel::SendPacket() failed to send RTP packet due to" | 327 "Channel::SendPacket() failed to send RTP packet due to" |
| 330 " invalid transport object"); | 328 " invalid transport object"); |
| 331 return false; | 329 return false; |
| 332 } | 330 } |
| 333 | 331 |
| 334 uint8_t* bufferToSendPtr = (uint8_t*)data; | 332 uint8_t* bufferToSendPtr = (uint8_t*)data; |
| (...skipping 10 matching lines...) | |
| 345 } | 343 } |
| 346 return true; | 344 return true; |
| 347 } | 345 } |
| 348 | 346 |
| 349 bool | 347 bool |
| 350 Channel::SendRtcp(const uint8_t *data, size_t len) | 348 Channel::SendRtcp(const uint8_t *data, size_t len) |
| 351 { | 349 { |
| 352 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), | 350 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), |
| 353 "Channel::SendRtcp(len=%" PRIuS ")", len); | 351 "Channel::SendRtcp(len=%" PRIuS ")", len); |
| 354 | 352 |
| 355 CriticalSectionScoped cs(&_callbackCritSect); | 353 rtc::CritScope cs(&_callbackCritSect); |
| 356 if (_transportPtr == NULL) | 354 if (_transportPtr == NULL) |
| 357 { | 355 { |
| 358 WEBRTC_TRACE(kTraceError, kTraceVoice, | 356 WEBRTC_TRACE(kTraceError, kTraceVoice, |
| 359 VoEId(_instanceId,_channelId), | 357 VoEId(_instanceId,_channelId), |
| 360 "Channel::SendRtcp() failed to send RTCP packet" | 358 "Channel::SendRtcp() failed to send RTCP packet" |
| 361 " due to invalid transport object"); | 359 " due to invalid transport object"); |
| 362 return false; | 360 return false; |
| 363 } | 361 } |
| 364 | 362 |
| 365 uint8_t* bufferToSendPtr = (uint8_t*)data; | 363 uint8_t* bufferToSendPtr = (uint8_t*)data; |
| (...skipping 193 matching lines...) | |
| 559 LOG(LS_ERROR) << "ProcessStream() error: " << err; | 557 LOG(LS_ERROR) << "ProcessStream() error: " << err; |
| 560 assert(false); | 558 assert(false); |
| 561 } | 559 } |
| 562 } | 560 } |
| 563 | 561 |
| 564 { | 562 { |
| 565 // Pass the audio buffers to an optional sink callback, before applying | 563 // Pass the audio buffers to an optional sink callback, before applying |
| 566 // scaling/panning, as that applies to the mix operation. | 564 // scaling/panning, as that applies to the mix operation. |
| 567 // External recipients of the audio (e.g. via AudioTrack), will do their | 565 // External recipients of the audio (e.g. via AudioTrack), will do their |
| 568 // own mixing/dynamic processing. | 566 // own mixing/dynamic processing. |
| 569 CriticalSectionScoped cs(&_callbackCritSect); | 567 rtc::CritScope cs(&_callbackCritSect); |
| 570 if (audio_sink_) { | 568 if (audio_sink_) { |
| 571 AudioSinkInterface::Data data( | 569 AudioSinkInterface::Data data( |
| 572 &audioFrame->data_[0], | 570 &audioFrame->data_[0], |
| 573 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_, | 571 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_, |
| 574 audioFrame->num_channels_, audioFrame->timestamp_); | 572 audioFrame->num_channels_, audioFrame->timestamp_); |
| 575 audio_sink_->OnData(data); | 573 audio_sink_->OnData(data); |
| 576 } | 574 } |
| 577 } | 575 } |
| 578 | 576 |
| 579 float output_gain = 1.0f; | 577 float output_gain = 1.0f; |
| 580 float left_pan = 1.0f; | 578 float left_pan = 1.0f; |
| 581 float right_pan = 1.0f; | 579 float right_pan = 1.0f; |
| 582 { | 580 { |
| 583 CriticalSectionScoped cs(&volume_settings_critsect_); | 581 rtc::CritScope cs(&volume_settings_critsect_); |
| 584 output_gain = _outputGain; | 582 output_gain = _outputGain; |
| 585 left_pan = _panLeft; | 583 left_pan = _panLeft; |
| 586 right_pan= _panRight; | 584 right_pan= _panRight; |
| 587 } | 585 } |
| 588 | 586 |
| 589 // Output volume scaling | 587 // Output volume scaling |
| 590 if (output_gain < 0.99f || output_gain > 1.01f) | 588 if (output_gain < 0.99f || output_gain > 1.01f) |
| 591 { | 589 { |
| 592 AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame); | 590 AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame); |
| 593 } | 591 } |
| (...skipping 19 matching lines...) | |
| 613 | 611 |
| 614 // Mix decoded PCM output with file if file mixing is enabled | 612 // Mix decoded PCM output with file if file mixing is enabled |
| 615 if (state.output_file_playing) | 613 if (state.output_file_playing) |
| 616 { | 614 { |
| 617 MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_); | 615 MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_); |
| 618 } | 616 } |
| 619 | 617 |
| 620 // External media | 618 // External media |
| 621 if (_outputExternalMedia) | 619 if (_outputExternalMedia) |
| 622 { | 620 { |
| 623 CriticalSectionScoped cs(&_callbackCritSect); | 621 rtc::CritScope cs(&_callbackCritSect); |
| 624 const bool isStereo = (audioFrame->num_channels_ == 2); | 622 const bool isStereo = (audioFrame->num_channels_ == 2); |
| 625 if (_outputExternalMediaCallbackPtr) | 623 if (_outputExternalMediaCallbackPtr) |
| 626 { | 624 { |
| 627 _outputExternalMediaCallbackPtr->Process( | 625 _outputExternalMediaCallbackPtr->Process( |
| 628 _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_, | 626 _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_, |
| 629 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_, | 627 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_, |
| 630 isStereo); | 628 isStereo); |
| 631 } | 629 } |
| 632 } | 630 } |
| 633 | 631 |
| 634 // Record playout if enabled | 632 // Record playout if enabled |
| 635 { | 633 { |
| 636 CriticalSectionScoped cs(&_fileCritSect); | 634 rtc::CritScope cs(&_fileCritSect); |
| 637 | 635 |
| 638 if (_outputFileRecording && _outputFileRecorderPtr) | 636 if (_outputFileRecording && _outputFileRecorderPtr) |
| 639 { | 637 { |
| 640 _outputFileRecorderPtr->RecordAudioToFile(*audioFrame); | 638 _outputFileRecorderPtr->RecordAudioToFile(*audioFrame); |
| 641 } | 639 } |
| 642 } | 640 } |
| 643 | 641 |
| 644 // Measure audio level (0-9) | 642 // Measure audio level (0-9) |
| 645 _outputAudioLevel.ComputeLevel(*audioFrame); | 643 _outputAudioLevel.ComputeLevel(*audioFrame); |
| 646 | 644 |
| 647 if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) { | 645 if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) { |
| 648 // The first frame with a valid rtp timestamp. | 646 // The first frame with a valid rtp timestamp. |
| 649 capture_start_rtp_time_stamp_ = audioFrame->timestamp_; | 647 capture_start_rtp_time_stamp_ = audioFrame->timestamp_; |
| 650 } | 648 } |
| 651 | 649 |
| 652 if (capture_start_rtp_time_stamp_ >= 0) { | 650 if (capture_start_rtp_time_stamp_ >= 0) { |
| 653 // audioFrame.timestamp_ should be valid from now on. | 651 // audioFrame.timestamp_ should be valid from now on. |
| 654 | 652 |
| 655 // Compute elapsed time. | 653 // Compute elapsed time. |
| 656 int64_t unwrap_timestamp = | 654 int64_t unwrap_timestamp = |
| 657 rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_); | 655 rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_); |
| 658 audioFrame->elapsed_time_ms_ = | 656 audioFrame->elapsed_time_ms_ = |
| 659 (unwrap_timestamp - capture_start_rtp_time_stamp_) / | 657 (unwrap_timestamp - capture_start_rtp_time_stamp_) / |
| 660 (GetPlayoutFrequency() / 1000); | 658 (GetPlayoutFrequency() / 1000); |
| 661 | 659 |
| 662 { | 660 { |
| 663 CriticalSectionScoped lock(ts_stats_lock_.get()); | 661 rtc::CritScope lock(&ts_stats_lock_); |
| 664 // Compute ntp time. | 662 // Compute ntp time. |
| 665 audioFrame->ntp_time_ms_ = ntp_estimator_.Estimate( | 663 audioFrame->ntp_time_ms_ = ntp_estimator_.Estimate( |
| 666 audioFrame->timestamp_); | 664 audioFrame->timestamp_); |
| 667 // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received. | 665 // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received. |
| 668 if (audioFrame->ntp_time_ms_ > 0) { | 666 if (audioFrame->ntp_time_ms_ > 0) { |
| 669 // Compute |capture_start_ntp_time_ms_| so that | 667 // Compute |capture_start_ntp_time_ms_| so that |
| 670 // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_| | 668 // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_| |
| 671 capture_start_ntp_time_ms_ = | 669 capture_start_ntp_time_ms_ = |
| 672 audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_; | 670 audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_; |
| 673 } | 671 } |
| (...skipping 23 matching lines...) | |
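
The hunk above converts an RTP timestamp delta into milliseconds by dividing by ticks-per-millisecond (`GetPlayoutFrequency() / 1000`) and then back-computes `capture_start_ntp_time_ms_` as `ntp_time_ms_ - elapsed_time_ms_`, so that capture start plus elapsed time equals the estimated NTP time. A small self-contained sketch of the same arithmetic, with made-up numbers purely for illustration:

```cpp
// Worked example of the timestamp arithmetic in the hunk above.
// All values are invented; at a 48 kHz playout frequency there are
// 48 RTP ticks per millisecond.
#include <cstdint>
#include <cstdio>

int main() {
  const int64_t capture_start_rtp = 1000000;  // first valid RTP timestamp
  const int64_t unwrapped_rtp     = 1096000;  // current unwrapped timestamp
  const int playout_frequency_hz  = 48000;

  const int64_t elapsed_ms =
      (unwrapped_rtp - capture_start_rtp) / (playout_frequency_hz / 1000);
  // 96000 ticks / 48 ticks-per-ms = 2000 ms of elapsed capture time.

  const int64_t ntp_time_ms = 3700000002000;  // value from the NTP estimator
  const int64_t capture_start_ntp_ms = ntp_time_ms - elapsed_ms;
  // capture_start_ntp_ms + elapsed_ms == ntp_time_ms by construction.

  std::printf("elapsed=%lld ms, capture_start_ntp=%lld ms\n",
              (long long)elapsed_ms, (long long)capture_start_ntp_ms);
  return 0;
}
```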
| 697 { | 695 { |
| 698 highestNeeded = receiveFrequency; | 696 highestNeeded = receiveFrequency; |
| 699 } | 697 } |
| 700 | 698 |
| 701 // Special case, if we're playing a file on the playout side | 699 // Special case, if we're playing a file on the playout side |
| 702 // we take that frequency into consideration as well | 700 // we take that frequency into consideration as well |
| 703 // This is not needed on sending side, since the codec will | 701 // This is not needed on sending side, since the codec will |
| 704 // limit the spectrum anyway. | 702 // limit the spectrum anyway. |
| 705 if (channel_state_.Get().output_file_playing) | 703 if (channel_state_.Get().output_file_playing) |
| 706 { | 704 { |
| 707 CriticalSectionScoped cs(&_fileCritSect); | 705 rtc::CritScope cs(&_fileCritSect); |
| 708 if (_outputFilePlayerPtr) | 706 if (_outputFilePlayerPtr) |
| 709 { | 707 { |
| 710 if(_outputFilePlayerPtr->Frequency()>highestNeeded) | 708 if(_outputFilePlayerPtr->Frequency()>highestNeeded) |
| 711 { | 709 { |
| 712 highestNeeded=_outputFilePlayerPtr->Frequency(); | 710 highestNeeded=_outputFilePlayerPtr->Frequency(); |
| 713 } | 711 } |
| 714 } | 712 } |
| 715 } | 713 } |
| 716 | 714 |
| 717 return(highestNeeded); | 715 return(highestNeeded); |
| (...skipping 65 matching lines...) | |
| 783 } | 781 } |
| 784 | 782 |
| 785 void | 783 void |
| 786 Channel::RecordFileEnded(int32_t id) | 784 Channel::RecordFileEnded(int32_t id) |
| 787 { | 785 { |
| 788 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), | 786 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), |
| 789 "Channel::RecordFileEnded(id=%d)", id); | 787 "Channel::RecordFileEnded(id=%d)", id); |
| 790 | 788 |
| 791 assert(id == _outputFileRecorderId); | 789 assert(id == _outputFileRecorderId); |
| 792 | 790 |
| 793 CriticalSectionScoped cs(&_fileCritSect); | 791 rtc::CritScope cs(&_fileCritSect); |
| 794 | 792 |
| 795 _outputFileRecording = false; | 793 _outputFileRecording = false; |
| 796 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, | 794 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, |
| 797 VoEId(_instanceId,_channelId), | 795 VoEId(_instanceId,_channelId), |
| 798 "Channel::RecordFileEnded() => output file recorder module is" | 796 "Channel::RecordFileEnded() => output file recorder module is" |
| 799 " shutdown"); | 797 " shutdown"); |
| 800 } | 798 } |
| 801 | 799 |
| 802 Channel::Channel(int32_t channelId, | 800 Channel::Channel(int32_t channelId, |
| 803 uint32_t instanceId, | 801 uint32_t instanceId, |
| 804 RtcEventLog* const event_log, | 802 RtcEventLog* const event_log, |
| 805 const Config& config) | 803 const Config& config) |
| 806 : _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()), | 804 : _instanceId(instanceId), |
| 807 _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()), | |
| 808 volume_settings_critsect_( | |
| 809 *CriticalSectionWrapper::CreateCriticalSection()), | |
| 810 _instanceId(instanceId), | |
| 811 _channelId(channelId), | 805 _channelId(channelId), |
| 812 event_log_(event_log), | 806 event_log_(event_log), |
| 813 rtp_header_parser_(RtpHeaderParser::Create()), | 807 rtp_header_parser_(RtpHeaderParser::Create()), |
| 814 rtp_payload_registry_( | 808 rtp_payload_registry_( |
| 815 new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))), | 809 new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))), |
| 816 rtp_receive_statistics_( | 810 rtp_receive_statistics_( |
| 817 ReceiveStatistics::Create(Clock::GetRealTimeClock())), | 811 ReceiveStatistics::Create(Clock::GetRealTimeClock())), |
| 818 rtp_receiver_( | 812 rtp_receiver_( |
| 819 RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(), | 813 RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(), |
| 820 this, | 814 this, |
| (...skipping 20 matching lines...) | |
| 841 _timeStamp(0), // This is just an offset, RTP module will add it's own | 835 _timeStamp(0), // This is just an offset, RTP module will add it's own |
| 842 // random offset | 836 // random offset |
| 843 _sendTelephoneEventPayloadType(106), | 837 _sendTelephoneEventPayloadType(106), |
| 844 ntp_estimator_(Clock::GetRealTimeClock()), | 838 ntp_estimator_(Clock::GetRealTimeClock()), |
| 845 jitter_buffer_playout_timestamp_(0), | 839 jitter_buffer_playout_timestamp_(0), |
| 846 playout_timestamp_rtp_(0), | 840 playout_timestamp_rtp_(0), |
| 847 playout_timestamp_rtcp_(0), | 841 playout_timestamp_rtcp_(0), |
| 848 playout_delay_ms_(0), | 842 playout_delay_ms_(0), |
| 849 _numberOfDiscardedPackets(0), | 843 _numberOfDiscardedPackets(0), |
| 850 send_sequence_number_(0), | 844 send_sequence_number_(0), |
| 851 ts_stats_lock_(CriticalSectionWrapper::CreateCriticalSection()), | |
| 852 rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()), | 845 rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()), |
| 853 capture_start_rtp_time_stamp_(-1), | 846 capture_start_rtp_time_stamp_(-1), |
| 854 capture_start_ntp_time_ms_(-1), | 847 capture_start_ntp_time_ms_(-1), |
| 855 _engineStatisticsPtr(NULL), | 848 _engineStatisticsPtr(NULL), |
| 856 _outputMixerPtr(NULL), | 849 _outputMixerPtr(NULL), |
| 857 _transmitMixerPtr(NULL), | 850 _transmitMixerPtr(NULL), |
| 858 _moduleProcessThreadPtr(NULL), | 851 _moduleProcessThreadPtr(NULL), |
| 859 _audioDeviceModulePtr(NULL), | 852 _audioDeviceModulePtr(NULL), |
| 860 _voiceEngineObserverPtr(NULL), | 853 _voiceEngineObserverPtr(NULL), |
| 861 _callbackCritSectPtr(NULL), | 854 _callbackCritSectPtr(NULL), |
| 862 _transportPtr(NULL), | 855 _transportPtr(NULL), |
| 863 _rxVadObserverPtr(NULL), | 856 _rxVadObserverPtr(NULL), |
| 864 _oldVadDecision(-1), | 857 _oldVadDecision(-1), |
| 865 _sendFrameType(0), | 858 _sendFrameType(0), |
| 866 _externalMixing(false), | 859 _externalMixing(false), |
| 867 _mixFileWithMicrophone(false), | 860 _mixFileWithMicrophone(false), |
| 868 _mute(false), | 861 _mute(false), |
| 869 _panLeft(1.0f), | 862 _panLeft(1.0f), |
| 870 _panRight(1.0f), | 863 _panRight(1.0f), |
| 871 _outputGain(1.0f), | 864 _outputGain(1.0f), |
| 872 _playOutbandDtmfEvent(false), | 865 _playOutbandDtmfEvent(false), |
| 873 _playInbandDtmfEvent(false), | 866 _playInbandDtmfEvent(false), |
| 874 _lastLocalTimeStamp(0), | 867 _lastLocalTimeStamp(0), |
| 875 _lastPayloadType(0), | 868 _lastPayloadType(0), |
| 876 _includeAudioLevelIndication(false), | 869 _includeAudioLevelIndication(false), |
| 877 _outputSpeechType(AudioFrame::kNormalSpeech), | 870 _outputSpeechType(AudioFrame::kNormalSpeech), |
| 878 video_sync_lock_(CriticalSectionWrapper::CreateCriticalSection()), | |
| 879 _average_jitter_buffer_delay_us(0), | 871 _average_jitter_buffer_delay_us(0), |
| 880 _previousTimestamp(0), | 872 _previousTimestamp(0), |
| 881 _recPacketDelayMs(20), | 873 _recPacketDelayMs(20), |
| 882 _RxVadDetection(false), | 874 _RxVadDetection(false), |
| 883 _rxAgcIsEnabled(false), | 875 _rxAgcIsEnabled(false), |
| 884 _rxNsIsEnabled(false), | 876 _rxNsIsEnabled(false), |
| 885 restored_packet_in_use_(false), | 877 restored_packet_in_use_(false), |
| 886 rtcp_observer_(new VoERtcpObserver(this)), | 878 rtcp_observer_(new VoERtcpObserver(this)), |
| 887 network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())), | 879 network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())), |
| 888 assoc_send_channel_lock_(CriticalSectionWrapper::CreateCriticalSection()), | |
| 889 associate_send_channel_(ChannelOwner(nullptr)), | 880 associate_send_channel_(ChannelOwner(nullptr)), |
| 890 pacing_enabled_(config.Get<VoicePacing>().enabled), | 881 pacing_enabled_(config.Get<VoicePacing>().enabled), |
| 891 feedback_observer_proxy_(pacing_enabled_ ? new TransportFeedbackProxy() | 882 feedback_observer_proxy_(pacing_enabled_ ? new TransportFeedbackProxy() |
| 892 : nullptr), | 883 : nullptr), |
| 893 seq_num_allocator_proxy_( | 884 seq_num_allocator_proxy_( |
| 894 pacing_enabled_ ? new TransportSequenceNumberProxy() : nullptr), | 885 pacing_enabled_ ? new TransportSequenceNumberProxy() : nullptr), |
| 895 rtp_packet_sender_proxy_(pacing_enabled_ ? new RtpPacketSenderProxy() | 886 rtp_packet_sender_proxy_(pacing_enabled_ ? new RtpPacketSenderProxy() |
| 896 : nullptr) { | 887 : nullptr) { |
| 897 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId), | 888 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId), |
| 898 "Channel::Channel() - ctor"); | 889 "Channel::Channel() - ctor"); |
| (...skipping 46 matching lines...) | |
| 945 DeRegisterExternalMediaProcessing(kPlaybackPerChannel); | 936 DeRegisterExternalMediaProcessing(kPlaybackPerChannel); |
| 946 } | 937 } |
| 947 if (channel_state_.Get().input_external_media) | 938 if (channel_state_.Get().input_external_media) |
| 948 { | 939 { |
| 949 DeRegisterExternalMediaProcessing(kRecordingPerChannel); | 940 DeRegisterExternalMediaProcessing(kRecordingPerChannel); |
| 950 } | 941 } |
| 951 StopSend(); | 942 StopSend(); |
| 952 StopPlayout(); | 943 StopPlayout(); |
| 953 | 944 |
| 954 { | 945 { |
| 955 CriticalSectionScoped cs(&_fileCritSect); | 946 rtc::CritScope cs(&_fileCritSect); |
| 956 if (_inputFilePlayerPtr) | 947 if (_inputFilePlayerPtr) |
| 957 { | 948 { |
| 958 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 949 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| 959 _inputFilePlayerPtr->StopPlayingFile(); | 950 _inputFilePlayerPtr->StopPlayingFile(); |
| 960 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr); | 951 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr); |
| 961 _inputFilePlayerPtr = NULL; | 952 _inputFilePlayerPtr = NULL; |
| 962 } | 953 } |
| 963 if (_outputFilePlayerPtr) | 954 if (_outputFilePlayerPtr) |
| 964 { | 955 { |
| 965 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 956 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| (...skipping 25 matching lines...) | |
| 991 { | 982 { |
| 992 WEBRTC_TRACE(kTraceWarning, kTraceVoice, | 983 WEBRTC_TRACE(kTraceWarning, kTraceVoice, |
| 993 VoEId(_instanceId,_channelId), | 984 VoEId(_instanceId,_channelId), |
| 994 "~Channel() failed to de-register VAD callback" | 985 "~Channel() failed to de-register VAD callback" |
| 995 " (Audio coding module)"); | 986 " (Audio coding module)"); |
| 996 } | 987 } |
| 997 // De-register modules in process thread | 988 // De-register modules in process thread |
| 998 _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); | 989 _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); |
| 999 | 990 |
| 1000 // End of modules shutdown | 991 // End of modules shutdown |
| 1001 | |
| 1002 // Delete other objects | |
| 1003 delete &_callbackCritSect; | |
| 1004 delete &_fileCritSect; | |
| 1005 delete &volume_settings_critsect_; | |
| 1006 } | 992 } |
| 1007 | 993 |
| 1008 int32_t | 994 int32_t |
| 1009 Channel::Init() | 995 Channel::Init() |
| 1010 { | 996 { |
| 1011 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 997 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1012 "Channel::Init()"); | 998 "Channel::Init()"); |
| 1013 | 999 |
| 1014 channel_state_.Reset(); | 1000 channel_state_.Reset(); |
| 1015 | 1001 |
| (...skipping 140 matching lines...) | |
| 1156 return 0; | 1142 return 0; |
| 1157 } | 1143 } |
| 1158 | 1144 |
| 1159 int32_t | 1145 int32_t |
| 1160 Channel::SetEngineInformation(Statistics& engineStatistics, | 1146 Channel::SetEngineInformation(Statistics& engineStatistics, |
| 1161 OutputMixer& outputMixer, | 1147 OutputMixer& outputMixer, |
| 1162 voe::TransmitMixer& transmitMixer, | 1148 voe::TransmitMixer& transmitMixer, |
| 1163 ProcessThread& moduleProcessThread, | 1149 ProcessThread& moduleProcessThread, |
| 1164 AudioDeviceModule& audioDeviceModule, | 1150 AudioDeviceModule& audioDeviceModule, |
| 1165 VoiceEngineObserver* voiceEngineObserver, | 1151 VoiceEngineObserver* voiceEngineObserver, |
| 1166 CriticalSectionWrapper* callbackCritSect) | 1152 rtc::CriticalSection* callbackCritSect) |
| 1167 { | 1153 { |
| 1168 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1154 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1169 "Channel::SetEngineInformation()"); | 1155 "Channel::SetEngineInformation()"); |
| 1170 _engineStatisticsPtr = &engineStatistics; | 1156 _engineStatisticsPtr = &engineStatistics; |
| 1171 _outputMixerPtr = &outputMixer; | 1157 _outputMixerPtr = &outputMixer; |
| 1172 _transmitMixerPtr = &transmitMixer, | 1158 _transmitMixerPtr = &transmitMixer, |
| 1173 _moduleProcessThreadPtr = &moduleProcessThread; | 1159 _moduleProcessThreadPtr = &moduleProcessThread; |
| 1174 _audioDeviceModulePtr = &audioDeviceModule; | 1160 _audioDeviceModulePtr = &audioDeviceModule; |
| 1175 _voiceEngineObserverPtr = voiceEngineObserver; | 1161 _voiceEngineObserverPtr = voiceEngineObserver; |
| 1176 _callbackCritSectPtr = callbackCritSect; | 1162 _callbackCritSectPtr = callbackCritSect; |
| 1177 return 0; | 1163 return 0; |
| 1178 } | 1164 } |
| 1179 | 1165 |
| 1180 int32_t | 1166 int32_t |
| 1181 Channel::UpdateLocalTimeStamp() | 1167 Channel::UpdateLocalTimeStamp() |
| 1182 { | 1168 { |
| 1183 | 1169 |
| 1184 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_); | 1170 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_); |
| 1185 return 0; | 1171 return 0; |
| 1186 } | 1172 } |
| 1187 | 1173 |
| 1188 void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) { | 1174 void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) { |
| 1189 CriticalSectionScoped cs(&_callbackCritSect); | 1175 rtc::CritScope cs(&_callbackCritSect); |
| 1190 audio_sink_ = std::move(sink); | 1176 audio_sink_ = std::move(sink); |
| 1191 } | 1177 } |
| 1192 | 1178 |
| 1193 int32_t | 1179 int32_t |
| 1194 Channel::StartPlayout() | 1180 Channel::StartPlayout() |
| 1195 { | 1181 { |
| 1196 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1182 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1197 "Channel::StartPlayout()"); | 1183 "Channel::StartPlayout()"); |
| 1198 if (channel_state_.Get().playing) | 1184 if (channel_state_.Get().playing) |
| 1199 { | 1185 { |
| (...skipping 59 matching lines...) | |
| 1259 { | 1245 { |
| 1260 return 0; | 1246 return 0; |
| 1261 } | 1247 } |
| 1262 channel_state_.SetSending(true); | 1248 channel_state_.SetSending(true); |
| 1263 | 1249 |
| 1264 if (_rtpRtcpModule->SetSendingStatus(true) != 0) | 1250 if (_rtpRtcpModule->SetSendingStatus(true) != 0) |
| 1265 { | 1251 { |
| 1266 _engineStatisticsPtr->SetLastError( | 1252 _engineStatisticsPtr->SetLastError( |
| 1267 VE_RTP_RTCP_MODULE_ERROR, kTraceError, | 1253 VE_RTP_RTCP_MODULE_ERROR, kTraceError, |
| 1268 "StartSend() RTP/RTCP failed to start sending"); | 1254 "StartSend() RTP/RTCP failed to start sending"); |
| 1269 CriticalSectionScoped cs(&_callbackCritSect); | 1255 rtc::CritScope cs(&_callbackCritSect); |
| 1270 channel_state_.SetSending(false); | 1256 channel_state_.SetSending(false); |
| 1271 return -1; | 1257 return -1; |
| 1272 } | 1258 } |
| 1273 | 1259 |
| 1274 return 0; | 1260 return 0; |
| 1275 } | 1261 } |
| 1276 | 1262 |
| 1277 int32_t | 1263 int32_t |
| 1278 Channel::StopSend() | 1264 Channel::StopSend() |
| 1279 { | 1265 { |
| (...skipping 51 matching lines...) | |
| 1331 | 1317 |
| 1332 channel_state_.SetReceiving(false); | 1318 channel_state_.SetReceiving(false); |
| 1333 return 0; | 1319 return 0; |
| 1334 } | 1320 } |
| 1335 | 1321 |
| 1336 int32_t | 1322 int32_t |
| 1337 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) | 1323 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) |
| 1338 { | 1324 { |
| 1339 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1325 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1340 "Channel::RegisterVoiceEngineObserver()"); | 1326 "Channel::RegisterVoiceEngineObserver()"); |
| 1341 CriticalSectionScoped cs(&_callbackCritSect); | 1327 rtc::CritScope cs(&_callbackCritSect); |
| 1342 | 1328 |
| 1343 if (_voiceEngineObserverPtr) | 1329 if (_voiceEngineObserverPtr) |
| 1344 { | 1330 { |
| 1345 _engineStatisticsPtr->SetLastError( | 1331 _engineStatisticsPtr->SetLastError( |
| 1346 VE_INVALID_OPERATION, kTraceError, | 1332 VE_INVALID_OPERATION, kTraceError, |
| 1347 "RegisterVoiceEngineObserver() observer already enabled"); | 1333 "RegisterVoiceEngineObserver() observer already enabled"); |
| 1348 return -1; | 1334 return -1; |
| 1349 } | 1335 } |
| 1350 _voiceEngineObserverPtr = &observer; | 1336 _voiceEngineObserverPtr = &observer; |
| 1351 return 0; | 1337 return 0; |
| 1352 } | 1338 } |
| 1353 | 1339 |
| 1354 int32_t | 1340 int32_t |
| 1355 Channel::DeRegisterVoiceEngineObserver() | 1341 Channel::DeRegisterVoiceEngineObserver() |
| 1356 { | 1342 { |
| 1357 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1343 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1358 "Channel::DeRegisterVoiceEngineObserver()"); | 1344 "Channel::DeRegisterVoiceEngineObserver()"); |
| 1359 CriticalSectionScoped cs(&_callbackCritSect); | 1345 rtc::CritScope cs(&_callbackCritSect); |
| 1360 | 1346 |
| 1361 if (!_voiceEngineObserverPtr) | 1347 if (!_voiceEngineObserverPtr) |
| 1362 { | 1348 { |
| 1363 _engineStatisticsPtr->SetLastError( | 1349 _engineStatisticsPtr->SetLastError( |
| 1364 VE_INVALID_OPERATION, kTraceWarning, | 1350 VE_INVALID_OPERATION, kTraceWarning, |
| 1365 "DeRegisterVoiceEngineObserver() observer already disabled"); | 1351 "DeRegisterVoiceEngineObserver() observer already disabled"); |
| 1366 return 0; | 1352 return 0; |
| 1367 } | 1353 } |
| 1368 _voiceEngineObserverPtr = NULL; | 1354 _voiceEngineObserverPtr = NULL; |
| 1369 return 0; | 1355 return 0; |
| (...skipping 286 matching lines...) | |
| 1656 return -1; | 1642 return -1; |
| 1657 } | 1643 } |
| 1658 return 0; | 1644 return 0; |
| 1659 } | 1645 } |
| 1660 | 1646 |
| 1661 int32_t Channel::RegisterExternalTransport(Transport& transport) | 1647 int32_t Channel::RegisterExternalTransport(Transport& transport) |
| 1662 { | 1648 { |
| 1663 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), | 1649 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), |
| 1664 "Channel::RegisterExternalTransport()"); | 1650 "Channel::RegisterExternalTransport()"); |
| 1665 | 1651 |
| 1666 CriticalSectionScoped cs(&_callbackCritSect); | 1652 rtc::CritScope cs(&_callbackCritSect); |
| 1667 | 1653 |
| 1668 if (_externalTransport) | 1654 if (_externalTransport) |
| 1669 { | 1655 { |
| 1670 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, | 1656 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, |
| 1671 kTraceError, | 1657 kTraceError, |
| 1672 "RegisterExternalTransport() external transport already enabled"); | 1658 "RegisterExternalTransport() external transport already enabled"); |
| 1673 return -1; | 1659 return -1; |
| 1674 } | 1660 } |
| 1675 _externalTransport = true; | 1661 _externalTransport = true; |
| 1676 _transportPtr = &transport; | 1662 _transportPtr = &transport; |
| 1677 return 0; | 1663 return 0; |
| 1678 } | 1664 } |
| 1679 | 1665 |
| 1680 int32_t | 1666 int32_t |
| 1681 Channel::DeRegisterExternalTransport() | 1667 Channel::DeRegisterExternalTransport() |
| 1682 { | 1668 { |
| 1683 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1669 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1684 "Channel::DeRegisterExternalTransport()"); | 1670 "Channel::DeRegisterExternalTransport()"); |
| 1685 | 1671 |
| 1686 CriticalSectionScoped cs(&_callbackCritSect); | 1672 rtc::CritScope cs(&_callbackCritSect); |
| 1687 | 1673 |
| 1688 if (!_transportPtr) | 1674 if (!_transportPtr) |
| 1689 { | 1675 { |
| 1690 _engineStatisticsPtr->SetLastError( | 1676 _engineStatisticsPtr->SetLastError( |
| 1691 VE_INVALID_OPERATION, kTraceWarning, | 1677 VE_INVALID_OPERATION, kTraceWarning, |
| 1692 "DeRegisterExternalTransport() external transport already " | 1678 "DeRegisterExternalTransport() external transport already " |
| 1693 "disabled"); | 1679 "disabled"); |
| 1694 return 0; | 1680 return 0; |
| 1695 } | 1681 } |
| 1696 _externalTransport = false; | 1682 _externalTransport = false; |
| (...skipping 123 matching lines...) | |
| 1820 uint32_t ntp_secs = 0; | 1806 uint32_t ntp_secs = 0; |
| 1821 uint32_t ntp_frac = 0; | 1807 uint32_t ntp_frac = 0; |
| 1822 uint32_t rtp_timestamp = 0; | 1808 uint32_t rtp_timestamp = 0; |
| 1823 if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, | 1809 if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, |
| 1824 &rtp_timestamp)) { | 1810 &rtp_timestamp)) { |
| 1825 // Waiting for RTCP. | 1811 // Waiting for RTCP. |
| 1826 return 0; | 1812 return 0; |
| 1827 } | 1813 } |
| 1828 | 1814 |
| 1829 { | 1815 { |
| 1830 CriticalSectionScoped lock(ts_stats_lock_.get()); | 1816 rtc::CritScope lock(&ts_stats_lock_); |
| 1831 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); | 1817 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); |
| 1832 } | 1818 } |
| 1833 return 0; | 1819 return 0; |
| 1834 } | 1820 } |
| 1835 | 1821 |
| 1836 int Channel::StartPlayingFileLocally(const char* fileName, | 1822 int Channel::StartPlayingFileLocally(const char* fileName, |
| 1837 bool loop, | 1823 bool loop, |
| 1838 FileFormats format, | 1824 FileFormats format, |
| 1839 int startPosition, | 1825 int startPosition, |
| 1840 float volumeScaling, | 1826 float volumeScaling, |
| 1841 int stopPosition, | 1827 int stopPosition, |
| 1842 const CodecInst* codecInst) | 1828 const CodecInst* codecInst) |
| 1843 { | 1829 { |
| 1844 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1830 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1845 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d," | 1831 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d," |
| 1846 " format=%d, volumeScaling=%5.3f, startPosition=%d, " | 1832 " format=%d, volumeScaling=%5.3f, startPosition=%d, " |
| 1847 "stopPosition=%d)", fileName, loop, format, volumeScaling, | 1833 "stopPosition=%d)", fileName, loop, format, volumeScaling, |
| 1848 startPosition, stopPosition); | 1834 startPosition, stopPosition); |
| 1849 | 1835 |
| 1850 if (channel_state_.Get().output_file_playing) | 1836 if (channel_state_.Get().output_file_playing) |
| 1851 { | 1837 { |
| 1852 _engineStatisticsPtr->SetLastError( | 1838 _engineStatisticsPtr->SetLastError( |
| 1853 VE_ALREADY_PLAYING, kTraceError, | 1839 VE_ALREADY_PLAYING, kTraceError, |
| 1854 "StartPlayingFileLocally() is already playing"); | 1840 "StartPlayingFileLocally() is already playing"); |
| 1855 return -1; | 1841 return -1; |
| 1856 } | 1842 } |
| 1857 | 1843 |
| 1858 { | 1844 { |
| 1859 CriticalSectionScoped cs(&_fileCritSect); | 1845 rtc::CritScope cs(&_fileCritSect); |
| 1860 | 1846 |
| 1861 if (_outputFilePlayerPtr) | 1847 if (_outputFilePlayerPtr) |
| 1862 { | 1848 { |
| 1863 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 1849 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| 1864 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); | 1850 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); |
| 1865 _outputFilePlayerPtr = NULL; | 1851 _outputFilePlayerPtr = NULL; |
| 1866 } | 1852 } |
| 1867 | 1853 |
| 1868 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer( | 1854 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer( |
| 1869 _outputFilePlayerId, (const FileFormats)format); | 1855 _outputFilePlayerId, (const FileFormats)format); |
| (...skipping 58 matching lines...) | |
| 1928 | 1914 |
| 1929 if (channel_state_.Get().output_file_playing) | 1915 if (channel_state_.Get().output_file_playing) |
| 1930 { | 1916 { |
| 1931 _engineStatisticsPtr->SetLastError( | 1917 _engineStatisticsPtr->SetLastError( |
| 1932 VE_ALREADY_PLAYING, kTraceError, | 1918 VE_ALREADY_PLAYING, kTraceError, |
| 1933 "StartPlayingFileLocally() is already playing"); | 1919 "StartPlayingFileLocally() is already playing"); |
| 1934 return -1; | 1920 return -1; |
| 1935 } | 1921 } |
| 1936 | 1922 |
| 1937 { | 1923 { |
| 1938 CriticalSectionScoped cs(&_fileCritSect); | 1924 rtc::CritScope cs(&_fileCritSect); |
| 1939 | 1925 |
| 1940 // Destroy the old instance | 1926 // Destroy the old instance |
| 1941 if (_outputFilePlayerPtr) | 1927 if (_outputFilePlayerPtr) |
| 1942 { | 1928 { |
| 1943 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 1929 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| 1944 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); | 1930 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); |
| 1945 _outputFilePlayerPtr = NULL; | 1931 _outputFilePlayerPtr = NULL; |
| 1946 } | 1932 } |
| 1947 | 1933 |
| 1948 // Create the instance | 1934 // Create the instance |
| (...skipping 38 matching lines...) | |
| 1987 { | 1973 { |
| 1988 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1974 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1989 "Channel::StopPlayingFileLocally()"); | 1975 "Channel::StopPlayingFileLocally()"); |
| 1990 | 1976 |
| 1991 if (!channel_state_.Get().output_file_playing) | 1977 if (!channel_state_.Get().output_file_playing) |
| 1992 { | 1978 { |
| 1993 return 0; | 1979 return 0; |
| 1994 } | 1980 } |
| 1995 | 1981 |
| 1996 { | 1982 { |
| 1997 CriticalSectionScoped cs(&_fileCritSect); | 1983 rtc::CritScope cs(&_fileCritSect); |
| 1998 | 1984 |
| 1999 if (_outputFilePlayerPtr->StopPlayingFile() != 0) | 1985 if (_outputFilePlayerPtr->StopPlayingFile() != 0) |
| 2000 { | 1986 { |
| 2001 _engineStatisticsPtr->SetLastError( | 1987 _engineStatisticsPtr->SetLastError( |
| 2002 VE_STOP_RECORDING_FAILED, kTraceError, | 1988 VE_STOP_RECORDING_FAILED, kTraceError, |
| 2003 "StopPlayingFile() could not stop playing"); | 1989 "StopPlayingFile() could not stop playing"); |
| 2004 return -1; | 1990 return -1; |
| 2005 } | 1991 } |
| 2006 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 1992 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| 2007 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); | 1993 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); |
| (...skipping 31 matching lines...) | |
| 2039 return 0; | 2025 return 0; |
| 2040 } | 2026 } |
| 2041 | 2027 |
| 2042 // |_fileCritSect| cannot be taken while calling | 2028 // |_fileCritSect| cannot be taken while calling |
| 2043 // SetAnonymousMixabilityStatus() since as soon as the participant is added | 2029 // SetAnonymousMixabilityStatus() since as soon as the participant is added |
| 2044 // frames can be pulled by the mixer. Since the frames are generated from | 2030 // frames can be pulled by the mixer. Since the frames are generated from |
| 2045 // the file, _fileCritSect will be taken. This would result in a deadlock. | 2031 // the file, _fileCritSect will be taken. This would result in a deadlock. |
| 2046 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) | 2032 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) |
| 2047 { | 2033 { |
| 2048 channel_state_.SetOutputFilePlaying(false); | 2034 channel_state_.SetOutputFilePlaying(false); |
| 2049 CriticalSectionScoped cs(&_fileCritSect); | 2035 rtc::CritScope cs(&_fileCritSect); |
| 2050 _engineStatisticsPtr->SetLastError( | 2036 _engineStatisticsPtr->SetLastError( |
| 2051 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError, | 2037 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError, |
| 2052 "StartPlayingFile() failed to add participant as file to mixer"); | 2038 "StartPlayingFile() failed to add participant as file to mixer"); |
| 2053 _outputFilePlayerPtr->StopPlayingFile(); | 2039 _outputFilePlayerPtr->StopPlayingFile(); |
| 2054 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); | 2040 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); |
| 2055 _outputFilePlayerPtr = NULL; | 2041 _outputFilePlayerPtr = NULL; |
| 2056 return -1; | 2042 return -1; |
| 2057 } | 2043 } |
| 2058 | 2044 |
| 2059 return 0; | 2045 return 0; |
| 2060 } | 2046 } |
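
The comment in the hunk above warns that `_fileCritSect` must not be held across `SetAnonymousMixabilityStatus()`: as soon as the participant is added, the mixer may start pulling file-generated frames, and producing those frames takes `_fileCritSect`, so holding it here would risk a deadlock. The following is a generic sketch of that lock-ordering hazard using `std::mutex` stand-ins rather than the actual WebRTC types; all names are hypothetical, not code from this CL.

```cpp
// Generic illustration (not WebRTC code) of the lock-ordering hazard the
// comment above describes.
#include <mutex>

std::mutex file_lock;   // stands in for |_fileCritSect|
std::mutex mixer_lock;  // stands in for the mixer's internal lock

void RegisterWithMixerWhileHoldingFileLock() {
  std::lock_guard<std::mutex> file(file_lock);    // order A: file, then mixer
  std::lock_guard<std::mutex> mixer(mixer_lock);  // models the mixer call
}

void MixerPullsFileFrames() {
  std::lock_guard<std::mutex> mixer(mixer_lock);  // order B: mixer, then file
  std::lock_guard<std::mutex> file(file_lock);    // models reading a file frame
}

int main() {
  // Run sequentially here so the example terminates; if these two functions
  // ran on different threads concurrently, A could hold file_lock while
  // waiting for mixer_lock and B the reverse, and neither would progress.
  // That is why the code above takes |_fileCritSect| only after
  // SetAnonymousMixabilityStatus() has returned.
  RegisterWithMixerWhileHoldingFileLock();
  MixerPullsFileFrames();
  return 0;
}
```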
| 2061 | 2047 |
| 2062 int Channel::StartPlayingFileAsMicrophone(const char* fileName, | 2048 int Channel::StartPlayingFileAsMicrophone(const char* fileName, |
| 2063 bool loop, | 2049 bool loop, |
| 2064 FileFormats format, | 2050 FileFormats format, |
| 2065 int startPosition, | 2051 int startPosition, |
| 2066 float volumeScaling, | 2052 float volumeScaling, |
| 2067 int stopPosition, | 2053 int stopPosition, |
| 2068 const CodecInst* codecInst) | 2054 const CodecInst* codecInst) |
| 2069 { | 2055 { |
| 2070 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2056 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2071 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, " | 2057 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, " |
| 2072 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, " | 2058 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, " |
| 2073 "stopPosition=%d)", fileName, loop, format, volumeScaling, | 2059 "stopPosition=%d)", fileName, loop, format, volumeScaling, |
| 2074 startPosition, stopPosition); | 2060 startPosition, stopPosition); |
| 2075 | 2061 |
| 2076 CriticalSectionScoped cs(&_fileCritSect); | 2062 rtc::CritScope cs(&_fileCritSect); |
| 2077 | 2063 |
| 2078 if (channel_state_.Get().input_file_playing) | 2064 if (channel_state_.Get().input_file_playing) |
| 2079 { | 2065 { |
| 2080 _engineStatisticsPtr->SetLastError( | 2066 _engineStatisticsPtr->SetLastError( |
| 2081 VE_ALREADY_PLAYING, kTraceWarning, | 2067 VE_ALREADY_PLAYING, kTraceWarning, |
| 2082 "StartPlayingFileAsMicrophone() filePlayer is playing"); | 2068 "StartPlayingFileAsMicrophone() filePlayer is playing"); |
| 2083 return 0; | 2069 return 0; |
| 2084 } | 2070 } |
| 2085 | 2071 |
| 2086 // Destroy the old instance | 2072 // Destroy the old instance |
| (...skipping 54 matching lines...) | |
| 2141 format, volumeScaling, startPosition, stopPosition); | 2127 format, volumeScaling, startPosition, stopPosition); |
| 2142 | 2128 |
| 2143 if(stream == NULL) | 2129 if(stream == NULL) |
| 2144 { | 2130 { |
| 2145 _engineStatisticsPtr->SetLastError( | 2131 _engineStatisticsPtr->SetLastError( |
| 2146 VE_BAD_FILE, kTraceError, | 2132 VE_BAD_FILE, kTraceError, |
| 2147 "StartPlayingFileAsMicrophone NULL as input stream"); | 2133 "StartPlayingFileAsMicrophone NULL as input stream"); |
| 2148 return -1; | 2134 return -1; |
| 2149 } | 2135 } |
| 2150 | 2136 |
| 2151 CriticalSectionScoped cs(&_fileCritSect); | 2137 rtc::CritScope cs(&_fileCritSect); |
| 2152 | 2138 |
| 2153 if (channel_state_.Get().input_file_playing) | 2139 if (channel_state_.Get().input_file_playing) |
| 2154 { | 2140 { |
| 2155 _engineStatisticsPtr->SetLastError( | 2141 _engineStatisticsPtr->SetLastError( |
| 2156 VE_ALREADY_PLAYING, kTraceWarning, | 2142 VE_ALREADY_PLAYING, kTraceWarning, |
| 2157 "StartPlayingFileAsMicrophone() is playing"); | 2143 "StartPlayingFileAsMicrophone() is playing"); |
| 2158 return 0; | 2144 return 0; |
| 2159 } | 2145 } |
| 2160 | 2146 |
| 2161 // Destroy the old instance | 2147 // Destroy the old instance |
| (...skipping 35 matching lines...) | |
| 2197 channel_state_.SetInputFilePlaying(true); | 2183 channel_state_.SetInputFilePlaying(true); |
| 2198 | 2184 |
| 2199 return 0; | 2185 return 0; |
| 2200 } | 2186 } |
| 2201 | 2187 |
| 2202 int Channel::StopPlayingFileAsMicrophone() | 2188 int Channel::StopPlayingFileAsMicrophone() |
| 2203 { | 2189 { |
| 2204 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2190 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2205 "Channel::StopPlayingFileAsMicrophone()"); | 2191 "Channel::StopPlayingFileAsMicrophone()"); |
| 2206 | 2192 |
| 2207 CriticalSectionScoped cs(&_fileCritSect); | 2193 rtc::CritScope cs(&_fileCritSect); |
| 2208 | 2194 |
| 2209 if (!channel_state_.Get().input_file_playing) | 2195 if (!channel_state_.Get().input_file_playing) |
| 2210 { | 2196 { |
| 2211 return 0; | 2197 return 0; |
| 2212 } | 2198 } |
| 2213 | 2199 |
| 2214 if (_inputFilePlayerPtr->StopPlayingFile() != 0) | 2200 if (_inputFilePlayerPtr->StopPlayingFile() != 0) |
| 2215 { | 2201 { |
| 2216 _engineStatisticsPtr->SetLastError( | 2202 _engineStatisticsPtr->SetLastError( |
| 2217 VE_STOP_RECORDING_FAILED, kTraceError, | 2203 VE_STOP_RECORDING_FAILED, kTraceError, |
| (...skipping 47 matching lines...) | |
| 2265 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) || | 2251 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) || |
| 2266 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0)) | 2252 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0)) |
| 2267 { | 2253 { |
| 2268 format = kFileFormatWavFile; | 2254 format = kFileFormatWavFile; |
| 2269 } | 2255 } |
| 2270 else | 2256 else |
| 2271 { | 2257 { |
| 2272 format = kFileFormatCompressedFile; | 2258 format = kFileFormatCompressedFile; |
| 2273 } | 2259 } |
| 2274 | 2260 |
| 2275 CriticalSectionScoped cs(&_fileCritSect); | 2261 rtc::CritScope cs(&_fileCritSect); |
| 2276 | 2262 |
| 2277 // Destroy the old instance | 2263 // Destroy the old instance |
| 2278 if (_outputFileRecorderPtr) | 2264 if (_outputFileRecorderPtr) |
| 2279 { | 2265 { |
| 2280 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | 2266 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); |
| 2281 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | 2267 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); |
| 2282 _outputFileRecorderPtr = NULL; | 2268 _outputFileRecorderPtr = NULL; |
| 2283 } | 2269 } |
| 2284 | 2270 |
| 2285 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder( | 2271 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder( |
| (...skipping 56 matching lines...) | |
| 2342 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) || | 2328 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) || |
| 2343 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0)) | 2329 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0)) |
| 2344 { | 2330 { |
| 2345 format = kFileFormatWavFile; | 2331 format = kFileFormatWavFile; |
| 2346 } | 2332 } |
| 2347 else | 2333 else |
| 2348 { | 2334 { |
| 2349 format = kFileFormatCompressedFile; | 2335 format = kFileFormatCompressedFile; |
| 2350 } | 2336 } |
| 2351 | 2337 |
| 2352 CriticalSectionScoped cs(&_fileCritSect); | 2338 rtc::CritScope cs(&_fileCritSect); |
| 2353 | 2339 |
| 2354 // Destroy the old instance | 2340 // Destroy the old instance |
| 2355 if (_outputFileRecorderPtr) | 2341 if (_outputFileRecorderPtr) |
| 2356 { | 2342 { |
| 2357 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | 2343 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); |
| 2358 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | 2344 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); |
| 2359 _outputFileRecorderPtr = NULL; | 2345 _outputFileRecorderPtr = NULL; |
| 2360 } | 2346 } |
| 2361 | 2347 |
| 2362 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder( | 2348 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder( |
| (...skipping 30 matching lines...) | |
| 2393 "Channel::StopRecordingPlayout()"); | 2379 "Channel::StopRecordingPlayout()"); |
| 2394 | 2380 |
| 2395 if (!_outputFileRecording) | 2381 if (!_outputFileRecording) |
| 2396 { | 2382 { |
| 2397 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1), | 2383 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1), |
| 2398 "StopRecordingPlayout() isnot recording"); | 2384 "StopRecordingPlayout() isnot recording"); |
| 2399 return -1; | 2385 return -1; |
| 2400 } | 2386 } |
| 2401 | 2387 |
| 2402 | 2388 |
| 2403 CriticalSectionScoped cs(&_fileCritSect); | 2389 rtc::CritScope cs(&_fileCritSect); |
| 2404 | 2390 |
| 2405 if (_outputFileRecorderPtr->StopRecording() != 0) | 2391 if (_outputFileRecorderPtr->StopRecording() != 0) |
| 2406 { | 2392 { |
| 2407 _engineStatisticsPtr->SetLastError( | 2393 _engineStatisticsPtr->SetLastError( |
| 2408 VE_STOP_RECORDING_FAILED, kTraceError, | 2394 VE_STOP_RECORDING_FAILED, kTraceError, |
| 2409 "StopRecording() could not stop recording"); | 2395 "StopRecording() could not stop recording"); |
| 2410 return(-1); | 2396 return(-1); |
| 2411 } | 2397 } |
| 2412 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | 2398 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); |
| 2413 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | 2399 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); |
| 2414 _outputFileRecorderPtr = NULL; | 2400 _outputFileRecorderPtr = NULL; |
| 2415 _outputFileRecording = false; | 2401 _outputFileRecording = false; |
| 2416 | 2402 |
| 2417 return 0; | 2403 return 0; |
| 2418 } | 2404 } |
| 2419 | 2405 |
| 2420 void | 2406 void |
| 2421 Channel::SetMixWithMicStatus(bool mix) | 2407 Channel::SetMixWithMicStatus(bool mix) |
| 2422 { | 2408 { |
| 2423 CriticalSectionScoped cs(&_fileCritSect); | 2409 rtc::CritScope cs(&_fileCritSect); |
| 2424 _mixFileWithMicrophone=mix; | 2410 _mixFileWithMicrophone=mix; |
| 2425 } | 2411 } |
| 2426 | 2412 |
| 2427 int | 2413 int |
| 2428 Channel::GetSpeechOutputLevel(uint32_t& level) const | 2414 Channel::GetSpeechOutputLevel(uint32_t& level) const |
| 2429 { | 2415 { |
| 2430 int8_t currentLevel = _outputAudioLevel.Level(); | 2416 int8_t currentLevel = _outputAudioLevel.Level(); |
| 2431 level = static_cast<int32_t> (currentLevel); | 2417 level = static_cast<int32_t> (currentLevel); |
| 2432 return 0; | 2418 return 0; |
| 2433 } | 2419 } |
| 2434 | 2420 |
| 2435 int | 2421 int |
| 2436 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const | 2422 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const |
| 2437 { | 2423 { |
| 2438 int16_t currentLevel = _outputAudioLevel.LevelFullRange(); | 2424 int16_t currentLevel = _outputAudioLevel.LevelFullRange(); |
| 2439 level = static_cast<int32_t> (currentLevel); | 2425 level = static_cast<int32_t> (currentLevel); |
| 2440 return 0; | 2426 return 0; |
| 2441 } | 2427 } |
| 2442 | 2428 |
| 2443 int | 2429 int |
| 2444 Channel::SetMute(bool enable) | 2430 Channel::SetMute(bool enable) |
| 2445 { | 2431 { |
| 2446 CriticalSectionScoped cs(&volume_settings_critsect_); | 2432 rtc::CritScope cs(&volume_settings_critsect_); |
| 2447 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2433 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2448 "Channel::SetMute(enable=%d)", enable); | 2434 "Channel::SetMute(enable=%d)", enable); |
| 2449 _mute = enable; | 2435 _mute = enable; |
| 2450 return 0; | 2436 return 0; |
| 2451 } | 2437 } |
| 2452 | 2438 |
| 2453 bool | 2439 bool |
| 2454 Channel::Mute() const | 2440 Channel::Mute() const |
| 2455 { | 2441 { |
| 2456 CriticalSectionScoped cs(&volume_settings_critsect_); | 2442 rtc::CritScope cs(&volume_settings_critsect_); |
| 2457 return _mute; | 2443 return _mute; |
| 2458 } | 2444 } |
| 2459 | 2445 |
| 2460 int | 2446 int |
| 2461 Channel::SetOutputVolumePan(float left, float right) | 2447 Channel::SetOutputVolumePan(float left, float right) |
| 2462 { | 2448 { |
| 2463 CriticalSectionScoped cs(&volume_settings_critsect_); | 2449 rtc::CritScope cs(&volume_settings_critsect_); |
| 2464 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2450 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2465 "Channel::SetOutputVolumePan()"); | 2451 "Channel::SetOutputVolumePan()"); |
| 2466 _panLeft = left; | 2452 _panLeft = left; |
| 2467 _panRight = right; | 2453 _panRight = right; |
| 2468 return 0; | 2454 return 0; |
| 2469 } | 2455 } |
| 2470 | 2456 |
| 2471 int | 2457 int |
| 2472 Channel::GetOutputVolumePan(float& left, float& right) const | 2458 Channel::GetOutputVolumePan(float& left, float& right) const |
| 2473 { | 2459 { |
| 2474 CriticalSectionScoped cs(&volume_settings_critsect_); | 2460 rtc::CritScope cs(&volume_settings_critsect_); |
| 2475 left = _panLeft; | 2461 left = _panLeft; |
| 2476 right = _panRight; | 2462 right = _panRight; |
| 2477 return 0; | 2463 return 0; |
| 2478 } | 2464 } |
| 2479 | 2465 |
| 2480 int | 2466 int |
| 2481 Channel::SetChannelOutputVolumeScaling(float scaling) | 2467 Channel::SetChannelOutputVolumeScaling(float scaling) |
| 2482 { | 2468 { |
| 2483 CriticalSectionScoped cs(&volume_settings_critsect_); | 2469 rtc::CritScope cs(&volume_settings_critsect_); |
| 2484 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2470 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2485 "Channel::SetChannelOutputVolumeScaling()"); | 2471 "Channel::SetChannelOutputVolumeScaling()"); |
| 2486 _outputGain = scaling; | 2472 _outputGain = scaling; |
| 2487 return 0; | 2473 return 0; |
| 2488 } | 2474 } |
| 2489 | 2475 |
| 2490 int | 2476 int |
| 2491 Channel::GetChannelOutputVolumeScaling(float& scaling) const | 2477 Channel::GetChannelOutputVolumeScaling(float& scaling) const |
| 2492 { | 2478 { |
| 2493 CriticalSectionScoped cs(&volume_settings_critsect_); | 2479 rtc::CritScope cs(&volume_settings_critsect_); |
| 2494 scaling = _outputGain; | 2480 scaling = _outputGain; |
| 2495 return 0; | 2481 return 0; |
| 2496 } | 2482 } |
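The four functions above only store _panLeft, _panRight and _outputGain under volume_settings_critsect_; the values are applied to the decoded audio elsewhere in the channel. As a rough illustration (not the actual VoE mixing code), pan and gain of this kind could be applied to an interleaved stereo block like this:

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Illustration only: apply left/right pan factors and an output gain to an
    // interleaved stereo int16 buffer, clamping to the 16-bit range.
    void ApplyPanAndGain(std::vector<int16_t>* interleaved,
                         float pan_left,
                         float pan_right,
                         float gain) {
      for (size_t i = 0; i + 1 < interleaved->size(); i += 2) {
        float left = (*interleaved)[i] * pan_left * gain;
        float right = (*interleaved)[i + 1] * pan_right * gain;
        (*interleaved)[i] =
            static_cast<int16_t>(std::max(-32768.f, std::min(32767.f, left)));
        (*interleaved)[i + 1] =
            static_cast<int16_t>(std::max(-32768.f, std::min(32767.f, right)));
      }
    }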
| 2497 | 2483 |
| 2498 int Channel::SendTelephoneEventOutband(unsigned char eventCode, | 2484 int Channel::SendTelephoneEventOutband(unsigned char eventCode, |
| 2499 int lengthMs, int attenuationDb, | 2485 int lengthMs, int attenuationDb, |
| 2500 bool playDtmfEvent) | 2486 bool playDtmfEvent) |
| 2501 { | 2487 { |
| 2502 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), | 2488 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), |
| 2503 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)", | 2489 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)", |
| (...skipping 89 matching lines...) | |
| 2593 "Channel::UpdateRxVadDetection() => vadDecision=%d", | 2579 "Channel::UpdateRxVadDetection() => vadDecision=%d", |
| 2594 vadDecision); | 2580 vadDecision); |
| 2595 return 0; | 2581 return 0; |
| 2596 } | 2582 } |
| 2597 | 2583 |
| 2598 int | 2584 int |
| 2599 Channel::RegisterRxVadObserver(VoERxVadCallback &observer) | 2585 Channel::RegisterRxVadObserver(VoERxVadCallback &observer) |
| 2600 { | 2586 { |
| 2601 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2587 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2602 "Channel::RegisterRxVadObserver()"); | 2588 "Channel::RegisterRxVadObserver()"); |
| 2603 CriticalSectionScoped cs(&_callbackCritSect); | 2589 rtc::CritScope cs(&_callbackCritSect); |
| 2604 | 2590 |
| 2605 if (_rxVadObserverPtr) | 2591 if (_rxVadObserverPtr) |
| 2606 { | 2592 { |
| 2607 _engineStatisticsPtr->SetLastError( | 2593 _engineStatisticsPtr->SetLastError( |
| 2608 VE_INVALID_OPERATION, kTraceError, | 2594 VE_INVALID_OPERATION, kTraceError, |
| 2609 "RegisterRxVadObserver() observer already enabled"); | 2595 "RegisterRxVadObserver() observer already enabled"); |
| 2610 return -1; | 2596 return -1; |
| 2611 } | 2597 } |
| 2612 _rxVadObserverPtr = &observer; | 2598 _rxVadObserverPtr = &observer; |
| 2613 _RxVadDetection = true; | 2599 _RxVadDetection = true; |
| 2614 return 0; | 2600 return 0; |
| 2615 } | 2601 } |
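For reference, an RX VAD observer is a small callback object fed from UpdateRxVadDetection() above. A sketch, assuming the VoERxVadCallback interface declared in voe_audio_processing.h (signature taken on trust, so double-check the header):

    #include <cstdio>

    #include "webrtc/voice_engine/include/voe_audio_processing.h"

    // Logs the VAD decision for each received audio block on this channel.
    class LoggingRxVadObserver : public webrtc::VoERxVadCallback {
     public:
      void OnRxVad(int channel, int vadDecision) override {
        // vadDecision is non-zero while speech is detected in received audio.
        std::printf("channel %d: vad=%d\n", channel, vadDecision);
      }
    };

    // Attached with RegisterRxVadObserver(observer) as above and removed with
    // DeRegisterRxVadObserver().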
| 2616 | 2602 |
| 2617 int | 2603 int |
| 2618 Channel::DeRegisterRxVadObserver() | 2604 Channel::DeRegisterRxVadObserver() |
| 2619 { | 2605 { |
| 2620 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2606 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2621 "Channel::DeRegisterRxVadObserver()"); | 2607 "Channel::DeRegisterRxVadObserver()"); |
| 2622 CriticalSectionScoped cs(&_callbackCritSect); | 2608 rtc::CritScope cs(&_callbackCritSect); |
| 2623 | 2609 |
| 2624 if (!_rxVadObserverPtr) | 2610 if (!_rxVadObserverPtr) |
| 2625 { | 2611 { |
| 2626 _engineStatisticsPtr->SetLastError( | 2612 _engineStatisticsPtr->SetLastError( |
| 2627 VE_INVALID_OPERATION, kTraceWarning, | 2613 VE_INVALID_OPERATION, kTraceWarning, |
| 2628 "DeRegisterRxVadObserver() observer already disabled"); | 2614 "DeRegisterRxVadObserver() observer already disabled"); |
| 2629 return 0; | 2615 return 0; |
| 2630 } | 2616 } |
| 2631 _rxVadObserverPtr = NULL; | 2617 _rxVadObserverPtr = NULL; |
| 2632 _RxVadDetection = false; | 2618 _RxVadDetection = false; |
| (...skipping 611 matching lines...) | |
| 3244 " output will not be complete"); | 3230 " output will not be complete"); |
| 3245 } | 3231 } |
| 3246 | 3232 |
| 3247 stats.bytesSent = bytesSent; | 3233 stats.bytesSent = bytesSent; |
| 3248 stats.packetsSent = packetsSent; | 3234 stats.packetsSent = packetsSent; |
| 3249 stats.bytesReceived = bytesReceived; | 3235 stats.bytesReceived = bytesReceived; |
| 3250 stats.packetsReceived = packetsReceived; | 3236 stats.packetsReceived = packetsReceived; |
| 3251 | 3237 |
| 3252 // --- Timestamps | 3238 // --- Timestamps |
| 3253 { | 3239 { |
| 3254 CriticalSectionScoped lock(ts_stats_lock_.get()); | 3240 rtc::CritScope lock(&ts_stats_lock_); |
| 3255 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; | 3241 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; |
| 3256 } | 3242 } |
| 3257 return 0; | 3243 return 0; |
| 3258 } | 3244 } |
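The ts_stats_lock_ line above goes from ts_stats_lock_.get() to &ts_stats_lock_, which implies the member itself changes in channel.h from a heap-allocated CriticalSectionWrapper to a plain rtc::CriticalSection. A sketch of that header-side shape (field names are illustrative, not quoted from the CL):

    #include <cstdint>

    #include "webrtc/base/criticalsection.h"

    // Before (sketch): rtc::scoped_ptr<CriticalSectionWrapper> ts_stats_lock_;
    //                  CriticalSectionScoped lock(ts_stats_lock_.get());
    // After  (sketch): the lock is a value member and is passed by address.
    struct TimestampStats {
      rtc::CriticalSection ts_stats_lock_;
      int64_t capture_start_ntp_time_ms_ = -1;  // Guarded by |ts_stats_lock_|.

      int64_t capture_start_ntp_time_ms() {
        rtc::CritScope lock(&ts_stats_lock_);
        return capture_start_ntp_time_ms_;
      }
    };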
| 3259 | 3245 |
| 3260 int Channel::SetREDStatus(bool enable, int redPayloadtype) { | 3246 int Channel::SetREDStatus(bool enable, int redPayloadtype) { |
| 3261 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), | 3247 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), |
| 3262 "Channel::SetREDStatus()"); | 3248 "Channel::SetREDStatus()"); |
| 3263 | 3249 |
| 3264 if (enable) { | 3250 if (enable) { |
| (...skipping 120 matching lines...) | |
| 3385 MixOrReplaceAudioWithFile(mixingFrequency); | 3371 MixOrReplaceAudioWithFile(mixingFrequency); |
| 3386 } | 3372 } |
| 3387 | 3373 |
| 3388 bool is_muted = Mute(); // Cache locally as Mute() takes a lock. | 3374 bool is_muted = Mute(); // Cache locally as Mute() takes a lock. |
| 3389 if (is_muted) { | 3375 if (is_muted) { |
| 3390 AudioFrameOperations::Mute(_audioFrame); | 3376 AudioFrameOperations::Mute(_audioFrame); |
| 3391 } | 3377 } |
| 3392 | 3378 |
| 3393 if (channel_state_.Get().input_external_media) | 3379 if (channel_state_.Get().input_external_media) |
| 3394 { | 3380 { |
| 3395 CriticalSectionScoped cs(&_callbackCritSect); | 3381 rtc::CritScope cs(&_callbackCritSect); |
| 3396 const bool isStereo = (_audioFrame.num_channels_ == 2); | 3382 const bool isStereo = (_audioFrame.num_channels_ == 2); |
| 3397 if (_inputExternalMediaCallbackPtr) | 3383 if (_inputExternalMediaCallbackPtr) |
| 3398 { | 3384 { |
| 3399 _inputExternalMediaCallbackPtr->Process( | 3385 _inputExternalMediaCallbackPtr->Process( |
| 3400 _channelId, | 3386 _channelId, |
| 3401 kRecordingPerChannel, | 3387 kRecordingPerChannel, |
| 3402 (int16_t*)_audioFrame.data_, | 3388 (int16_t*)_audioFrame.data_, |
| 3403 _audioFrame.samples_per_channel_, | 3389 _audioFrame.samples_per_channel_, |
| 3404 _audioFrame.sample_rate_hz_, | 3390 _audioFrame.sample_rate_hz_, |
| 3405 isStereo); | 3391 isStereo); |
| (...skipping 43 matching lines...) | |
| 3449 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), | 3435 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), |
| 3450 "Channel::EncodeAndSend() ACM encoding failed"); | 3436 "Channel::EncodeAndSend() ACM encoding failed"); |
| 3451 return 0xFFFFFFFF; | 3437 return 0xFFFFFFFF; |
| 3452 } | 3438 } |
| 3453 | 3439 |
| 3454 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_); | 3440 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_); |
| 3455 return 0; | 3441 return 0; |
| 3456 } | 3442 } |
| 3457 | 3443 |
| 3458 void Channel::DisassociateSendChannel(int channel_id) { | 3444 void Channel::DisassociateSendChannel(int channel_id) { |
| 3459 CriticalSectionScoped lock(assoc_send_channel_lock_.get()); | 3445 rtc::CritScope lock(&assoc_send_channel_lock_); |
| 3460 Channel* channel = associate_send_channel_.channel(); | 3446 Channel* channel = associate_send_channel_.channel(); |
| 3461 if (channel && channel->ChannelId() == channel_id) { | 3447 if (channel && channel->ChannelId() == channel_id) { |
| 3462 // If this channel is associated with a send channel of the specified | 3448 // If this channel is associated with a send channel of the specified |
| 3463 // Channel ID, disassociate with it. | 3449 // Channel ID, disassociate with it. |
| 3464 ChannelOwner ref(NULL); | 3450 ChannelOwner ref(NULL); |
| 3465 associate_send_channel_ = ref; | 3451 associate_send_channel_ = ref; |
| 3466 } | 3452 } |
| 3467 } | 3453 } |
| 3468 | 3454 |
| 3469 int Channel::RegisterExternalMediaProcessing( | 3455 int Channel::RegisterExternalMediaProcessing( |
| 3470 ProcessingTypes type, | 3456 ProcessingTypes type, |
| 3471 VoEMediaProcess& processObject) | 3457 VoEMediaProcess& processObject) |
| 3472 { | 3458 { |
| 3473 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 3459 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 3474 "Channel::RegisterExternalMediaProcessing()"); | 3460 "Channel::RegisterExternalMediaProcessing()"); |
| 3475 | 3461 |
| 3476 CriticalSectionScoped cs(&_callbackCritSect); | 3462 rtc::CritScope cs(&_callbackCritSect); |
| 3477 | 3463 |
| 3478 if (kPlaybackPerChannel == type) | 3464 if (kPlaybackPerChannel == type) |
| 3479 { | 3465 { |
| 3480 if (_outputExternalMediaCallbackPtr) | 3466 if (_outputExternalMediaCallbackPtr) |
| 3481 { | 3467 { |
| 3482 _engineStatisticsPtr->SetLastError( | 3468 _engineStatisticsPtr->SetLastError( |
| 3483 VE_INVALID_OPERATION, kTraceError, | 3469 VE_INVALID_OPERATION, kTraceError, |
| 3484 "Channel::RegisterExternalMediaProcessing() " | 3470 "Channel::RegisterExternalMediaProcessing() " |
| 3485 "output external media already enabled"); | 3471 "output external media already enabled"); |
| 3486 return -1; | 3472 return -1; |
| (...skipping 15 matching lines...) | |
| 3502 channel_state_.SetInputExternalMedia(true); | 3488 channel_state_.SetInputExternalMedia(true); |
| 3503 } | 3489 } |
| 3504 return 0; | 3490 return 0; |
| 3505 } | 3491 } |
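The registered VoEMediaProcess object is the one invoked from the external-media callback further up. A sketch of an implementation, with the Process() signature inferred from that call site (treat the exact parameter types as an assumption and check voe_external_media.h):

    #include <cstddef>
    #include <cstdint>

    #include "webrtc/voice_engine/include/voe_external_media.h"

    // Halves the amplitude of every 10 ms block handed to it.
    class GainProcessor : public webrtc::VoEMediaProcess {
     public:
      void Process(int channel,
                   webrtc::ProcessingTypes type,
                   int16_t audio10ms[],
                   size_t length,
                   int samplingFreq,
                   bool isStereo) override {
        // |length| is samples per channel; interleaved stereo doubles the count.
        size_t samples = isStereo ? 2 * length : length;
        for (size_t i = 0; i < samples; ++i)
          audio10ms[i] /= 2;
      }
    };

    // Registered per channel with RegisterExternalMediaProcessing(
    //     kRecordingPerChannel, processor) as above.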
| 3506 | 3492 |
| 3507 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) | 3493 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) |
| 3508 { | 3494 { |
| 3509 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 3495 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 3510 "Channel::DeRegisterExternalMediaProcessing()"); | 3496 "Channel::DeRegisterExternalMediaProcessing()"); |
| 3511 | 3497 |
| 3512 CriticalSectionScoped cs(&_callbackCritSect); | 3498 rtc::CritScope cs(&_callbackCritSect); |
| 3513 | 3499 |
| 3514 if (kPlaybackPerChannel == type) | 3500 if (kPlaybackPerChannel == type) |
| 3515 { | 3501 { |
| 3516 if (!_outputExternalMediaCallbackPtr) | 3502 if (!_outputExternalMediaCallbackPtr) |
| 3517 { | 3503 { |
| 3518 _engineStatisticsPtr->SetLastError( | 3504 _engineStatisticsPtr->SetLastError( |
| 3519 VE_INVALID_OPERATION, kTraceWarning, | 3505 VE_INVALID_OPERATION, kTraceWarning, |
| 3520 "Channel::DeRegisterExternalMediaProcessing() " | 3506 "Channel::DeRegisterExternalMediaProcessing() " |
| 3521 "output external media already disabled"); | 3507 "output external media already disabled"); |
| 3522 return 0; | 3508 return 0; |
| (...skipping 41 matching lines...) | |
| 3564 { | 3550 { |
| 3565 return audio_coding_->GetNetworkStatistics(&stats); | 3551 return audio_coding_->GetNetworkStatistics(&stats); |
| 3566 } | 3552 } |
| 3567 | 3553 |
| 3568 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const { | 3554 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const { |
| 3569 audio_coding_->GetDecodingCallStatistics(stats); | 3555 audio_coding_->GetDecodingCallStatistics(stats); |
| 3570 } | 3556 } |
| 3571 | 3557 |
| 3572 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms, | 3558 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms, |
| 3573 int* playout_buffer_delay_ms) const { | 3559 int* playout_buffer_delay_ms) const { |
| 3574 CriticalSectionScoped cs(video_sync_lock_.get()); | 3560 rtc::CritScope lock(&video_sync_lock_); |
| 3575 if (_average_jitter_buffer_delay_us == 0) { | 3561 if (_average_jitter_buffer_delay_us == 0) { |
| 3576 return false; | 3562 return false; |
| 3577 } | 3563 } |
| 3578 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 + | 3564 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 + |
| 3579 _recPacketDelayMs; | 3565 _recPacketDelayMs; |
| 3580 *playout_buffer_delay_ms = playout_delay_ms_; | 3566 *playout_buffer_delay_ms = playout_delay_ms_; |
| 3581 return true; | 3567 return true; |
| 3582 } | 3568 } |
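The delay reported above is the microsecond-average jitter-buffer delay rounded to milliseconds plus the last measured inter-packet delay. Restated as standalone arithmetic:

    #include <cassert>

    // Same arithmetic as GetDelayEstimate() above.
    int JitterBufferDelayMs(int average_jitter_buffer_delay_us,
                            int rec_packet_delay_ms) {
      return (average_jitter_buffer_delay_us + 500) / 1000 + rec_packet_delay_ms;
    }

    int main() {
      // 27.6 ms of averaged jitter-buffer delay rounds to 28 ms; with a 20 ms
      // inter-packet delay the reported estimate is 48 ms.
      assert(JitterBufferDelayMs(27600, 20) == 48);
      return 0;
    }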
| 3583 | 3569 |
| 3584 uint32_t Channel::GetDelayEstimate() const { | 3570 uint32_t Channel::GetDelayEstimate() const { |
| (...skipping 26 matching lines...) | |
| 3611 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, | 3597 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, |
| 3612 "SetMinimumPlayoutDelay() failed to set min playout delay"); | 3598 "SetMinimumPlayoutDelay() failed to set min playout delay"); |
| 3613 return -1; | 3599 return -1; |
| 3614 } | 3600 } |
| 3615 return 0; | 3601 return 0; |
| 3616 } | 3602 } |
| 3617 | 3603 |
| 3618 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) { | 3604 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) { |
| 3619 uint32_t playout_timestamp_rtp = 0; | 3605 uint32_t playout_timestamp_rtp = 0; |
| 3620 { | 3606 { |
| 3621 CriticalSectionScoped cs(video_sync_lock_.get()); | 3607 rtc::CritScope lock(&video_sync_lock_); |
| 3622 playout_timestamp_rtp = playout_timestamp_rtp_; | 3608 playout_timestamp_rtp = playout_timestamp_rtp_; |
| 3623 } | 3609 } |
| 3624 if (playout_timestamp_rtp == 0) { | 3610 if (playout_timestamp_rtp == 0) { |
| 3625 _engineStatisticsPtr->SetLastError( | 3611 _engineStatisticsPtr->SetLastError( |
| 3626 VE_CANNOT_RETRIEVE_VALUE, kTraceError, | 3612 VE_CANNOT_RETRIEVE_VALUE, kTraceError, |
| 3627 "GetPlayoutTimestamp() failed to retrieve timestamp"); | 3613 "GetPlayoutTimestamp() failed to retrieve timestamp"); |
| 3628 return -1; | 3614 return -1; |
| 3629 } | 3615 } |
| 3630 timestamp = playout_timestamp_rtp; | 3616 timestamp = playout_timestamp_rtp; |
| 3631 return 0; | 3617 return 0; |
| (...skipping 33 matching lines...) | |
| 3665 | 3651 |
| 3666 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use | 3652 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use |
| 3667 // a shared helper. | 3653 // a shared helper. |
| 3668 int32_t | 3654 int32_t |
| 3669 Channel::MixOrReplaceAudioWithFile(int mixingFrequency) | 3655 Channel::MixOrReplaceAudioWithFile(int mixingFrequency) |
| 3670 { | 3656 { |
| 3671 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]); | 3657 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]); |
| 3672 size_t fileSamples(0); | 3658 size_t fileSamples(0); |
| 3673 | 3659 |
| 3674 { | 3660 { |
| 3675 CriticalSectionScoped cs(&_fileCritSect); | 3661 rtc::CritScope cs(&_fileCritSect); |
| 3676 | 3662 |
| 3677 if (_inputFilePlayerPtr == NULL) | 3663 if (_inputFilePlayerPtr == NULL) |
| 3678 { | 3664 { |
| 3679 WEBRTC_TRACE(kTraceWarning, kTraceVoice, | 3665 WEBRTC_TRACE(kTraceWarning, kTraceVoice, |
| 3680 VoEId(_instanceId, _channelId), | 3666 VoEId(_instanceId, _channelId), |
| 3681 "Channel::MixOrReplaceAudioWithFile() fileplayer" | 3667 "Channel::MixOrReplaceAudioWithFile() fileplayer" |
| 3682 " doesnt exist"); | 3668 " doesnt exist"); |
| 3683 return -1; | 3669 return -1; |
| 3684 } | 3670 } |
| 3685 | 3671 |
| (...skipping 49 matching lines...) | |
| 3735 int32_t | 3721 int32_t |
| 3736 Channel::MixAudioWithFile(AudioFrame& audioFrame, | 3722 Channel::MixAudioWithFile(AudioFrame& audioFrame, |
| 3737 int mixingFrequency) | 3723 int mixingFrequency) |
| 3738 { | 3724 { |
| 3739 assert(mixingFrequency <= 48000); | 3725 assert(mixingFrequency <= 48000); |
| 3740 | 3726 |
| 3741 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[960]); | 3727 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[960]); |
| 3742 size_t fileSamples(0); | 3728 size_t fileSamples(0); |
| 3743 | 3729 |
| 3744 { | 3730 { |
| 3745 CriticalSectionScoped cs(&_fileCritSect); | 3731 rtc::CritScope cs(&_fileCritSect); |
| 3746 | 3732 |
| 3747 if (_outputFilePlayerPtr == NULL) | 3733 if (_outputFilePlayerPtr == NULL) |
| 3748 { | 3734 { |
| 3749 WEBRTC_TRACE(kTraceWarning, kTraceVoice, | 3735 WEBRTC_TRACE(kTraceWarning, kTraceVoice, |
| 3750 VoEId(_instanceId, _channelId), | 3736 VoEId(_instanceId, _channelId), |
| 3751 "Channel::MixAudioWithFile() file mixing failed"); | 3737 "Channel::MixAudioWithFile() file mixing failed"); |
| 3752 return -1; | 3738 return -1; |
| 3753 } | 3739 } |
| 3754 | 3740 |
| 3755 // We should get the frequency we ask for. | 3741 // We should get the frequency we ask for. |
| (...skipping 128 matching lines...) | |
| 3884 jitter_buffer_playout_timestamp_ = playout_timestamp; | 3870 jitter_buffer_playout_timestamp_ = playout_timestamp; |
| 3885 | 3871 |
| 3886 // Remove the playout delay. | 3872 // Remove the playout delay. |
| 3887 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000)); | 3873 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000)); |
| 3888 | 3874 |
| 3889 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), | 3875 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), |
| 3890 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu", | 3876 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu", |
| 3891 playout_timestamp); | 3877 playout_timestamp); |
| 3892 | 3878 |
| 3893 { | 3879 { |
| 3894 CriticalSectionScoped cs(video_sync_lock_.get()); | 3880 rtc::CritScope lock(&video_sync_lock_); |
| 3895 if (rtcp) { | 3881 if (rtcp) { |
| 3896 playout_timestamp_rtcp_ = playout_timestamp; | 3882 playout_timestamp_rtcp_ = playout_timestamp; |
| 3897 } else { | 3883 } else { |
| 3898 playout_timestamp_rtp_ = playout_timestamp; | 3884 playout_timestamp_rtp_ = playout_timestamp; |
| 3899 } | 3885 } |
| 3900 playout_delay_ms_ = delay_ms; | 3886 playout_delay_ms_ = delay_ms; |
| 3901 } | 3887 } |
| 3902 } | 3888 } |
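The subtraction above converts the playout delay from milliseconds to RTP ticks at the playout frequency before removing it from the jitter-buffer timestamp. Restated as standalone arithmetic:

    #include <cassert>
    #include <cstdint>

    // Same computation as UpdatePlayoutTimestamp() above.
    uint32_t RemovePlayoutDelay(uint32_t playout_timestamp,
                                int delay_ms,
                                int playout_frequency_hz) {
      return playout_timestamp -
             static_cast<uint32_t>(delay_ms * (playout_frequency_hz / 1000));
    }

    int main() {
      // 60 ms of playout delay at 16 kHz is 960 RTP ticks.
      assert(RemovePlayoutDelay(100000u, 60, 16000) == 99040u);
      return 0;
    }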
| 3903 | 3889 |
| 3904 // Called for incoming RTP packets after successful RTP header parsing. | 3890 // Called for incoming RTP packets after successful RTP header parsing. |
| (...skipping 20 matching lines...) | |
| 3925 } | 3911 } |
| 3926 | 3912 |
| 3927 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) / | 3913 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) / |
| 3928 (rtp_receive_frequency / 1000); | 3914 (rtp_receive_frequency / 1000); |
| 3929 | 3915 |
| 3930 _previousTimestamp = rtp_timestamp; | 3916 _previousTimestamp = rtp_timestamp; |
| 3931 | 3917 |
| 3932 if (timestamp_diff_ms == 0) return; | 3918 if (timestamp_diff_ms == 0) return; |
| 3933 | 3919 |
| 3934 { | 3920 { |
| 3935 CriticalSectionScoped cs(video_sync_lock_.get()); | 3921 rtc::CritScope lock(&video_sync_lock_); |
| 3936 | 3922 |
| 3937 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) { | 3923 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) { |
| 3938 _recPacketDelayMs = packet_delay_ms; | 3924 _recPacketDelayMs = packet_delay_ms; |
| 3939 } | 3925 } |
| 3940 | 3926 |
| 3941 if (_average_jitter_buffer_delay_us == 0) { | 3927 if (_average_jitter_buffer_delay_us == 0) { |
| 3942 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000; | 3928 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000; |
| 3943 return; | 3929 return; |
| 3944 } | 3930 } |
| 3945 | 3931 |
| (...skipping 123 matching lines...) | |
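The packet delay derived in the handler above divides the RTP timestamp step between consecutive packets by the ticks-per-millisecond of the receive frequency; only values between 10 and 60 ms update _recPacketDelayMs. Restated as standalone arithmetic:

    #include <cassert>
    #include <cstdint>

    // Same computation as the packet-delay line above.
    uint16_t PacketDelayMs(uint32_t rtp_timestamp,
                           uint32_t previous_timestamp,
                           int rtp_receive_frequency_hz) {
      return static_cast<uint16_t>((rtp_timestamp - previous_timestamp) /
                                   (rtp_receive_frequency_hz / 1000));
    }

    int main() {
      // Two consecutive 20 ms packets at 48 kHz are 960 ticks apart.
      assert(PacketDelayMs(960, 0, 48000) == 20);
      return 0;
    }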
| 4069 RtcpMode method = _rtpRtcpModule->RTCP(); | 4055 RtcpMode method = _rtpRtcpModule->RTCP(); |
| 4070 if (method == RtcpMode::kOff) { | 4056 if (method == RtcpMode::kOff) { |
| 4071 return 0; | 4057 return 0; |
| 4072 } | 4058 } |
| 4073 std::vector<RTCPReportBlock> report_blocks; | 4059 std::vector<RTCPReportBlock> report_blocks; |
| 4074 _rtpRtcpModule->RemoteRTCPStat(&report_blocks); | 4060 _rtpRtcpModule->RemoteRTCPStat(&report_blocks); |
| 4075 | 4061 |
| 4076 int64_t rtt = 0; | 4062 int64_t rtt = 0; |
| 4077 if (report_blocks.empty()) { | 4063 if (report_blocks.empty()) { |
| 4078 if (allow_associate_channel) { | 4064 if (allow_associate_channel) { |
| 4079 CriticalSectionScoped lock(assoc_send_channel_lock_.get()); | 4065 rtc::CritScope lock(&assoc_send_channel_lock_); |
| 4080 Channel* channel = associate_send_channel_.channel(); | 4066 Channel* channel = associate_send_channel_.channel(); |
| 4081 // Tries to get RTT from an associated channel. This is important for | 4067 // Tries to get RTT from an associated channel. This is important for |
| 4082 // receive-only channels. | 4068 // receive-only channels. |
| 4083 if (channel) { | 4069 if (channel) { |
| 4084 // To prevent infinite recursion and deadlock, calling GetRTT of | 4070 // To prevent infinite recursion and deadlock, calling GetRTT of |
| 4085 // associate channel should always use "false" for argument: | 4071 // associate channel should always use "false" for argument: |
| 4086 // |allow_associate_channel|. | 4072 // |allow_associate_channel|. |
| 4087 rtt = channel->GetRTT(false); | 4073 rtt = channel->GetRTT(false); |
| 4088 } | 4074 } |
| 4089 } | 4075 } |
| (...skipping 19 matching lines...) | |
| 4109 int64_t min_rtt = 0; | 4095 int64_t min_rtt = 0; |
| 4110 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) | 4096 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) |
| 4111 != 0) { | 4097 != 0) { |
| 4112 return 0; | 4098 return 0; |
| 4113 } | 4099 } |
| 4114 return rtt; | 4100 return rtt; |
| 4115 } | 4101 } |
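GetRTT() above falls back to its associated send channel when it has no report blocks of its own, and passes false so the associated channel cannot bounce the query back. The guard generalizes like this (hypothetical Node class, not the Channel/ChannelOwner machinery itself):

    #include <cassert>
    #include <cstdint>

    // Illustration of the |allow_associate_channel| guard: a node with no
    // measurement may ask its peer once, but must not forward the permission,
    // or two mutually associated nodes would recurse into each other.
    class Node {
     public:
      explicit Node(int64_t own_rtt_ms) : own_rtt_ms_(own_rtt_ms) {}
      void set_peer(Node* peer) { peer_ = peer; }

      int64_t GetRTT(bool allow_peer) const {
        if (own_rtt_ms_ > 0)
          return own_rtt_ms_;
        if (allow_peer && peer_)
          return peer_->GetRTT(false);  // Never pass |allow_peer| along.
        return 0;
      }

     private:
      int64_t own_rtt_ms_;
      Node* peer_ = nullptr;
    };

    int main() {
      Node receive_only(0);
      Node send(45);
      receive_only.set_peer(&send);
      send.set_peer(&receive_only);
      assert(receive_only.GetRTT(true) == 45);
      assert(send.GetRTT(true) == 45);
      return 0;
    }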
| 4116 | 4102 |
| 4117 } // namespace voe | 4103 } // namespace voe |
| 4118 } // namespace webrtc | 4104 } // namespace webrtc |