| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/voice_engine/channel.h" | 11 #include "webrtc/voice_engine/channel.h" |
| 12 | 12 |
| 13 #include <algorithm> | 13 #include <algorithm> |
| 14 #include <utility> | 14 #include <utility> |
| 15 | 15 |
| 16 #include "webrtc/base/checks.h" | 16 #include "webrtc/base/checks.h" |
| 17 #include "webrtc/base/criticalsection.h" |
| 17 #include "webrtc/base/format_macros.h" | 18 #include "webrtc/base/format_macros.h" |
| 18 #include "webrtc/base/logging.h" | 19 #include "webrtc/base/logging.h" |
| 19 #include "webrtc/base/thread_checker.h" | 20 #include "webrtc/base/thread_checker.h" |
| 20 #include "webrtc/base/timeutils.h" | 21 #include "webrtc/base/timeutils.h" |
| 21 #include "webrtc/common.h" | 22 #include "webrtc/common.h" |
| 22 #include "webrtc/config.h" | 23 #include "webrtc/config.h" |
| 23 #include "webrtc/modules/audio_device/include/audio_device.h" | 24 #include "webrtc/modules/audio_device/include/audio_device.h" |
| 24 #include "webrtc/modules/audio_processing/include/audio_processing.h" | 25 #include "webrtc/modules/audio_processing/include/audio_processing.h" |
| 25 #include "webrtc/modules/include/module_common_types.h" | 26 #include "webrtc/modules/include/module_common_types.h" |
| 26 #include "webrtc/modules/pacing/packet_router.h" | 27 #include "webrtc/modules/pacing/packet_router.h" |
| 27 #include "webrtc/modules/rtp_rtcp/include/receive_statistics.h" | 28 #include "webrtc/modules/rtp_rtcp/include/receive_statistics.h" |
| 28 #include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h" | 29 #include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h" |
| 29 #include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h" | 30 #include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h" |
| 30 #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h" | 31 #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h" |
| 31 #include "webrtc/modules/utility/include/audio_frame_operations.h" | 32 #include "webrtc/modules/utility/include/audio_frame_operations.h" |
| 32 #include "webrtc/modules/utility/include/process_thread.h" | 33 #include "webrtc/modules/utility/include/process_thread.h" |
| 33 #include "webrtc/system_wrappers/include/critical_section_wrapper.h" | |
| 34 #include "webrtc/system_wrappers/include/trace.h" | 34 #include "webrtc/system_wrappers/include/trace.h" |
| 35 #include "webrtc/voice_engine/include/voe_base.h" | 35 #include "webrtc/voice_engine/include/voe_base.h" |
| 36 #include "webrtc/voice_engine/include/voe_external_media.h" | 36 #include "webrtc/voice_engine/include/voe_external_media.h" |
| 37 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h" | 37 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h" |
| 38 #include "webrtc/voice_engine/output_mixer.h" | 38 #include "webrtc/voice_engine/output_mixer.h" |
| 39 #include "webrtc/voice_engine/statistics.h" | 39 #include "webrtc/voice_engine/statistics.h" |
| 40 #include "webrtc/voice_engine/transmit_mixer.h" | 40 #include "webrtc/voice_engine/transmit_mixer.h" |
| 41 #include "webrtc/voice_engine/utility.h" | 41 #include "webrtc/voice_engine/utility.h" |
| 42 | 42 |
| 43 #if defined(_WIN32) | 43 #if defined(_WIN32) |
| (...skipping 106 matching lines...) | |
| 150 struct ChannelStatistics : public RtcpStatistics { | 150 struct ChannelStatistics : public RtcpStatistics { |
| 151 ChannelStatistics() : rtcp(), max_jitter(0) {} | 151 ChannelStatistics() : rtcp(), max_jitter(0) {} |
| 152 | 152 |
| 153 RtcpStatistics rtcp; | 153 RtcpStatistics rtcp; |
| 154 uint32_t max_jitter; | 154 uint32_t max_jitter; |
| 155 }; | 155 }; |
| 156 | 156 |
| 157 // Statistics callback, called at each generation of a new RTCP report block. | 157 // Statistics callback, called at each generation of a new RTCP report block. |
| 158 class StatisticsProxy : public RtcpStatisticsCallback { | 158 class StatisticsProxy : public RtcpStatisticsCallback { |
| 159 public: | 159 public: |
| 160 StatisticsProxy(uint32_t ssrc) | 160 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {} |
| 161 : stats_lock_(CriticalSectionWrapper::CreateCriticalSection()), | |
| 162 ssrc_(ssrc) {} | |
| 163 virtual ~StatisticsProxy() {} | 161 virtual ~StatisticsProxy() {} |
| 164 | 162 |
| 165 void StatisticsUpdated(const RtcpStatistics& statistics, | 163 void StatisticsUpdated(const RtcpStatistics& statistics, |
| 166 uint32_t ssrc) override { | 164 uint32_t ssrc) override { |
| 167 if (ssrc != ssrc_) | 165 if (ssrc != ssrc_) |
| 168 return; | 166 return; |
| 169 | 167 |
| 170 CriticalSectionScoped cs(stats_lock_.get()); | 168 rtc::CritScope cs(&stats_lock_); |
| 171 stats_.rtcp = statistics; | 169 stats_.rtcp = statistics; |
| 172 if (statistics.jitter > stats_.max_jitter) { | 170 if (statistics.jitter > stats_.max_jitter) { |
| 173 stats_.max_jitter = statistics.jitter; | 171 stats_.max_jitter = statistics.jitter; |
| 174 } | 172 } |
| 175 } | 173 } |
| 176 | 174 |
| 177 void CNameChanged(const char* cname, uint32_t ssrc) override {} | 175 void CNameChanged(const char* cname, uint32_t ssrc) override {} |
| 178 | 176 |
| 179 ChannelStatistics GetStats() { | 177 ChannelStatistics GetStats() { |
| 180 CriticalSectionScoped cs(stats_lock_.get()); | 178 rtc::CritScope cs(&stats_lock_); |
| 181 return stats_; | 179 return stats_; |
| 182 } | 180 } |
| 183 | 181 |
| 184 private: | 182 private: |
| 185 // StatisticsUpdated calls are triggered from threads in the RTP module, | 183 // StatisticsUpdated calls are triggered from threads in the RTP module, |
| 186 // while GetStats calls can be triggered from the public voice engine API, | 184 // while GetStats calls can be triggered from the public voice engine API, |
| 187 // hence synchronization is needed. | 185 // hence synchronization is needed. |
| 188 rtc::scoped_ptr<CriticalSectionWrapper> stats_lock_; | 186 rtc::CriticalSection stats_lock_; |
| 189 const uint32_t ssrc_; | 187 const uint32_t ssrc_; |
| 190 ChannelStatistics stats_; | 188 ChannelStatistics stats_; |
| 191 }; | 189 }; |
| 192 | 190 |
| 193 class VoERtcpObserver : public RtcpBandwidthObserver { | 191 class VoERtcpObserver : public RtcpBandwidthObserver { |
| 194 public: | 192 public: |
| 195 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {} | 193 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {} |
| 196 virtual ~VoERtcpObserver() {} | 194 virtual ~VoERtcpObserver() {} |
| 197 | 195 |
| 198 void OnReceivedEstimatedBitrate(uint32_t bitrate) override { | 196 void OnReceivedEstimatedBitrate(uint32_t bitrate) override { |
| (...skipping 92 matching lines...) | |
| 291 | 289 |
| 292 return 0; | 290 return 0; |
| 293 } | 291 } |
| 294 | 292 |
| 295 int32_t | 293 int32_t |
| 296 Channel::InFrameType(FrameType frame_type) | 294 Channel::InFrameType(FrameType frame_type) |
| 297 { | 295 { |
| 298 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 296 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 299 "Channel::InFrameType(frame_type=%d)", frame_type); | 297 "Channel::InFrameType(frame_type=%d)", frame_type); |
| 300 | 298 |
| 301 CriticalSectionScoped cs(&_callbackCritSect); | 299 rtc::CritScope cs(&_callbackCritSect); |
| 302 _sendFrameType = (frame_type == kAudioFrameSpeech); | 300 _sendFrameType = (frame_type == kAudioFrameSpeech); |
| 303 return 0; | 301 return 0; |
| 304 } | 302 } |
| 305 | 303 |
| 306 int32_t | 304 int32_t |
| 307 Channel::OnRxVadDetected(int vadDecision) | 305 Channel::OnRxVadDetected(int vadDecision) |
| 308 { | 306 { |
| 309 CriticalSectionScoped cs(&_callbackCritSect); | 307 rtc::CritScope cs(&_callbackCritSect); |
| 310 if (_rxVadObserverPtr) | 308 if (_rxVadObserverPtr) |
| 311 { | 309 { |
| 312 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision); | 310 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision); |
| 313 } | 311 } |
| 314 | 312 |
| 315 return 0; | 313 return 0; |
| 316 } | 314 } |
| 317 | 315 |
| 318 bool Channel::SendRtp(const uint8_t* data, | 316 bool Channel::SendRtp(const uint8_t* data, |
| 319 size_t len, | 317 size_t len, |
| 320 const PacketOptions& options) { | 318 const PacketOptions& options) { |
| 321 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), | 319 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), |
| 322 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len); | 320 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len); |
| 323 | 321 |
| 324 CriticalSectionScoped cs(&_callbackCritSect); | 322 rtc::CritScope cs(&_callbackCritSect); |
| 325 | 323 |
| 326 if (_transportPtr == NULL) | 324 if (_transportPtr == NULL) |
| 327 { | 325 { |
| 328 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), | 326 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), |
| 329 "Channel::SendPacket() failed to send RTP packet due to" | 327 "Channel::SendPacket() failed to send RTP packet due to" |
| 330 " invalid transport object"); | 328 " invalid transport object"); |
| 331 return false; | 329 return false; |
| 332 } | 330 } |
| 333 | 331 |
| 334 uint8_t* bufferToSendPtr = (uint8_t*)data; | 332 uint8_t* bufferToSendPtr = (uint8_t*)data; |
| (...skipping 10 matching lines...) | |
| 345 } | 343 } |
| 346 return true; | 344 return true; |
| 347 } | 345 } |
| 348 | 346 |
| 349 bool | 347 bool |
| 350 Channel::SendRtcp(const uint8_t *data, size_t len) | 348 Channel::SendRtcp(const uint8_t *data, size_t len) |
| 351 { | 349 { |
| 352 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), | 350 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), |
| 353 "Channel::SendRtcp(len=%" PRIuS ")", len); | 351 "Channel::SendRtcp(len=%" PRIuS ")", len); |
| 354 | 352 |
| 355 CriticalSectionScoped cs(&_callbackCritSect); | 353 rtc::CritScope cs(&_callbackCritSect); |
| 356 if (_transportPtr == NULL) | 354 if (_transportPtr == NULL) |
| 357 { | 355 { |
| 358 WEBRTC_TRACE(kTraceError, kTraceVoice, | 356 WEBRTC_TRACE(kTraceError, kTraceVoice, |
| 359 VoEId(_instanceId,_channelId), | 357 VoEId(_instanceId,_channelId), |
| 360 "Channel::SendRtcp() failed to send RTCP packet" | 358 "Channel::SendRtcp() failed to send RTCP packet" |
| 361 " due to invalid transport object"); | 359 " due to invalid transport object"); |
| 362 return false; | 360 return false; |
| 363 } | 361 } |
| 364 | 362 |
| 365 uint8_t* bufferToSendPtr = (uint8_t*)data; | 363 uint8_t* bufferToSendPtr = (uint8_t*)data; |
| (...skipping 193 matching lines...) | |
| 559 LOG(LS_ERROR) << "ProcessStream() error: " << err; | 557 LOG(LS_ERROR) << "ProcessStream() error: " << err; |
| 560 assert(false); | 558 assert(false); |
| 561 } | 559 } |
| 562 } | 560 } |
| 563 | 561 |
| 564 { | 562 { |
| 565 // Pass the audio buffers to an optional sink callback, before applying | 563 // Pass the audio buffers to an optional sink callback, before applying |
| 566 // scaling/panning, as that applies to the mix operation. | 564 // scaling/panning, as that applies to the mix operation. |
| 567 // External recipients of the audio (e.g. via AudioTrack), will do their | 565 // External recipients of the audio (e.g. via AudioTrack), will do their |
| 568 // own mixing/dynamic processing. | 566 // own mixing/dynamic processing. |
| 569 CriticalSectionScoped cs(&_callbackCritSect); | 567 rtc::CritScope cs(&_callbackCritSect); |
| 570 if (audio_sink_) { | 568 if (audio_sink_) { |
| 571 AudioSinkInterface::Data data( | 569 AudioSinkInterface::Data data( |
| 572 &audioFrame->data_[0], | 570 &audioFrame->data_[0], |
| 573 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_, | 571 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_, |
| 574 audioFrame->num_channels_, audioFrame->timestamp_); | 572 audioFrame->num_channels_, audioFrame->timestamp_); |
| 575 audio_sink_->OnData(data); | 573 audio_sink_->OnData(data); |
| 576 } | 574 } |
| 577 } | 575 } |
| 578 | 576 |
| 579 float output_gain = 1.0f; | 577 float output_gain = 1.0f; |
| 580 float left_pan = 1.0f; | 578 float left_pan = 1.0f; |
| 581 float right_pan = 1.0f; | 579 float right_pan = 1.0f; |
| 582 { | 580 { |
| 583 CriticalSectionScoped cs(&volume_settings_critsect_); | 581 rtc::CritScope cs(&volume_settings_critsect_); |
| 584 output_gain = _outputGain; | 582 output_gain = _outputGain; |
| 585 left_pan = _panLeft; | 583 left_pan = _panLeft; |
| 586 right_pan= _panRight; | 584 right_pan= _panRight; |
| 587 } | 585 } |
| 588 | 586 |
| 589 // Output volume scaling | 587 // Output volume scaling |
| 590 if (output_gain < 0.99f || output_gain > 1.01f) | 588 if (output_gain < 0.99f || output_gain > 1.01f) |
| 591 { | 589 { |
| 592 AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame); | 590 AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame); |
| 593 } | 591 } |
| (...skipping 19 matching lines...) | |
| 613 | 611 |
| 614 // Mix decoded PCM output with file if file mixing is enabled | 612 // Mix decoded PCM output with file if file mixing is enabled |
| 615 if (state.output_file_playing) | 613 if (state.output_file_playing) |
| 616 { | 614 { |
| 617 MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_); | 615 MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_); |
| 618 } | 616 } |
| 619 | 617 |
| 620 // External media | 618 // External media |
| 621 if (_outputExternalMedia) | 619 if (_outputExternalMedia) |
| 622 { | 620 { |
| 623 CriticalSectionScoped cs(&_callbackCritSect); | 621 rtc::CritScope cs(&_callbackCritSect); |
| 624 const bool isStereo = (audioFrame->num_channels_ == 2); | 622 const bool isStereo = (audioFrame->num_channels_ == 2); |
| 625 if (_outputExternalMediaCallbackPtr) | 623 if (_outputExternalMediaCallbackPtr) |
| 626 { | 624 { |
| 627 _outputExternalMediaCallbackPtr->Process( | 625 _outputExternalMediaCallbackPtr->Process( |
| 628 _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_, | 626 _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_, |
| 629 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_, | 627 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_, |
| 630 isStereo); | 628 isStereo); |
| 631 } | 629 } |
| 632 } | 630 } |
| 633 | 631 |
| 634 // Record playout if enabled | 632 // Record playout if enabled |
| 635 { | 633 { |
| 636 CriticalSectionScoped cs(&_fileCritSect); | 634 rtc::CritScope cs(&_fileCritSect); |
| 637 | 635 |
| 638 if (_outputFileRecording && _outputFileRecorderPtr) | 636 if (_outputFileRecording && _outputFileRecorderPtr) |
| 639 { | 637 { |
| 640 _outputFileRecorderPtr->RecordAudioToFile(*audioFrame); | 638 _outputFileRecorderPtr->RecordAudioToFile(*audioFrame); |
| 641 } | 639 } |
| 642 } | 640 } |
| 643 | 641 |
| 644 // Measure audio level (0-9) | 642 // Measure audio level (0-9) |
| 645 _outputAudioLevel.ComputeLevel(*audioFrame); | 643 _outputAudioLevel.ComputeLevel(*audioFrame); |
| 646 | 644 |
| 647 if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) { | 645 if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) { |
| 648 // The first frame with a valid rtp timestamp. | 646 // The first frame with a valid rtp timestamp. |
| 649 capture_start_rtp_time_stamp_ = audioFrame->timestamp_; | 647 capture_start_rtp_time_stamp_ = audioFrame->timestamp_; |
| 650 } | 648 } |
| 651 | 649 |
| 652 if (capture_start_rtp_time_stamp_ >= 0) { | 650 if (capture_start_rtp_time_stamp_ >= 0) { |
| 653 // audioFrame.timestamp_ should be valid from now on. | 651 // audioFrame.timestamp_ should be valid from now on. |
| 654 | 652 |
| 655 // Compute elapsed time. | 653 // Compute elapsed time. |
| 656 int64_t unwrap_timestamp = | 654 int64_t unwrap_timestamp = |
| 657 rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_); | 655 rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_); |
| 658 audioFrame->elapsed_time_ms_ = | 656 audioFrame->elapsed_time_ms_ = |
| 659 (unwrap_timestamp - capture_start_rtp_time_stamp_) / | 657 (unwrap_timestamp - capture_start_rtp_time_stamp_) / |
| 660 (GetPlayoutFrequency() / 1000); | 658 (GetPlayoutFrequency() / 1000); |
| 661 | 659 |
| 662 { | 660 { |
| 663 CriticalSectionScoped lock(ts_stats_lock_.get()); | 661 rtc::CritScope lock(&ts_stats_lock_); |
| 664 // Compute ntp time. | 662 // Compute ntp time. |
| 665 audioFrame->ntp_time_ms_ = ntp_estimator_.Estimate( | 663 audioFrame->ntp_time_ms_ = ntp_estimator_.Estimate( |
| 666 audioFrame->timestamp_); | 664 audioFrame->timestamp_); |
| 667 // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received. | 665 // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received. |
| 668 if (audioFrame->ntp_time_ms_ > 0) { | 666 if (audioFrame->ntp_time_ms_ > 0) { |
| 669 // Compute |capture_start_ntp_time_ms_| so that | 667 // Compute |capture_start_ntp_time_ms_| so that |
| 670 // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_| | 668 // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_| |
| 671 capture_start_ntp_time_ms_ = | 669 capture_start_ntp_time_ms_ = |
| 672 audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_; | 670 audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_; |
| 673 } | 671 } |
| (...skipping 23 matching lines...) | |
| 697 { | 695 { |
| 698 highestNeeded = receiveFrequency; | 696 highestNeeded = receiveFrequency; |
| 699 } | 697 } |
| 700 | 698 |
| 701 // Special case, if we're playing a file on the playout side | 699 // Special case, if we're playing a file on the playout side |
| 702 // we take that frequency into consideration as well | 700 // we take that frequency into consideration as well |
| 703 // This is not needed on sending side, since the codec will | 701 // This is not needed on sending side, since the codec will |
| 704 // limit the spectrum anyway. | 702 // limit the spectrum anyway. |
| 705 if (channel_state_.Get().output_file_playing) | 703 if (channel_state_.Get().output_file_playing) |
| 706 { | 704 { |
| 707 CriticalSectionScoped cs(&_fileCritSect); | 705 rtc::CritScope cs(&_fileCritSect); |
| 708 if (_outputFilePlayerPtr) | 706 if (_outputFilePlayerPtr) |
| 709 { | 707 { |
| 710 if(_outputFilePlayerPtr->Frequency()>highestNeeded) | 708 if(_outputFilePlayerPtr->Frequency()>highestNeeded) |
| 711 { | 709 { |
| 712 highestNeeded=_outputFilePlayerPtr->Frequency(); | 710 highestNeeded=_outputFilePlayerPtr->Frequency(); |
| 713 } | 711 } |
| 714 } | 712 } |
| 715 } | 713 } |
| 716 | 714 |
| 717 return(highestNeeded); | 715 return(highestNeeded); |
| (...skipping 65 matching lines...) | |
| 783 } | 781 } |
| 784 | 782 |
| 785 void | 783 void |
| 786 Channel::RecordFileEnded(int32_t id) | 784 Channel::RecordFileEnded(int32_t id) |
| 787 { | 785 { |
| 788 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), | 786 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), |
| 789 "Channel::RecordFileEnded(id=%d)", id); | 787 "Channel::RecordFileEnded(id=%d)", id); |
| 790 | 788 |
| 791 assert(id == _outputFileRecorderId); | 789 assert(id == _outputFileRecorderId); |
| 792 | 790 |
| 793 CriticalSectionScoped cs(&_fileCritSect); | 791 rtc::CritScope cs(&_fileCritSect); |
| 794 | 792 |
| 795 _outputFileRecording = false; | 793 _outputFileRecording = false; |
| 796 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, | 794 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, |
| 797 VoEId(_instanceId,_channelId), | 795 VoEId(_instanceId,_channelId), |
| 798 "Channel::RecordFileEnded() => output file recorder module is" | 796 "Channel::RecordFileEnded() => output file recorder module is" |
| 799 " shutdown"); | 797 " shutdown"); |
| 800 } | 798 } |
| 801 | 799 |
| 802 Channel::Channel(int32_t channelId, | 800 Channel::Channel(int32_t channelId, |
| 803 uint32_t instanceId, | 801 uint32_t instanceId, |
| 804 RtcEventLog* const event_log, | 802 RtcEventLog* const event_log, |
| 805 const Config& config) | 803 const Config& config) |
| 806 : _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()), | 804 : _instanceId(instanceId), |
| 807 _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()), | |
| 808 volume_settings_critsect_( | |
| 809 *CriticalSectionWrapper::CreateCriticalSection()), | |
| 810 _instanceId(instanceId), | |
| 811 _channelId(channelId), | 805 _channelId(channelId), |
| 812 event_log_(event_log), | 806 event_log_(event_log), |
| 813 rtp_header_parser_(RtpHeaderParser::Create()), | 807 rtp_header_parser_(RtpHeaderParser::Create()), |
| 814 rtp_payload_registry_( | 808 rtp_payload_registry_( |
| 815 new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))), | 809 new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))), |
| 816 rtp_receive_statistics_( | 810 rtp_receive_statistics_( |
| 817 ReceiveStatistics::Create(Clock::GetRealTimeClock())), | 811 ReceiveStatistics::Create(Clock::GetRealTimeClock())), |
| 818 rtp_receiver_( | 812 rtp_receiver_( |
| 819 RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(), | 813 RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(), |
| 820 this, | 814 this, |
| (...skipping 20 matching lines...) | |
| 841 _timeStamp(0), // This is just an offset, RTP module will add it's own | 835 _timeStamp(0), // This is just an offset, RTP module will add it's own |
| 842 // random offset | 836 // random offset |
| 843 _sendTelephoneEventPayloadType(106), | 837 _sendTelephoneEventPayloadType(106), |
| 844 ntp_estimator_(Clock::GetRealTimeClock()), | 838 ntp_estimator_(Clock::GetRealTimeClock()), |
| 845 jitter_buffer_playout_timestamp_(0), | 839 jitter_buffer_playout_timestamp_(0), |
| 846 playout_timestamp_rtp_(0), | 840 playout_timestamp_rtp_(0), |
| 847 playout_timestamp_rtcp_(0), | 841 playout_timestamp_rtcp_(0), |
| 848 playout_delay_ms_(0), | 842 playout_delay_ms_(0), |
| 849 _numberOfDiscardedPackets(0), | 843 _numberOfDiscardedPackets(0), |
| 850 send_sequence_number_(0), | 844 send_sequence_number_(0), |
| 851 ts_stats_lock_(CriticalSectionWrapper::CreateCriticalSection()), | |
| 852 rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()), | 845 rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()), |
| 853 capture_start_rtp_time_stamp_(-1), | 846 capture_start_rtp_time_stamp_(-1), |
| 854 capture_start_ntp_time_ms_(-1), | 847 capture_start_ntp_time_ms_(-1), |
| 855 _engineStatisticsPtr(NULL), | 848 _engineStatisticsPtr(NULL), |
| 856 _outputMixerPtr(NULL), | 849 _outputMixerPtr(NULL), |
| 857 _transmitMixerPtr(NULL), | 850 _transmitMixerPtr(NULL), |
| 858 _moduleProcessThreadPtr(NULL), | 851 _moduleProcessThreadPtr(NULL), |
| 859 _audioDeviceModulePtr(NULL), | 852 _audioDeviceModulePtr(NULL), |
| 860 _voiceEngineObserverPtr(NULL), | 853 _voiceEngineObserverPtr(NULL), |
| 861 _callbackCritSectPtr(NULL), | 854 _callbackCritSectPtr(NULL), |
| 862 _transportPtr(NULL), | 855 _transportPtr(NULL), |
| 863 _rxVadObserverPtr(NULL), | 856 _rxVadObserverPtr(NULL), |
| 864 _oldVadDecision(-1), | 857 _oldVadDecision(-1), |
| 865 _sendFrameType(0), | 858 _sendFrameType(0), |
| 866 _externalMixing(false), | 859 _externalMixing(false), |
| 867 _mixFileWithMicrophone(false), | 860 _mixFileWithMicrophone(false), |
| 868 _mute(false), | 861 _mute(false), |
| 869 _panLeft(1.0f), | 862 _panLeft(1.0f), |
| 870 _panRight(1.0f), | 863 _panRight(1.0f), |
| 871 _outputGain(1.0f), | 864 _outputGain(1.0f), |
| 872 _playOutbandDtmfEvent(false), | 865 _playOutbandDtmfEvent(false), |
| 873 _playInbandDtmfEvent(false), | 866 _playInbandDtmfEvent(false), |
| 874 _lastLocalTimeStamp(0), | 867 _lastLocalTimeStamp(0), |
| 875 _lastPayloadType(0), | 868 _lastPayloadType(0), |
| 876 _includeAudioLevelIndication(false), | 869 _includeAudioLevelIndication(false), |
| 877 _outputSpeechType(AudioFrame::kNormalSpeech), | 870 _outputSpeechType(AudioFrame::kNormalSpeech), |
| 878 video_sync_lock_(CriticalSectionWrapper::CreateCriticalSection()), | |
| 879 _average_jitter_buffer_delay_us(0), | 871 _average_jitter_buffer_delay_us(0), |
| 880 _previousTimestamp(0), | 872 _previousTimestamp(0), |
| 881 _recPacketDelayMs(20), | 873 _recPacketDelayMs(20), |
| 882 _RxVadDetection(false), | 874 _RxVadDetection(false), |
| 883 _rxAgcIsEnabled(false), | 875 _rxAgcIsEnabled(false), |
| 884 _rxNsIsEnabled(false), | 876 _rxNsIsEnabled(false), |
| 885 restored_packet_in_use_(false), | 877 restored_packet_in_use_(false), |
| 886 rtcp_observer_(new VoERtcpObserver(this)), | 878 rtcp_observer_(new VoERtcpObserver(this)), |
| 887 network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())), | 879 network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())), |
| 888 assoc_send_channel_lock_(CriticalSectionWrapper::CreateCriticalSection()), | |
| 889 associate_send_channel_(ChannelOwner(nullptr)), | 880 associate_send_channel_(ChannelOwner(nullptr)), |
| 890 pacing_enabled_(config.Get<VoicePacing>().enabled), | 881 pacing_enabled_(config.Get<VoicePacing>().enabled), |
| 891 feedback_observer_proxy_(pacing_enabled_ ? new TransportFeedbackProxy() | 882 feedback_observer_proxy_(pacing_enabled_ ? new TransportFeedbackProxy() |
| 892 : nullptr), | 883 : nullptr), |
| 893 seq_num_allocator_proxy_( | 884 seq_num_allocator_proxy_( |
| 894 pacing_enabled_ ? new TransportSequenceNumberProxy() : nullptr), | 885 pacing_enabled_ ? new TransportSequenceNumberProxy() : nullptr), |
| 895 rtp_packet_sender_proxy_(pacing_enabled_ ? new RtpPacketSenderProxy() | 886 rtp_packet_sender_proxy_(pacing_enabled_ ? new RtpPacketSenderProxy() |
| 896 : nullptr) { | 887 : nullptr) { |
| 897 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId), | 888 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId), |
| 898 "Channel::Channel() - ctor"); | 889 "Channel::Channel() - ctor"); |
| (...skipping 47 matching lines...) | |
| 946 DeRegisterExternalMediaProcessing(kPlaybackPerChannel); | 937 DeRegisterExternalMediaProcessing(kPlaybackPerChannel); |
| 947 } | 938 } |
| 948 if (channel_state_.Get().input_external_media) | 939 if (channel_state_.Get().input_external_media) |
| 949 { | 940 { |
| 950 DeRegisterExternalMediaProcessing(kRecordingPerChannel); | 941 DeRegisterExternalMediaProcessing(kRecordingPerChannel); |
| 951 } | 942 } |
| 952 StopSend(); | 943 StopSend(); |
| 953 StopPlayout(); | 944 StopPlayout(); |
| 954 | 945 |
| 955 { | 946 { |
| 956 CriticalSectionScoped cs(&_fileCritSect); | 947 rtc::CritScope cs(&_fileCritSect); |
| 957 if (_inputFilePlayerPtr) | 948 if (_inputFilePlayerPtr) |
| 958 { | 949 { |
| 959 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 950 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| 960 _inputFilePlayerPtr->StopPlayingFile(); | 951 _inputFilePlayerPtr->StopPlayingFile(); |
| 961 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr); | 952 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr); |
| 962 _inputFilePlayerPtr = NULL; | 953 _inputFilePlayerPtr = NULL; |
| 963 } | 954 } |
| 964 if (_outputFilePlayerPtr) | 955 if (_outputFilePlayerPtr) |
| 965 { | 956 { |
| 966 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 957 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| (...skipping 25 matching lines...) | |
| 992 { | 983 { |
| 993 WEBRTC_TRACE(kTraceWarning, kTraceVoice, | 984 WEBRTC_TRACE(kTraceWarning, kTraceVoice, |
| 994 VoEId(_instanceId,_channelId), | 985 VoEId(_instanceId,_channelId), |
| 995 "~Channel() failed to de-register VAD callback" | 986 "~Channel() failed to de-register VAD callback" |
| 996 " (Audio coding module)"); | 987 " (Audio coding module)"); |
| 997 } | 988 } |
| 998 // De-register modules in process thread | 989 // De-register modules in process thread |
| 999 _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); | 990 _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); |
| 1000 | 991 |
| 1001 // End of modules shutdown | 992 // End of modules shutdown |
| 1002 | |
| 1003 // Delete other objects | |
| 1004 delete &_callbackCritSect; | |
| 1005 delete &_fileCritSect; | |
| 1006 delete &volume_settings_critsect_; | |
| 1007 } | 993 } |
| 1008 | 994 |
| 1009 int32_t | 995 int32_t |
| 1010 Channel::Init() | 996 Channel::Init() |
| 1011 { | 997 { |
| 1012 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 998 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1013 "Channel::Init()"); | 999 "Channel::Init()"); |
| 1014 | 1000 |
| 1015 channel_state_.Reset(); | 1001 channel_state_.Reset(); |
| 1016 | 1002 |
| (...skipping 140 matching lines...) | |
| 1157 return 0; | 1143 return 0; |
| 1158 } | 1144 } |
| 1159 | 1145 |
| 1160 int32_t | 1146 int32_t |
| 1161 Channel::SetEngineInformation(Statistics& engineStatistics, | 1147 Channel::SetEngineInformation(Statistics& engineStatistics, |
| 1162 OutputMixer& outputMixer, | 1148 OutputMixer& outputMixer, |
| 1163 voe::TransmitMixer& transmitMixer, | 1149 voe::TransmitMixer& transmitMixer, |
| 1164 ProcessThread& moduleProcessThread, | 1150 ProcessThread& moduleProcessThread, |
| 1165 AudioDeviceModule& audioDeviceModule, | 1151 AudioDeviceModule& audioDeviceModule, |
| 1166 VoiceEngineObserver* voiceEngineObserver, | 1152 VoiceEngineObserver* voiceEngineObserver, |
| 1167 CriticalSectionWrapper* callbackCritSect) | 1153 rtc::CriticalSection* callbackCritSect) |
| 1168 { | 1154 { |
| 1169 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1155 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1170 "Channel::SetEngineInformation()"); | 1156 "Channel::SetEngineInformation()"); |
| 1171 _engineStatisticsPtr = &engineStatistics; | 1157 _engineStatisticsPtr = &engineStatistics; |
| 1172 _outputMixerPtr = &outputMixer; | 1158 _outputMixerPtr = &outputMixer; |
| 1173 _transmitMixerPtr = &transmitMixer, | 1159 _transmitMixerPtr = &transmitMixer, |
| 1174 _moduleProcessThreadPtr = &moduleProcessThread; | 1160 _moduleProcessThreadPtr = &moduleProcessThread; |
| 1175 _audioDeviceModulePtr = &audioDeviceModule; | 1161 _audioDeviceModulePtr = &audioDeviceModule; |
| 1176 _voiceEngineObserverPtr = voiceEngineObserver; | 1162 _voiceEngineObserverPtr = voiceEngineObserver; |
| 1177 _callbackCritSectPtr = callbackCritSect; | 1163 _callbackCritSectPtr = callbackCritSect; |
| 1178 return 0; | 1164 return 0; |
| 1179 } | 1165 } |
| 1180 | 1166 |
| 1181 int32_t | 1167 int32_t |
| 1182 Channel::UpdateLocalTimeStamp() | 1168 Channel::UpdateLocalTimeStamp() |
| 1183 { | 1169 { |
| 1184 | 1170 |
| 1185 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_); | 1171 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_); |
| 1186 return 0; | 1172 return 0; |
| 1187 } | 1173 } |
| 1188 | 1174 |
| 1189 void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) { | 1175 void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) { |
| 1190 CriticalSectionScoped cs(&_callbackCritSect); | 1176 rtc::CritScope cs(&_callbackCritSect); |
| 1191 audio_sink_ = std::move(sink); | 1177 audio_sink_ = std::move(sink); |
| 1192 } | 1178 } |
| 1193 | 1179 |
| 1194 int32_t | 1180 int32_t |
| 1195 Channel::StartPlayout() | 1181 Channel::StartPlayout() |
| 1196 { | 1182 { |
| 1197 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1183 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1198 "Channel::StartPlayout()"); | 1184 "Channel::StartPlayout()"); |
| 1199 if (channel_state_.Get().playing) | 1185 if (channel_state_.Get().playing) |
| 1200 { | 1186 { |
| (...skipping 59 matching lines...) | |
| 1260 { | 1246 { |
| 1261 return 0; | 1247 return 0; |
| 1262 } | 1248 } |
| 1263 channel_state_.SetSending(true); | 1249 channel_state_.SetSending(true); |
| 1264 | 1250 |
| 1265 if (_rtpRtcpModule->SetSendingStatus(true) != 0) | 1251 if (_rtpRtcpModule->SetSendingStatus(true) != 0) |
| 1266 { | 1252 { |
| 1267 _engineStatisticsPtr->SetLastError( | 1253 _engineStatisticsPtr->SetLastError( |
| 1268 VE_RTP_RTCP_MODULE_ERROR, kTraceError, | 1254 VE_RTP_RTCP_MODULE_ERROR, kTraceError, |
| 1269 "StartSend() RTP/RTCP failed to start sending"); | 1255 "StartSend() RTP/RTCP failed to start sending"); |
| 1270 CriticalSectionScoped cs(&_callbackCritSect); | 1256 rtc::CritScope cs(&_callbackCritSect); |
| 1271 channel_state_.SetSending(false); | 1257 channel_state_.SetSending(false); |
| 1272 return -1; | 1258 return -1; |
| 1273 } | 1259 } |
| 1274 | 1260 |
| 1275 return 0; | 1261 return 0; |
| 1276 } | 1262 } |
| 1277 | 1263 |
| 1278 int32_t | 1264 int32_t |
| 1279 Channel::StopSend() | 1265 Channel::StopSend() |
| 1280 { | 1266 { |
| (...skipping 51 matching lines...) | |
| 1332 | 1318 |
| 1333 channel_state_.SetReceiving(false); | 1319 channel_state_.SetReceiving(false); |
| 1334 return 0; | 1320 return 0; |
| 1335 } | 1321 } |
| 1336 | 1322 |
| 1337 int32_t | 1323 int32_t |
| 1338 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) | 1324 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) |
| 1339 { | 1325 { |
| 1340 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1326 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1341 "Channel::RegisterVoiceEngineObserver()"); | 1327 "Channel::RegisterVoiceEngineObserver()"); |
| 1342 CriticalSectionScoped cs(&_callbackCritSect); | 1328 rtc::CritScope cs(&_callbackCritSect); |
| 1343 | 1329 |
| 1344 if (_voiceEngineObserverPtr) | 1330 if (_voiceEngineObserverPtr) |
| 1345 { | 1331 { |
| 1346 _engineStatisticsPtr->SetLastError( | 1332 _engineStatisticsPtr->SetLastError( |
| 1347 VE_INVALID_OPERATION, kTraceError, | 1333 VE_INVALID_OPERATION, kTraceError, |
| 1348 "RegisterVoiceEngineObserver() observer already enabled"); | 1334 "RegisterVoiceEngineObserver() observer already enabled"); |
| 1349 return -1; | 1335 return -1; |
| 1350 } | 1336 } |
| 1351 _voiceEngineObserverPtr = &observer; | 1337 _voiceEngineObserverPtr = &observer; |
| 1352 return 0; | 1338 return 0; |
| 1353 } | 1339 } |
| 1354 | 1340 |
| 1355 int32_t | 1341 int32_t |
| 1356 Channel::DeRegisterVoiceEngineObserver() | 1342 Channel::DeRegisterVoiceEngineObserver() |
| 1357 { | 1343 { |
| 1358 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1344 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1359 "Channel::DeRegisterVoiceEngineObserver()"); | 1345 "Channel::DeRegisterVoiceEngineObserver()"); |
| 1360 CriticalSectionScoped cs(&_callbackCritSect); | 1346 rtc::CritScope cs(&_callbackCritSect); |
| 1361 | 1347 |
| 1362 if (!_voiceEngineObserverPtr) | 1348 if (!_voiceEngineObserverPtr) |
| 1363 { | 1349 { |
| 1364 _engineStatisticsPtr->SetLastError( | 1350 _engineStatisticsPtr->SetLastError( |
| 1365 VE_INVALID_OPERATION, kTraceWarning, | 1351 VE_INVALID_OPERATION, kTraceWarning, |
| 1366 "DeRegisterVoiceEngineObserver() observer already disabled"); | 1352 "DeRegisterVoiceEngineObserver() observer already disabled"); |
| 1367 return 0; | 1353 return 0; |
| 1368 } | 1354 } |
| 1369 _voiceEngineObserverPtr = NULL; | 1355 _voiceEngineObserverPtr = NULL; |
| 1370 return 0; | 1356 return 0; |
| (...skipping 286 matching lines...) | |
| 1657 return -1; | 1643 return -1; |
| 1658 } | 1644 } |
| 1659 return 0; | 1645 return 0; |
| 1660 } | 1646 } |
| 1661 | 1647 |
| 1662 int32_t Channel::RegisterExternalTransport(Transport& transport) | 1648 int32_t Channel::RegisterExternalTransport(Transport& transport) |
| 1663 { | 1649 { |
| 1664 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), | 1650 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), |
| 1665 "Channel::RegisterExternalTransport()"); | 1651 "Channel::RegisterExternalTransport()"); |
| 1666 | 1652 |
| 1667 CriticalSectionScoped cs(&_callbackCritSect); | 1653 rtc::CritScope cs(&_callbackCritSect); |
| 1668 | 1654 |
| 1669 if (_externalTransport) | 1655 if (_externalTransport) |
| 1670 { | 1656 { |
| 1671 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, | 1657 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, |
| 1672 kTraceError, | 1658 kTraceError, |
| 1673 "RegisterExternalTransport() external transport already enabled"); | 1659 "RegisterExternalTransport() external transport already enabled"); |
| 1674 return -1; | 1660 return -1; |
| 1675 } | 1661 } |
| 1676 _externalTransport = true; | 1662 _externalTransport = true; |
| 1677 _transportPtr = &transport; | 1663 _transportPtr = &transport; |
| 1678 return 0; | 1664 return 0; |
| 1679 } | 1665 } |
| 1680 | 1666 |
| 1681 int32_t | 1667 int32_t |
| 1682 Channel::DeRegisterExternalTransport() | 1668 Channel::DeRegisterExternalTransport() |
| 1683 { | 1669 { |
| 1684 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1670 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1685 "Channel::DeRegisterExternalTransport()"); | 1671 "Channel::DeRegisterExternalTransport()"); |
| 1686 | 1672 |
| 1687 CriticalSectionScoped cs(&_callbackCritSect); | 1673 rtc::CritScope cs(&_callbackCritSect); |
| 1688 | 1674 |
| 1689 if (!_transportPtr) | 1675 if (!_transportPtr) |
| 1690 { | 1676 { |
| 1691 _engineStatisticsPtr->SetLastError( | 1677 _engineStatisticsPtr->SetLastError( |
| 1692 VE_INVALID_OPERATION, kTraceWarning, | 1678 VE_INVALID_OPERATION, kTraceWarning, |
| 1693 "DeRegisterExternalTransport() external transport already " | 1679 "DeRegisterExternalTransport() external transport already " |
| 1694 "disabled"); | 1680 "disabled"); |
| 1695 return 0; | 1681 return 0; |
| 1696 } | 1682 } |
| 1697 _externalTransport = false; | 1683 _externalTransport = false; |
| (...skipping 123 matching lines...) | |
| 1821 uint32_t ntp_secs = 0; | 1807 uint32_t ntp_secs = 0; |
| 1822 uint32_t ntp_frac = 0; | 1808 uint32_t ntp_frac = 0; |
| 1823 uint32_t rtp_timestamp = 0; | 1809 uint32_t rtp_timestamp = 0; |
| 1824 if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, | 1810 if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, |
| 1825 &rtp_timestamp)) { | 1811 &rtp_timestamp)) { |
| 1826 // Waiting for RTCP. | 1812 // Waiting for RTCP. |
| 1827 return 0; | 1813 return 0; |
| 1828 } | 1814 } |
| 1829 | 1815 |
| 1830 { | 1816 { |
| 1831 CriticalSectionScoped lock(ts_stats_lock_.get()); | 1817 rtc::CritScope lock(&ts_stats_lock_); |
| 1832 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); | 1818 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); |
| 1833 } | 1819 } |
| 1834 return 0; | 1820 return 0; |
| 1835 } | 1821 } |
| 1836 | 1822 |
| 1837 int Channel::StartPlayingFileLocally(const char* fileName, | 1823 int Channel::StartPlayingFileLocally(const char* fileName, |
| 1838 bool loop, | 1824 bool loop, |
| 1839 FileFormats format, | 1825 FileFormats format, |
| 1840 int startPosition, | 1826 int startPosition, |
| 1841 float volumeScaling, | 1827 float volumeScaling, |
| 1842 int stopPosition, | 1828 int stopPosition, |
| 1843 const CodecInst* codecInst) | 1829 const CodecInst* codecInst) |
| 1844 { | 1830 { |
| 1845 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1831 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1846 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d," | 1832 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d," |
| 1847 " format=%d, volumeScaling=%5.3f, startPosition=%d, " | 1833 " format=%d, volumeScaling=%5.3f, startPosition=%d, " |
| 1848 "stopPosition=%d)", fileName, loop, format, volumeScaling, | 1834 "stopPosition=%d)", fileName, loop, format, volumeScaling, |
| 1849 startPosition, stopPosition); | 1835 startPosition, stopPosition); |
| 1850 | 1836 |
| 1851 if (channel_state_.Get().output_file_playing) | 1837 if (channel_state_.Get().output_file_playing) |
| 1852 { | 1838 { |
| 1853 _engineStatisticsPtr->SetLastError( | 1839 _engineStatisticsPtr->SetLastError( |
| 1854 VE_ALREADY_PLAYING, kTraceError, | 1840 VE_ALREADY_PLAYING, kTraceError, |
| 1855 "StartPlayingFileLocally() is already playing"); | 1841 "StartPlayingFileLocally() is already playing"); |
| 1856 return -1; | 1842 return -1; |
| 1857 } | 1843 } |
| 1858 | 1844 |
| 1859 { | 1845 { |
| 1860 CriticalSectionScoped cs(&_fileCritSect); | 1846 rtc::CritScope cs(&_fileCritSect); |
| 1861 | 1847 |
| 1862 if (_outputFilePlayerPtr) | 1848 if (_outputFilePlayerPtr) |
| 1863 { | 1849 { |
| 1864 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 1850 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| 1865 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); | 1851 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); |
| 1866 _outputFilePlayerPtr = NULL; | 1852 _outputFilePlayerPtr = NULL; |
| 1867 } | 1853 } |
| 1868 | 1854 |
| 1869 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer( | 1855 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer( |
| 1870 _outputFilePlayerId, (const FileFormats)format); | 1856 _outputFilePlayerId, (const FileFormats)format); |
| (...skipping 58 matching lines...) | |
| 1929 | 1915 |
| 1930 if (channel_state_.Get().output_file_playing) | 1916 if (channel_state_.Get().output_file_playing) |
| 1931 { | 1917 { |
| 1932 _engineStatisticsPtr->SetLastError( | 1918 _engineStatisticsPtr->SetLastError( |
| 1933 VE_ALREADY_PLAYING, kTraceError, | 1919 VE_ALREADY_PLAYING, kTraceError, |
| 1934 "StartPlayingFileLocally() is already playing"); | 1920 "StartPlayingFileLocally() is already playing"); |
| 1935 return -1; | 1921 return -1; |
| 1936 } | 1922 } |
| 1937 | 1923 |
| 1938 { | 1924 { |
| 1939 CriticalSectionScoped cs(&_fileCritSect); | 1925 rtc::CritScope cs(&_fileCritSect); |
| 1940 | 1926 |
| 1941 // Destroy the old instance | 1927 // Destroy the old instance |
| 1942 if (_outputFilePlayerPtr) | 1928 if (_outputFilePlayerPtr) |
| 1943 { | 1929 { |
| 1944 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 1930 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| 1945 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); | 1931 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); |
| 1946 _outputFilePlayerPtr = NULL; | 1932 _outputFilePlayerPtr = NULL; |
| 1947 } | 1933 } |
| 1948 | 1934 |
| 1949 // Create the instance | 1935 // Create the instance |
| (...skipping 38 matching lines...) | |
| 1988 { | 1974 { |
| 1989 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 1975 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 1990 "Channel::StopPlayingFileLocally()"); | 1976 "Channel::StopPlayingFileLocally()"); |
| 1991 | 1977 |
| 1992 if (!channel_state_.Get().output_file_playing) | 1978 if (!channel_state_.Get().output_file_playing) |
| 1993 { | 1979 { |
| 1994 return 0; | 1980 return 0; |
| 1995 } | 1981 } |
| 1996 | 1982 |
| 1997 { | 1983 { |
| 1998 CriticalSectionScoped cs(&_fileCritSect); | 1984 rtc::CritScope cs(&_fileCritSect); |
| 1999 | 1985 |
| 2000 if (_outputFilePlayerPtr->StopPlayingFile() != 0) | 1986 if (_outputFilePlayerPtr->StopPlayingFile() != 0) |
| 2001 { | 1987 { |
| 2002 _engineStatisticsPtr->SetLastError( | 1988 _engineStatisticsPtr->SetLastError( |
| 2003 VE_STOP_RECORDING_FAILED, kTraceError, | 1989 VE_STOP_RECORDING_FAILED, kTraceError, |
| 2004 "StopPlayingFile() could not stop playing"); | 1990 "StopPlayingFile() could not stop playing"); |
| 2005 return -1; | 1991 return -1; |
| 2006 } | 1992 } |
| 2007 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); | 1993 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); |
| 2008 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); | 1994 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); |
| (...skipping 31 matching lines...) | |
| 2040 return 0; | 2026 return 0; |
| 2041 } | 2027 } |
| 2042 | 2028 |
| 2043 // |_fileCritSect| cannot be taken while calling | 2029 // |_fileCritSect| cannot be taken while calling |
| 2044 // SetAnonymousMixabilityStatus() since as soon as the participant is added | 2030 // SetAnonymousMixabilityStatus() since as soon as the participant is added |
| 2045 // frames can be pulled by the mixer. Since the frames are generated from | 2031 // frames can be pulled by the mixer. Since the frames are generated from |
| 2046 // the file, _fileCritSect will be taken. This would result in a deadlock. | 2032 // the file, _fileCritSect will be taken. This would result in a deadlock. |
| 2047 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) | 2033 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) |
| 2048 { | 2034 { |
| 2049 channel_state_.SetOutputFilePlaying(false); | 2035 channel_state_.SetOutputFilePlaying(false); |
| 2050 CriticalSectionScoped cs(&_fileCritSect); | 2036 rtc::CritScope cs(&_fileCritSect); |
| 2051 _engineStatisticsPtr->SetLastError( | 2037 _engineStatisticsPtr->SetLastError( |
| 2052 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError, | 2038 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError, |
| 2053 "StartPlayingFile() failed to add participant as file to mixer"); | 2039 "StartPlayingFile() failed to add participant as file to mixer"); |
| 2054 _outputFilePlayerPtr->StopPlayingFile(); | 2040 _outputFilePlayerPtr->StopPlayingFile(); |
| 2055 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); | 2041 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); |
| 2056 _outputFilePlayerPtr = NULL; | 2042 _outputFilePlayerPtr = NULL; |
| 2057 return -1; | 2043 return -1; |
| 2058 } | 2044 } |
| 2059 | 2045 |
| 2060 return 0; | 2046 return 0; |
| 2061 } | 2047 } |
| 2062 | 2048 |
| 2063 int Channel::StartPlayingFileAsMicrophone(const char* fileName, | 2049 int Channel::StartPlayingFileAsMicrophone(const char* fileName, |
| 2064 bool loop, | 2050 bool loop, |
| 2065 FileFormats format, | 2051 FileFormats format, |
| 2066 int startPosition, | 2052 int startPosition, |
| 2067 float volumeScaling, | 2053 float volumeScaling, |
| 2068 int stopPosition, | 2054 int stopPosition, |
| 2069 const CodecInst* codecInst) | 2055 const CodecInst* codecInst) |
| 2070 { | 2056 { |
| 2071 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2057 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2072 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, " | 2058 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, " |
| 2073 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, " | 2059 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, " |
| 2074 "stopPosition=%d)", fileName, loop, format, volumeScaling, | 2060 "stopPosition=%d)", fileName, loop, format, volumeScaling, |
| 2075 startPosition, stopPosition); | 2061 startPosition, stopPosition); |
| 2076 | 2062 |
| 2077 CriticalSectionScoped cs(&_fileCritSect); | 2063 rtc::CritScope cs(&_fileCritSect); |
| 2078 | 2064 |
| 2079 if (channel_state_.Get().input_file_playing) | 2065 if (channel_state_.Get().input_file_playing) |
| 2080 { | 2066 { |
| 2081 _engineStatisticsPtr->SetLastError( | 2067 _engineStatisticsPtr->SetLastError( |
| 2082 VE_ALREADY_PLAYING, kTraceWarning, | 2068 VE_ALREADY_PLAYING, kTraceWarning, |
| 2083 "StartPlayingFileAsMicrophone() filePlayer is playing"); | 2069 "StartPlayingFileAsMicrophone() filePlayer is playing"); |
| 2084 return 0; | 2070 return 0; |
| 2085 } | 2071 } |
| 2086 | 2072 |
| 2087 // Destroy the old instance | 2073 // Destroy the old instance |
| (...skipping 54 matching lines...) | |
| 2142 format, volumeScaling, startPosition, stopPosition); | 2128 format, volumeScaling, startPosition, stopPosition); |
| 2143 | 2129 |
| 2144 if(stream == NULL) | 2130 if(stream == NULL) |
| 2145 { | 2131 { |
| 2146 _engineStatisticsPtr->SetLastError( | 2132 _engineStatisticsPtr->SetLastError( |
| 2147 VE_BAD_FILE, kTraceError, | 2133 VE_BAD_FILE, kTraceError, |
| 2148 "StartPlayingFileAsMicrophone NULL as input stream"); | 2134 "StartPlayingFileAsMicrophone NULL as input stream"); |
| 2149 return -1; | 2135 return -1; |
| 2150 } | 2136 } |
| 2151 | 2137 |
| 2152 CriticalSectionScoped cs(&_fileCritSect); | 2138 rtc::CritScope cs(&_fileCritSect); |
| 2153 | 2139 |
| 2154 if (channel_state_.Get().input_file_playing) | 2140 if (channel_state_.Get().input_file_playing) |
| 2155 { | 2141 { |
| 2156 _engineStatisticsPtr->SetLastError( | 2142 _engineStatisticsPtr->SetLastError( |
| 2157 VE_ALREADY_PLAYING, kTraceWarning, | 2143 VE_ALREADY_PLAYING, kTraceWarning, |
| 2158 "StartPlayingFileAsMicrophone() is playing"); | 2144 "StartPlayingFileAsMicrophone() is playing"); |
| 2159 return 0; | 2145 return 0; |
| 2160 } | 2146 } |
| 2161 | 2147 |
| 2162 // Destroy the old instance | 2148 // Destroy the old instance |
| (...skipping 35 matching lines...) | |
| 2198 channel_state_.SetInputFilePlaying(true); | 2184 channel_state_.SetInputFilePlaying(true); |
| 2199 | 2185 |
| 2200 return 0; | 2186 return 0; |
| 2201 } | 2187 } |
| 2202 | 2188 |
| 2203 int Channel::StopPlayingFileAsMicrophone() | 2189 int Channel::StopPlayingFileAsMicrophone() |
| 2204 { | 2190 { |
| 2205 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2191 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2206 "Channel::StopPlayingFileAsMicrophone()"); | 2192 "Channel::StopPlayingFileAsMicrophone()"); |
| 2207 | 2193 |
| 2208 CriticalSectionScoped cs(&_fileCritSect); | 2194 rtc::CritScope cs(&_fileCritSect); |
| 2209 | 2195 |
| 2210 if (!channel_state_.Get().input_file_playing) | 2196 if (!channel_state_.Get().input_file_playing) |
| 2211 { | 2197 { |
| 2212 return 0; | 2198 return 0; |
| 2213 } | 2199 } |
| 2214 | 2200 |
| 2215 if (_inputFilePlayerPtr->StopPlayingFile() != 0) | 2201 if (_inputFilePlayerPtr->StopPlayingFile() != 0) |
| 2216 { | 2202 { |
| 2217 _engineStatisticsPtr->SetLastError( | 2203 _engineStatisticsPtr->SetLastError( |
| 2218 VE_STOP_RECORDING_FAILED, kTraceError, | 2204 VE_STOP_RECORDING_FAILED, kTraceError, |
| (...skipping 47 matching lines...) | |
| 2266 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) || | 2252 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) || |
| 2267 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0)) | 2253 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0)) |
| 2268 { | 2254 { |
| 2269 format = kFileFormatWavFile; | 2255 format = kFileFormatWavFile; |
| 2270 } | 2256 } |
| 2271 else | 2257 else |
| 2272 { | 2258 { |
| 2273 format = kFileFormatCompressedFile; | 2259 format = kFileFormatCompressedFile; |
| 2274 } | 2260 } |
| 2275 | 2261 |
| 2276 CriticalSectionScoped cs(&_fileCritSect); | 2262 rtc::CritScope cs(&_fileCritSect); |
| 2277 | 2263 |
| 2278 // Destroy the old instance | 2264 // Destroy the old instance |
| 2279 if (_outputFileRecorderPtr) | 2265 if (_outputFileRecorderPtr) |
| 2280 { | 2266 { |
| 2281 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | 2267 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); |
| 2282 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | 2268 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); |
| 2283 _outputFileRecorderPtr = NULL; | 2269 _outputFileRecorderPtr = NULL; |
| 2284 } | 2270 } |
| 2285 | 2271 |
| 2286 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder( | 2272 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder( |
| (...skipping 56 matching lines...) | |
| 2343 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) || | 2329 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) || |
| 2344 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0)) | 2330 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0)) |
| 2345 { | 2331 { |
| 2346 format = kFileFormatWavFile; | 2332 format = kFileFormatWavFile; |
| 2347 } | 2333 } |
| 2348 else | 2334 else |
| 2349 { | 2335 { |
| 2350 format = kFileFormatCompressedFile; | 2336 format = kFileFormatCompressedFile; |
| 2351 } | 2337 } |
| 2352 | 2338 |
| 2353 CriticalSectionScoped cs(&_fileCritSect); | 2339 rtc::CritScope cs(&_fileCritSect); |
| 2354 | 2340 |
| 2355 // Destroy the old instance | 2341 // Destroy the old instance |
| 2356 if (_outputFileRecorderPtr) | 2342 if (_outputFileRecorderPtr) |
| 2357 { | 2343 { |
| 2358 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | 2344 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); |
| 2359 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | 2345 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); |
| 2360 _outputFileRecorderPtr = NULL; | 2346 _outputFileRecorderPtr = NULL; |
| 2361 } | 2347 } |
| 2362 | 2348 |
| 2363 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder( | 2349 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder( |
| (...skipping 30 matching lines...) | |
| 2394 "Channel::StopRecordingPlayout()"); | 2380 "Channel::StopRecordingPlayout()"); |
| 2395 | 2381 |
| 2396 if (!_outputFileRecording) | 2382 if (!_outputFileRecording) |
| 2397 { | 2383 { |
| 2398 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1), | 2384 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1), |
| 2399 "StopRecordingPlayout() isnot recording"); | 2385 "StopRecordingPlayout() isnot recording"); |
| 2400 return -1; | 2386 return -1; |
| 2401 } | 2387 } |
| 2402 | 2388 |
| 2403 | 2389 |
| 2404 CriticalSectionScoped cs(&_fileCritSect); | 2390 rtc::CritScope cs(&_fileCritSect); |
| 2405 | 2391 |
| 2406 if (_outputFileRecorderPtr->StopRecording() != 0) | 2392 if (_outputFileRecorderPtr->StopRecording() != 0) |
| 2407 { | 2393 { |
| 2408 _engineStatisticsPtr->SetLastError( | 2394 _engineStatisticsPtr->SetLastError( |
| 2409 VE_STOP_RECORDING_FAILED, kTraceError, | 2395 VE_STOP_RECORDING_FAILED, kTraceError, |
| 2410 "StopRecording() could not stop recording"); | 2396 "StopRecording() could not stop recording"); |
| 2411 return(-1); | 2397 return(-1); |
| 2412 } | 2398 } |
| 2413 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | 2399 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); |
| 2414 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | 2400 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); |
| 2415 _outputFileRecorderPtr = NULL; | 2401 _outputFileRecorderPtr = NULL; |
| 2416 _outputFileRecording = false; | 2402 _outputFileRecording = false; |
| 2417 | 2403 |
| 2418 return 0; | 2404 return 0; |
| 2419 } | 2405 } |
| 2420 | 2406 |
| 2421 void | 2407 void |
| 2422 Channel::SetMixWithMicStatus(bool mix) | 2408 Channel::SetMixWithMicStatus(bool mix) |
| 2423 { | 2409 { |
| 2424 CriticalSectionScoped cs(&_fileCritSect); | 2410 rtc::CritScope cs(&_fileCritSect); |
| 2425 _mixFileWithMicrophone=mix; | 2411 _mixFileWithMicrophone=mix; |
| 2426 } | 2412 } |
| 2427 | 2413 |
| 2428 int | 2414 int |
| 2429 Channel::GetSpeechOutputLevel(uint32_t& level) const | 2415 Channel::GetSpeechOutputLevel(uint32_t& level) const |
| 2430 { | 2416 { |
| 2431 int8_t currentLevel = _outputAudioLevel.Level(); | 2417 int8_t currentLevel = _outputAudioLevel.Level(); |
| 2432 level = static_cast<int32_t> (currentLevel); | 2418 level = static_cast<int32_t> (currentLevel); |
| 2433 return 0; | 2419 return 0; |
| 2434 } | 2420 } |
| 2435 | 2421 |
| 2436 int | 2422 int |
| 2437 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const | 2423 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const |
| 2438 { | 2424 { |
| 2439 int16_t currentLevel = _outputAudioLevel.LevelFullRange(); | 2425 int16_t currentLevel = _outputAudioLevel.LevelFullRange(); |
| 2440 level = static_cast<int32_t> (currentLevel); | 2426 level = static_cast<int32_t> (currentLevel); |
| 2441 return 0; | 2427 return 0; |
| 2442 } | 2428 } |
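
GetSpeechOutputLevel() and GetSpeechOutputLevelFullRange() simply forward the values maintained by the _outputAudioLevel helper: a coarse level and a full-range 0..32767 level. The helper's smoothing is not shown in this chunk; the sketch below only illustrates, under that caveat, how a full-range value relates to a block of 16-bit samples and to a coarse 0..9 scale.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <vector>

// Illustration only: the real _outputAudioLevel object keeps running state
// and its own mapping; this just relates raw samples, the full-range value
// (0..32767) and a coarse 0..9 scale.
int16_t LevelFullRange(const std::vector<int16_t>& samples) {
  int max_abs = 0;
  for (int16_t s : samples)
    max_abs = std::max(max_abs, std::abs(static_cast<int>(s)));
  return static_cast<int16_t>(std::min(max_abs, 32767));
}

int CoarseLevel(int16_t full_range) {
  return (full_range * 9) / 32767;  // 0..32767 -> 0..9.
}
```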
| 2443 | 2429 |
| 2444 int | 2430 int |
| 2445 Channel::SetMute(bool enable) | 2431 Channel::SetMute(bool enable) |
| 2446 { | 2432 { |
| 2447 CriticalSectionScoped cs(&volume_settings_critsect_); | 2433 rtc::CritScope cs(&volume_settings_critsect_); |
| 2448 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2434 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2449 "Channel::SetMute(enable=%d)", enable); | 2435 "Channel::SetMute(enable=%d)", enable); |
| 2450 _mute = enable; | 2436 _mute = enable; |
| 2451 return 0; | 2437 return 0; |
| 2452 } | 2438 } |
| 2453 | 2439 |
| 2454 bool | 2440 bool |
| 2455 Channel::Mute() const | 2441 Channel::Mute() const |
| 2456 { | 2442 { |
| 2457 CriticalSectionScoped cs(&volume_settings_critsect_); | 2443 rtc::CritScope cs(&volume_settings_critsect_); |
| 2458 return _mute; | 2444 return _mute; |
| 2459 } | 2445 } |
| 2460 | 2446 |
| 2461 int | 2447 int |
| 2462 Channel::SetOutputVolumePan(float left, float right) | 2448 Channel::SetOutputVolumePan(float left, float right) |
| 2463 { | 2449 { |
| 2464 CriticalSectionScoped cs(&volume_settings_critsect_); | 2450 rtc::CritScope cs(&volume_settings_critsect_); |
| 2465 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2451 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2466 "Channel::SetOutputVolumePan()"); | 2452 "Channel::SetOutputVolumePan()"); |
| 2467 _panLeft = left; | 2453 _panLeft = left; |
| 2468 _panRight = right; | 2454 _panRight = right; |
| 2469 return 0; | 2455 return 0; |
| 2470 } | 2456 } |
| 2471 | 2457 |
| 2472 int | 2458 int |
| 2473 Channel::GetOutputVolumePan(float& left, float& right) const | 2459 Channel::GetOutputVolumePan(float& left, float& right) const |
| 2474 { | 2460 { |
| 2475 CriticalSectionScoped cs(&volume_settings_critsect_); | 2461 rtc::CritScope cs(&volume_settings_critsect_); |
| 2476 left = _panLeft; | 2462 left = _panLeft; |
| 2477 right = _panRight; | 2463 right = _panRight; |
| 2478 return 0; | 2464 return 0; |
| 2479 } | 2465 } |
| 2480 | 2466 |
| 2481 int | 2467 int |
| 2482 Channel::SetChannelOutputVolumeScaling(float scaling) | 2468 Channel::SetChannelOutputVolumeScaling(float scaling) |
| 2483 { | 2469 { |
| 2484 CriticalSectionScoped cs(&volume_settings_critsect_); | 2470 rtc::CritScope cs(&volume_settings_critsect_); |
| 2485 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2471 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2486 "Channel::SetChannelOutputVolumeScaling()"); | 2472 "Channel::SetChannelOutputVolumeScaling()"); |
| 2487 _outputGain = scaling; | 2473 _outputGain = scaling; |
| 2488 return 0; | 2474 return 0; |
| 2489 } | 2475 } |
| 2490 | 2476 |
| 2491 int | 2477 int |
| 2492 Channel::GetChannelOutputVolumeScaling(float& scaling) const | 2478 Channel::GetChannelOutputVolumeScaling(float& scaling) const |
| 2493 { | 2479 { |
| 2494 CriticalSectionScoped cs(&volume_settings_critsect_); | 2480 rtc::CritScope cs(&volume_settings_critsect_); |
| 2495 scaling = _outputGain; | 2481 scaling = _outputGain; |
| 2496 return 0; | 2482 return 0; |
| 2497 } | 2483 } |
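
The pan and volume-scaling setters above only store values under volume_settings_critsect_; they are applied later in the playout path, outside this chunk, via AudioFrameOperations helpers. A rough sketch of what the stored _outputGain, _panLeft and _panRight mean for an interleaved stereo frame, assuming a simple saturating multiply:

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdint>

// Sketch only: shows the effect of the stored gain/pan values on an
// interleaved stereo buffer; not the production scaling code.
void ApplyGainAndPan(int16_t* interleaved, size_t samples_per_channel,
                     float gain, float pan_left, float pan_right) {
  for (size_t i = 0; i < samples_per_channel; ++i) {
    float l = interleaved[2 * i] * gain * pan_left;
    float r = interleaved[2 * i + 1] * gain * pan_right;
    interleaved[2 * i] =
        static_cast<int16_t>(std::max(-32768.0f, std::min(32767.0f, l)));
    interleaved[2 * i + 1] =
        static_cast<int16_t>(std::max(-32768.0f, std::min(32767.0f, r)));
  }
}
```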
| 2498 | 2484 |
| 2499 int Channel::SendTelephoneEventOutband(unsigned char eventCode, | 2485 int Channel::SendTelephoneEventOutband(unsigned char eventCode, |
| 2500 int lengthMs, int attenuationDb, | 2486 int lengthMs, int attenuationDb, |
| 2501 bool playDtmfEvent) | 2487 bool playDtmfEvent) |
| 2502 { | 2488 { |
| 2503 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), | 2489 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), |
| 2504 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)", | 2490 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)", |
| (...skipping 89 matching lines...) |
| 2594 "Channel::UpdateRxVadDetection() => vadDecision=%d", | 2580 "Channel::UpdateRxVadDetection() => vadDecision=%d", |
| 2595 vadDecision); | 2581 vadDecision); |
| 2596 return 0; | 2582 return 0; |
| 2597 } | 2583 } |
| 2598 | 2584 |
| 2599 int | 2585 int |
| 2600 Channel::RegisterRxVadObserver(VoERxVadCallback &observer) | 2586 Channel::RegisterRxVadObserver(VoERxVadCallback &observer) |
| 2601 { | 2587 { |
| 2602 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2588 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2603 "Channel::RegisterRxVadObserver()"); | 2589 "Channel::RegisterRxVadObserver()"); |
| 2604 CriticalSectionScoped cs(&_callbackCritSect); | 2590 rtc::CritScope cs(&_callbackCritSect); |
| 2605 | 2591 |
| 2606 if (_rxVadObserverPtr) | 2592 if (_rxVadObserverPtr) |
| 2607 { | 2593 { |
| 2608 _engineStatisticsPtr->SetLastError( | 2594 _engineStatisticsPtr->SetLastError( |
| 2609 VE_INVALID_OPERATION, kTraceError, | 2595 VE_INVALID_OPERATION, kTraceError, |
| 2610 "RegisterRxVadObserver() observer already enabled"); | 2596 "RegisterRxVadObserver() observer already enabled"); |
| 2611 return -1; | 2597 return -1; |
| 2612 } | 2598 } |
| 2613 _rxVadObserverPtr = &observer; | 2599 _rxVadObserverPtr = &observer; |
| 2614 _RxVadDetection = true; | 2600 _RxVadDetection = true; |
| 2615 return 0; | 2601 return 0; |
| 2616 } | 2602 } |
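
RegisterRxVadObserver() stores a single VoERxVadCallback pointer, which UpdateRxVadDetection() (partially visible above) invokes when the receive-side VAD decision changes. A sketch of an observer, assuming the OnRxVad(channel, vadDecision) signature declared in voe_audio_processing.h:

```cpp
#include <cstdio>

#include "webrtc/voice_engine/include/voe_audio_processing.h"

// Sketch: the OnRxVad() signature is assumed from voe_audio_processing.h.
class LoggingRxVadObserver : public webrtc::VoERxVadCallback {
 public:
  void OnRxVad(int channel, int vadDecision) override {
    // Invoked from UpdateRxVadDetection() when the decision changes.
    std::printf("channel %d: rx VAD decision = %d\n", channel, vadDecision);
  }
};
```

Such an observer would normally be handed to the channel through the VoEAudioProcessing sub-API rather than by calling Channel::RegisterRxVadObserver() directly.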
| 2617 | 2603 |
| 2618 int | 2604 int |
| 2619 Channel::DeRegisterRxVadObserver() | 2605 Channel::DeRegisterRxVadObserver() |
| 2620 { | 2606 { |
| 2621 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 2607 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 2622 "Channel::DeRegisterRxVadObserver()"); | 2608 "Channel::DeRegisterRxVadObserver()"); |
| 2623 CriticalSectionScoped cs(&_callbackCritSect); | 2609 rtc::CritScope cs(&_callbackCritSect); |
| 2624 | 2610 |
| 2625 if (!_rxVadObserverPtr) | 2611 if (!_rxVadObserverPtr) |
| 2626 { | 2612 { |
| 2627 _engineStatisticsPtr->SetLastError( | 2613 _engineStatisticsPtr->SetLastError( |
| 2628 VE_INVALID_OPERATION, kTraceWarning, | 2614 VE_INVALID_OPERATION, kTraceWarning, |
| 2629 "DeRegisterRxVadObserver() observer already disabled"); | 2615 "DeRegisterRxVadObserver() observer already disabled"); |
| 2630 return 0; | 2616 return 0; |
| 2631 } | 2617 } |
| 2632 _rxVadObserverPtr = NULL; | 2618 _rxVadObserverPtr = NULL; |
| 2633 _RxVadDetection = false; | 2619 _RxVadDetection = false; |
| (...skipping 619 matching lines...) |
| 3253 " output will not be complete"); | 3239 " output will not be complete"); |
| 3254 } | 3240 } |
| 3255 | 3241 |
| 3256 stats.bytesSent = bytesSent; | 3242 stats.bytesSent = bytesSent; |
| 3257 stats.packetsSent = packetsSent; | 3243 stats.packetsSent = packetsSent; |
| 3258 stats.bytesReceived = bytesReceived; | 3244 stats.bytesReceived = bytesReceived; |
| 3259 stats.packetsReceived = packetsReceived; | 3245 stats.packetsReceived = packetsReceived; |
| 3260 | 3246 |
| 3261 // --- Timestamps | 3247 // --- Timestamps |
| 3262 { | 3248 { |
| 3263 CriticalSectionScoped lock(ts_stats_lock_.get()); | 3249 rtc::CritScope lock(&ts_stats_lock_); |
| 3264 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; | 3250 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; |
| 3265 } | 3251 } |
| 3266 return 0; | 3252 return 0; |
| 3267 } | 3253 } |
| 3268 | 3254 |
| 3269 int Channel::SetREDStatus(bool enable, int redPayloadtype) { | 3255 int Channel::SetREDStatus(bool enable, int redPayloadtype) { |
| 3270 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), | 3256 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), |
| 3271 "Channel::SetREDStatus()"); | 3257 "Channel::SetREDStatus()"); |
| 3272 | 3258 |
| 3273 if (enable) { | 3259 if (enable) { |
| (...skipping 120 matching lines...) |
| 3394 MixOrReplaceAudioWithFile(mixingFrequency); | 3380 MixOrReplaceAudioWithFile(mixingFrequency); |
| 3395 } | 3381 } |
| 3396 | 3382 |
| 3397 bool is_muted = Mute(); // Cache locally as Mute() takes a lock. | 3383 bool is_muted = Mute(); // Cache locally as Mute() takes a lock. |
| 3398 if (is_muted) { | 3384 if (is_muted) { |
| 3399 AudioFrameOperations::Mute(_audioFrame); | 3385 AudioFrameOperations::Mute(_audioFrame); |
| 3400 } | 3386 } |
| 3401 | 3387 |
| 3402 if (channel_state_.Get().input_external_media) | 3388 if (channel_state_.Get().input_external_media) |
| 3403 { | 3389 { |
| 3404 CriticalSectionScoped cs(&_callbackCritSect); | 3390 rtc::CritScope cs(&_callbackCritSect); |
| 3405 const bool isStereo = (_audioFrame.num_channels_ == 2); | 3391 const bool isStereo = (_audioFrame.num_channels_ == 2); |
| 3406 if (_inputExternalMediaCallbackPtr) | 3392 if (_inputExternalMediaCallbackPtr) |
| 3407 { | 3393 { |
| 3408 _inputExternalMediaCallbackPtr->Process( | 3394 _inputExternalMediaCallbackPtr->Process( |
| 3409 _channelId, | 3395 _channelId, |
| 3410 kRecordingPerChannel, | 3396 kRecordingPerChannel, |
| 3411 (int16_t*)_audioFrame.data_, | 3397 (int16_t*)_audioFrame.data_, |
| 3412 _audioFrame.samples_per_channel_, | 3398 _audioFrame.samples_per_channel_, |
| 3413 _audioFrame.sample_rate_hz_, | 3399 _audioFrame.sample_rate_hz_, |
| 3414 isStereo); | 3400 isStereo); |
| (...skipping 43 matching lines...) |
| 3458 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), | 3444 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), |
| 3459 "Channel::EncodeAndSend() ACM encoding failed"); | 3445 "Channel::EncodeAndSend() ACM encoding failed"); |
| 3460 return 0xFFFFFFFF; | 3446 return 0xFFFFFFFF; |
| 3461 } | 3447 } |
| 3462 | 3448 |
| 3463 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_); | 3449 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_); |
| 3464 return 0; | 3450 return 0; |
| 3465 } | 3451 } |
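
EncodeAndSend() advances _timeStamp by samples_per_channel_ after every 10 ms frame, i.e. the RTP timestamp runs at the encoder's sample rate regardless of how many bytes the frame encodes to. A small sketch of that arithmetic:

```cpp
#include <cstddef>
#include <cstdint>

// One 10 ms frame advances the RTP timestamp by sample_rate / 100 ticks.
uint32_t AdvanceRtpTimestamp(uint32_t timestamp, int sample_rate_hz) {
  const size_t samples_per_channel = static_cast<size_t>(sample_rate_hz / 100);
  return timestamp + static_cast<uint32_t>(samples_per_channel);
}
// AdvanceRtpTimestamp(ts, 48000) == ts + 480
// AdvanceRtpTimestamp(ts, 16000) == ts + 160
```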
| 3466 | 3452 |
| 3467 void Channel::DisassociateSendChannel(int channel_id) { | 3453 void Channel::DisassociateSendChannel(int channel_id) { |
| 3468 CriticalSectionScoped lock(assoc_send_channel_lock_.get()); | 3454 rtc::CritScope lock(&assoc_send_channel_lock_); |
| 3469 Channel* channel = associate_send_channel_.channel(); | 3455 Channel* channel = associate_send_channel_.channel(); |
| 3470 if (channel && channel->ChannelId() == channel_id) { | 3456 if (channel && channel->ChannelId() == channel_id) { |
| 3471 // If this channel is associated with a send channel of the specified | 3457 // If this channel is associated with a send channel of the specified |
| 3472 // Channel ID, disassociate with it. | 3458 // Channel ID, disassociate with it. |
| 3473 ChannelOwner ref(NULL); | 3459 ChannelOwner ref(NULL); |
| 3474 associate_send_channel_ = ref; | 3460 associate_send_channel_ = ref; |
| 3475 } | 3461 } |
| 3476 } | 3462 } |
| 3477 | 3463 |
| 3478 int Channel::RegisterExternalMediaProcessing( | 3464 int Channel::RegisterExternalMediaProcessing( |
| 3479 ProcessingTypes type, | 3465 ProcessingTypes type, |
| 3480 VoEMediaProcess& processObject) | 3466 VoEMediaProcess& processObject) |
| 3481 { | 3467 { |
| 3482 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 3468 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 3483 "Channel::RegisterExternalMediaProcessing()"); | 3469 "Channel::RegisterExternalMediaProcessing()"); |
| 3484 | 3470 |
| 3485 CriticalSectionScoped cs(&_callbackCritSect); | 3471 rtc::CritScope cs(&_callbackCritSect); |
| 3486 | 3472 |
| 3487 if (kPlaybackPerChannel == type) | 3473 if (kPlaybackPerChannel == type) |
| 3488 { | 3474 { |
| 3489 if (_outputExternalMediaCallbackPtr) | 3475 if (_outputExternalMediaCallbackPtr) |
| 3490 { | 3476 { |
| 3491 _engineStatisticsPtr->SetLastError( | 3477 _engineStatisticsPtr->SetLastError( |
| 3492 VE_INVALID_OPERATION, kTraceError, | 3478 VE_INVALID_OPERATION, kTraceError, |
| 3493 "Channel::RegisterExternalMediaProcessing() " | 3479 "Channel::RegisterExternalMediaProcessing() " |
| 3494 "output external media already enabled"); | 3480 "output external media already enabled"); |
| 3495 return -1; | 3481 return -1; |
| (...skipping 15 matching lines...) |
| 3511 channel_state_.SetInputExternalMedia(true); | 3497 channel_state_.SetInputExternalMedia(true); |
| 3512 } | 3498 } |
| 3513 return 0; | 3499 return 0; |
| 3514 } | 3500 } |
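
RegisterExternalMediaProcessing() installs a VoEMediaProcess callback that the transmit path invokes with the channel's 10 ms frame (see the Process() call earlier in this chunk). A sketch of such a processor; the exact parameter types (for instance int vs. size_t for the length) should be checked against voe_external_media.h:

```cpp
#include <cstddef>
#include <cstdint>

#include "webrtc/common_types.h"
#include "webrtc/voice_engine/include/voe_external_media.h"

// Sketch of an external processor for the transmit path; parameter types are
// assumed to mirror the Process() call shown earlier in this file.
class HalvingProcessor : public webrtc::VoEMediaProcess {
 public:
  void Process(int channel, webrtc::ProcessingTypes type,
               int16_t audio10ms[], size_t length,
               int samplingFreq, bool isStereo) override {
    // |length| is samples per channel; the buffer is interleaved if stereo.
    const size_t total = isStereo ? 2 * length : length;
    for (size_t i = 0; i < total; ++i)
      audio10ms[i] /= 2;  // Attenuate the stream by roughly 6 dB.
  }
};
```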
| 3515 | 3501 |
| 3516 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) | 3502 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) |
| 3517 { | 3503 { |
| 3518 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), | 3504 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), |
| 3519 "Channel::DeRegisterExternalMediaProcessing()"); | 3505 "Channel::DeRegisterExternalMediaProcessing()"); |
| 3520 | 3506 |
| 3521 CriticalSectionScoped cs(&_callbackCritSect); | 3507 rtc::CritScope cs(&_callbackCritSect); |
| 3522 | 3508 |
| 3523 if (kPlaybackPerChannel == type) | 3509 if (kPlaybackPerChannel == type) |
| 3524 { | 3510 { |
| 3525 if (!_outputExternalMediaCallbackPtr) | 3511 if (!_outputExternalMediaCallbackPtr) |
| 3526 { | 3512 { |
| 3527 _engineStatisticsPtr->SetLastError( | 3513 _engineStatisticsPtr->SetLastError( |
| 3528 VE_INVALID_OPERATION, kTraceWarning, | 3514 VE_INVALID_OPERATION, kTraceWarning, |
| 3529 "Channel::DeRegisterExternalMediaProcessing() " | 3515 "Channel::DeRegisterExternalMediaProcessing() " |
| 3530 "output external media already disabled"); | 3516 "output external media already disabled"); |
| 3531 return 0; | 3517 return 0; |
| (...skipping 41 matching lines...) |
| 3573 { | 3559 { |
| 3574 return audio_coding_->GetNetworkStatistics(&stats); | 3560 return audio_coding_->GetNetworkStatistics(&stats); |
| 3575 } | 3561 } |
| 3576 | 3562 |
| 3577 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const { | 3563 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const { |
| 3578 audio_coding_->GetDecodingCallStatistics(stats); | 3564 audio_coding_->GetDecodingCallStatistics(stats); |
| 3579 } | 3565 } |
| 3580 | 3566 |
| 3581 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms, | 3567 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms, |
| 3582 int* playout_buffer_delay_ms) const { | 3568 int* playout_buffer_delay_ms) const { |
| 3583 CriticalSectionScoped cs(video_sync_lock_.get()); | 3569 rtc::CritScope lock(&video_sync_lock_); |
| 3584 if (_average_jitter_buffer_delay_us == 0) { | 3570 if (_average_jitter_buffer_delay_us == 0) { |
| 3585 return false; | 3571 return false; |
| 3586 } | 3572 } |
| 3587 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 + | 3573 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 + |
| 3588 _recPacketDelayMs; | 3574 _recPacketDelayMs; |
| 3589 *playout_buffer_delay_ms = playout_delay_ms_; | 3575 *playout_buffer_delay_ms = playout_delay_ms_; |
| 3590 return true; | 3576 return true; |
| 3591 } | 3577 } |
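
GetDelayEstimate() keeps the averaged jitter-buffer delay in microseconds and rounds it to the nearest millisecond before adding the measured inter-packet delay. For example:

```cpp
// The averaged jitter-buffer delay is stored in microseconds; round to the
// nearest millisecond, then add the measured inter-packet delay.
int JitterBufferDelayMs(int average_jitter_buffer_delay_us,
                        int rec_packet_delay_ms) {
  return (average_jitter_buffer_delay_us + 500) / 1000 + rec_packet_delay_ms;
}
// JitterBufferDelayMs(62700, 20) == 63 + 20 == 83
```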
| 3592 | 3578 |
| 3593 uint32_t Channel::GetDelayEstimate() const { | 3579 uint32_t Channel::GetDelayEstimate() const { |
| (...skipping 26 matching lines...) |
| 3620 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, | 3606 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, |
| 3621 "SetMinimumPlayoutDelay() failed to set min playout delay"); | 3607 "SetMinimumPlayoutDelay() failed to set min playout delay"); |
| 3622 return -1; | 3608 return -1; |
| 3623 } | 3609 } |
| 3624 return 0; | 3610 return 0; |
| 3625 } | 3611 } |
| 3626 | 3612 |
| 3627 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) { | 3613 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) { |
| 3628 uint32_t playout_timestamp_rtp = 0; | 3614 uint32_t playout_timestamp_rtp = 0; |
| 3629 { | 3615 { |
| 3630 CriticalSectionScoped cs(video_sync_lock_.get()); | 3616 rtc::CritScope lock(&video_sync_lock_); |
| 3631 playout_timestamp_rtp = playout_timestamp_rtp_; | 3617 playout_timestamp_rtp = playout_timestamp_rtp_; |
| 3632 } | 3618 } |
| 3633 if (playout_timestamp_rtp == 0) { | 3619 if (playout_timestamp_rtp == 0) { |
| 3634 _engineStatisticsPtr->SetLastError( | 3620 _engineStatisticsPtr->SetLastError( |
| 3635 VE_CANNOT_RETRIEVE_VALUE, kTraceError, | 3621 VE_CANNOT_RETRIEVE_VALUE, kTraceError, |
| 3636 "GetPlayoutTimestamp() failed to retrieve timestamp"); | 3622 "GetPlayoutTimestamp() failed to retrieve timestamp"); |
| 3637 return -1; | 3623 return -1; |
| 3638 } | 3624 } |
| 3639 timestamp = playout_timestamp_rtp; | 3625 timestamp = playout_timestamp_rtp; |
| 3640 return 0; | 3626 return 0; |
| (...skipping 33 matching lines...) |
| 3674 | 3660 |
| 3675 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use | 3661 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use |
| 3676 // a shared helper. | 3662 // a shared helper. |
| 3677 int32_t | 3663 int32_t |
| 3678 Channel::MixOrReplaceAudioWithFile(int mixingFrequency) | 3664 Channel::MixOrReplaceAudioWithFile(int mixingFrequency) |
| 3679 { | 3665 { |
| 3680 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]); | 3666 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]); |
| 3681 size_t fileSamples(0); | 3667 size_t fileSamples(0); |
| 3682 | 3668 |
| 3683 { | 3669 { |
| 3684 CriticalSectionScoped cs(&_fileCritSect); | 3670 rtc::CritScope cs(&_fileCritSect); |
| 3685 | 3671 |
| 3686 if (_inputFilePlayerPtr == NULL) | 3672 if (_inputFilePlayerPtr == NULL) |
| 3687 { | 3673 { |
| 3688 WEBRTC_TRACE(kTraceWarning, kTraceVoice, | 3674 WEBRTC_TRACE(kTraceWarning, kTraceVoice, |
| 3689 VoEId(_instanceId, _channelId), | 3675 VoEId(_instanceId, _channelId), |
| 3690 "Channel::MixOrReplaceAudioWithFile() fileplayer" | 3676 "Channel::MixOrReplaceAudioWithFile() fileplayer" |
| 3691 " doesnt exist"); | 3677 " doesnt exist"); |
| 3692 return -1; | 3678 return -1; |
| 3693 } | 3679 } |
| 3694 | 3680 |
| (...skipping 49 matching lines...) |
| 3744 int32_t | 3730 int32_t |
| 3745 Channel::MixAudioWithFile(AudioFrame& audioFrame, | 3731 Channel::MixAudioWithFile(AudioFrame& audioFrame, |
| 3746 int mixingFrequency) | 3732 int mixingFrequency) |
| 3747 { | 3733 { |
| 3748 assert(mixingFrequency <= 48000); | 3734 assert(mixingFrequency <= 48000); |
| 3749 | 3735 |
| 3750 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[960]); | 3736 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[960]); |
| 3751 size_t fileSamples(0); | 3737 size_t fileSamples(0); |
| 3752 | 3738 |
| 3753 { | 3739 { |
| 3754 CriticalSectionScoped cs(&_fileCritSect); | 3740 rtc::CritScope cs(&_fileCritSect); |
| 3755 | 3741 |
| 3756 if (_outputFilePlayerPtr == NULL) | 3742 if (_outputFilePlayerPtr == NULL) |
| 3757 { | 3743 { |
| 3758 WEBRTC_TRACE(kTraceWarning, kTraceVoice, | 3744 WEBRTC_TRACE(kTraceWarning, kTraceVoice, |
| 3759 VoEId(_instanceId, _channelId), | 3745 VoEId(_instanceId, _channelId), |
| 3760 "Channel::MixAudioWithFile() file mixing failed"); | 3746 "Channel::MixAudioWithFile() file mixing failed"); |
| 3761 return -1; | 3747 return -1; |
| 3762 } | 3748 } |
| 3763 | 3749 |
| 3764 // We should get the frequency we ask for. | 3750 // We should get the frequency we ask for. |
| (...skipping 128 matching lines...) |
| 3893 jitter_buffer_playout_timestamp_ = playout_timestamp; | 3879 jitter_buffer_playout_timestamp_ = playout_timestamp; |
| 3894 | 3880 |
| 3895 // Remove the playout delay. | 3881 // Remove the playout delay. |
| 3896 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000)); | 3882 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000)); |
| 3897 | 3883 |
| 3898 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), | 3884 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), |
| 3899 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu", | 3885 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu", |
| 3900 playout_timestamp); | 3886 playout_timestamp); |
| 3901 | 3887 |
| 3902 { | 3888 { |
| 3903 CriticalSectionScoped cs(video_sync_lock_.get()); | 3889 rtc::CritScope lock(&video_sync_lock_); |
| 3904 if (rtcp) { | 3890 if (rtcp) { |
| 3905 playout_timestamp_rtcp_ = playout_timestamp; | 3891 playout_timestamp_rtcp_ = playout_timestamp; |
| 3906 } else { | 3892 } else { |
| 3907 playout_timestamp_rtp_ = playout_timestamp; | 3893 playout_timestamp_rtp_ = playout_timestamp; |
| 3908 } | 3894 } |
| 3909 playout_delay_ms_ = delay_ms; | 3895 playout_delay_ms_ = delay_ms; |
| 3910 } | 3896 } |
| 3911 } | 3897 } |
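
UpdatePlayoutTimestamp() subtracts the device playout delay, converted to RTP ticks at the playout rate, so that playout_timestamp_rtp_/rtcp_ describe what is currently audible rather than what was just pulled from the jitter buffer. A sketch of that conversion:

```cpp
#include <cstdint>

// Convert the device playout delay to RTP ticks at the playout rate and
// subtract it from the timestamp pulled from the jitter buffer.
uint32_t RemovePlayoutDelay(uint32_t playout_timestamp, int delay_ms,
                            int playout_frequency_hz) {
  return playout_timestamp -
         static_cast<uint32_t>(delay_ms * (playout_frequency_hz / 1000));
}
// RemovePlayoutDelay(ts, 100, 48000) == ts - 4800
```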
| 3912 | 3898 |
| 3913 // Called for incoming RTP packets after successful RTP header parsing. | 3899 // Called for incoming RTP packets after successful RTP header parsing. |
| (...skipping 20 matching lines...) |
| 3934 } | 3920 } |
| 3935 | 3921 |
| 3936 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) / | 3922 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) / |
| 3937 (rtp_receive_frequency / 1000); | 3923 (rtp_receive_frequency / 1000); |
| 3938 | 3924 |
| 3939 _previousTimestamp = rtp_timestamp; | 3925 _previousTimestamp = rtp_timestamp; |
| 3940 | 3926 |
| 3941 if (timestamp_diff_ms == 0) return; | 3927 if (timestamp_diff_ms == 0) return; |
| 3942 | 3928 |
| 3943 { | 3929 { |
| 3944 CriticalSectionScoped cs(video_sync_lock_.get()); | 3930 rtc::CritScope lock(&video_sync_lock_); |
| 3945 | 3931 |
| 3946 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) { | 3932 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) { |
| 3947 _recPacketDelayMs = packet_delay_ms; | 3933 _recPacketDelayMs = packet_delay_ms; |
| 3948 } | 3934 } |
| 3949 | 3935 |
| 3950 if (_average_jitter_buffer_delay_us == 0) { | 3936 if (_average_jitter_buffer_delay_us == 0) { |
| 3951 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000; | 3937 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000; |
| 3952 return; | 3938 return; |
| 3953 } | 3939 } |
| 3954 | 3940 |
| (...skipping 123 matching lines...) |
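
UpdatePacketDelay() (partially shown above) derives the spacing between consecutive packets from the RTP timestamp difference divided by the receive rate in kHz; values in the 10-60 ms range update _recPacketDelayMs. For example:

```cpp
#include <cstdint>

// RTP timestamp difference divided by the receive rate in kHz gives the
// spacing between consecutive packets in milliseconds.
uint16_t PacketDelayMs(uint32_t rtp_timestamp, uint32_t previous_timestamp,
                       int rtp_receive_frequency_hz) {
  return static_cast<uint16_t>(
      (rtp_timestamp - previous_timestamp) /
      static_cast<uint32_t>(rtp_receive_frequency_hz / 1000));
}
// PacketDelayMs(960, 0, 48000) == 20  // one 20 ms packet at 48 kHz
```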
| 4078 RtcpMode method = _rtpRtcpModule->RTCP(); | 4064 RtcpMode method = _rtpRtcpModule->RTCP(); |
| 4079 if (method == RtcpMode::kOff) { | 4065 if (method == RtcpMode::kOff) { |
| 4080 return 0; | 4066 return 0; |
| 4081 } | 4067 } |
| 4082 std::vector<RTCPReportBlock> report_blocks; | 4068 std::vector<RTCPReportBlock> report_blocks; |
| 4083 _rtpRtcpModule->RemoteRTCPStat(&report_blocks); | 4069 _rtpRtcpModule->RemoteRTCPStat(&report_blocks); |
| 4084 | 4070 |
| 4085 int64_t rtt = 0; | 4071 int64_t rtt = 0; |
| 4086 if (report_blocks.empty()) { | 4072 if (report_blocks.empty()) { |
| 4087 if (allow_associate_channel) { | 4073 if (allow_associate_channel) { |
| 4088 CriticalSectionScoped lock(assoc_send_channel_lock_.get()); | 4074 rtc::CritScope lock(&assoc_send_channel_lock_); |
| 4089 Channel* channel = associate_send_channel_.channel(); | 4075 Channel* channel = associate_send_channel_.channel(); |
| 4090 // Tries to get RTT from an associated channel. This is important for | 4076 // Tries to get RTT from an associated channel. This is important for |
| 4091 // receive-only channels. | 4077 // receive-only channels. |
| 4092 if (channel) { | 4078 if (channel) { |
| 4093 // To prevent infinite recursion and deadlock, calling GetRTT of | 4079 // To prevent infinite recursion and deadlock, calling GetRTT of |
| 4094 // associate channel should always use "false" for argument: | 4080 // associate channel should always use "false" for argument: |
| 4095 // |allow_associate_channel|. | 4081 // |allow_associate_channel|. |
| 4096 rtt = channel->GetRTT(false); | 4082 rtt = channel->GetRTT(false); |
| 4097 } | 4083 } |
| 4098 } | 4084 } |
| (...skipping 19 matching lines...) |
| 4118 int64_t min_rtt = 0; | 4104 int64_t min_rtt = 0; |
| 4119 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) | 4105 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) |
| 4120 != 0) { | 4106 != 0) { |
| 4121 return 0; | 4107 return 0; |
| 4122 } | 4108 } |
| 4123 return rtt; | 4109 return rtt; |
| 4124 } | 4110 } |
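
GetRTT(true) falls back to the associated send channel when this (typically receive-only) channel has no RTCP report blocks, and the fallback call always passes false so the lookup can never bounce back and forth. A self-contained sketch of that guard, with hypothetical names standing in for voe::Channel:

```cpp
#include <cstdint>

// Hypothetical stand-in for voe::Channel, for illustration only.
struct ChannelLike {
  bool has_report_blocks = false;
  int64_t rtt_from_reports_ms = 0;
  const ChannelLike* associated_send_channel = nullptr;
};

int64_t GetRttMs(const ChannelLike& ch, bool allow_associate_channel) {
  if (ch.has_report_blocks)
    return ch.rtt_from_reports_ms;
  if (!allow_associate_channel || ch.associated_send_channel == nullptr)
    return 0;
  // Always pass false here so the lookup cannot recurse back into |ch|.
  return GetRttMs(*ch.associated_send_channel, false);
}
```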
| 4125 | 4111 |
| 4126 } // namespace voe | 4112 } // namespace voe |
| 4127 } // namespace webrtc | 4113 } // namespace webrtc |