OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 631 matching lines...)
642 | 642 |
643 ChannelState::State state = channel_state_.Get(); | 643 ChannelState::State state = channel_state_.Get(); |
644 | 644 |
645 { | 645 { |
646 // Pass the audio buffers to an optional sink callback, before applying | 646 // Pass the audio buffers to an optional sink callback, before applying |
647 // scaling/panning, as that applies to the mix operation. | 647 // scaling/panning, as that applies to the mix operation. |
648 // External recipients of the audio (e.g. via AudioTrack), will do their | 648 // External recipients of the audio (e.g. via AudioTrack), will do their |
649 // own mixing/dynamic processing. | 649 // own mixing/dynamic processing. |
650 rtc::CritScope cs(&_callbackCritSect); | 650 rtc::CritScope cs(&_callbackCritSect); |
651 if (audio_sink_) { | 651 if (audio_sink_) { |
| 652 // TODO(yujo): switch to passing audioFrame->data() once |
| 653 // AudioSinkInterface::Data holds a const int16_t*. |
652 AudioSinkInterface::Data data( | 654 AudioSinkInterface::Data data( |
653 &audioFrame->data_[0], audioFrame->samples_per_channel_, | 655 audioFrame->mutable_data(), audioFrame->samples_per_channel_, |
654 audioFrame->sample_rate_hz_, audioFrame->num_channels_, | 656 audioFrame->sample_rate_hz_, audioFrame->num_channels_, |
655 audioFrame->timestamp_); | 657 audioFrame->timestamp_); |
656 audio_sink_->OnData(data); | 658 audio_sink_->OnData(data); |
657 } | 659 } |
658 } | 660 } |
659 | 661 |
660 float output_gain = 1.0f; | 662 float output_gain = 1.0f; |
661 { | 663 { |
662 rtc::CritScope cs(&volume_settings_critsect_); | 664 rtc::CritScope cs(&volume_settings_critsect_); |
663 output_gain = _outputGain; | 665 output_gain = _outputGain; |
(...skipping 1950 matching lines...)
2614 if (channel_state_.Get().input_file_playing) { | 2616 if (channel_state_.Get().input_file_playing) { |
2615 MixOrReplaceAudioWithFile(mixingFrequency); | 2617 MixOrReplaceAudioWithFile(mixingFrequency); |
2616 } | 2618 } |
2617 | 2619 |
2618 bool is_muted = InputMute(); // Cache locally as InputMute() takes a lock. | 2620 bool is_muted = InputMute(); // Cache locally as InputMute() takes a lock. |
2619 AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted); | 2621 AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted); |
2620 | 2622 |
2621 if (_includeAudioLevelIndication) { | 2623 if (_includeAudioLevelIndication) { |
2622 size_t length = | 2624 size_t length = |
2623 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_; | 2625 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_; |
2624 RTC_CHECK_LE(length, sizeof(_audioFrame.data_)); | 2626 RTC_CHECK_LE(length, AudioFrame::kMaxDataSizeBytes); |
2625 if (is_muted && previous_frame_muted_) { | 2627 if (is_muted && previous_frame_muted_) { |
2626 rms_level_.AnalyzeMuted(length); | 2628 rms_level_.AnalyzeMuted(length); |
2627 } else { | 2629 } else { |
2628 rms_level_.Analyze( | 2630 rms_level_.Analyze( |
2629 rtc::ArrayView<const int16_t>(_audioFrame.data_, length)); | 2631 rtc::ArrayView<const int16_t>(_audioFrame.data(), length)); |
2630 } | 2632 } |
2631 } | 2633 } |
2632 previous_frame_muted_ = is_muted; | 2634 previous_frame_muted_ = is_muted; |
2633 | 2635 |
2634 return 0; | 2636 return 0; |
2635 } | 2637 } |
2636 | 2638 |
2637 uint32_t Channel::EncodeAndSend() { | 2639 uint32_t Channel::EncodeAndSend() { |
2638 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId), | 2640 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId), |
2639 "Channel::EncodeAndSend()"); | 2641 "Channel::EncodeAndSend()"); |
(...skipping 156 matching lines...)
2796 "Channel::MixOrReplaceAudioWithFile() file is ended"); | 2798 "Channel::MixOrReplaceAudioWithFile() file is ended"); |
2797 return 0; | 2799 return 0; |
2798 } | 2800 } |
2799 } | 2801 } |
2800 | 2802 |
2801 assert(_audioFrame.samples_per_channel_ == fileSamples); | 2803 assert(_audioFrame.samples_per_channel_ == fileSamples); |
2802 | 2804 |
2803 if (_mixFileWithMicrophone) { | 2805 if (_mixFileWithMicrophone) { |
2804 // Currently file stream is always mono. | 2806 // Currently file stream is always mono. |
2805 // TODO(xians): Change the code when FilePlayer supports real stereo. | 2807 // TODO(xians): Change the code when FilePlayer supports real stereo. |
2806 MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(), | 2808 MixWithSat(_audioFrame.mutable_data(), _audioFrame.num_channels_, |
2807 1, fileSamples); | 2809 fileBuffer.get(), 1, fileSamples); |
2808 } else { | 2810 } else { |
2809 // Replace ACM audio with file. | 2811 // Replace ACM audio with file. |
2810 // Currently file stream is always mono. | 2812 // Currently file stream is always mono. |
2811 // TODO(xians): Change the code when FilePlayer supports real stereo. | 2813 // TODO(xians): Change the code when FilePlayer supports real stereo. |
2812 _audioFrame.UpdateFrame( | 2814 _audioFrame.UpdateFrame( |
2813 _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency, | 2815 _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency, |
2814 AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1); | 2816 AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1); |
2815 } | 2817 } |
2816 return 0; | 2818 return 0; |
2817 } | 2819 } |
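The MixWithSat call site in this hunk, like the one in MixAudioWithFile below, switches to mutable_data() because the helper mixes the mono file buffer into the frame in place with int16 saturation, so it genuinely needs a writable pointer. A self-contained sketch of that kind of mix (name and signature are illustrative, not the helper defined in channel.cc):

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    // Illustrative in-place saturating mix of a mono source into an
    // interleaved target; shows why the call needs mutable_data().
    void MixMonoWithSat(int16_t* target, size_t num_channels,
                        const int16_t* mono_source,
                        size_t samples_per_channel) {
      for (size_t i = 0; i < samples_per_channel; ++i) {
        for (size_t ch = 0; ch < num_channels; ++ch) {
          int32_t sum = static_cast<int32_t>(target[i * num_channels + ch]) +
                        mono_source[i];
          sum = std::min<int32_t>(32767, std::max<int32_t>(-32768, sum));
          target[i * num_channels + ch] = static_cast<int16_t>(sum);
        }
      }
    }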
(...skipping 18 matching lines...)
2836 fileBuffer.get(), &fileSamples, mixingFrequency) == -1) { | 2838 fileBuffer.get(), &fileSamples, mixingFrequency) == -1) { |
2837 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId), | 2839 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId), |
2838 "Channel::MixAudioWithFile() file mixing failed"); | 2840 "Channel::MixAudioWithFile() file mixing failed"); |
2839 return -1; | 2841 return -1; |
2840 } | 2842 } |
2841 } | 2843 } |
2842 | 2844 |
2843 if (audioFrame.samples_per_channel_ == fileSamples) { | 2845 if (audioFrame.samples_per_channel_ == fileSamples) { |
2844 // Currently file stream is always mono. | 2846 // Currently file stream is always mono. |
2845 // TODO(xians): Change the code when FilePlayer supports real stereo. | 2847 // TODO(xians): Change the code when FilePlayer supports real stereo. |
2846 MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1, | 2848 MixWithSat(audioFrame.mutable_data(), audioFrame.num_channels_, |
2847 fileSamples); | 2849 fileBuffer.get(), 1, fileSamples); |
2848 } else { | 2850 } else { |
2849 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId), | 2851 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId), |
2850 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS | 2852 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS |
2851 ") != " | 2853 ") != " |
2852 "fileSamples(%" PRIuS ")", | 2854 "fileSamples(%" PRIuS ")", |
2853 audioFrame.samples_per_channel_, fileSamples); | 2855 audioFrame.samples_per_channel_, fileSamples); |
2854 return -1; | 2856 return -1; |
2855 } | 2857 } |
2856 | 2858 |
2857 return 0; | 2859 return 0; |
(...skipping 134 matching lines...)
2992 int64_t min_rtt = 0; | 2994 int64_t min_rtt = 0; |
2993 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) != | 2995 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) != |
2994 0) { | 2996 0) { |
2995 return 0; | 2997 return 0; |
2996 } | 2998 } |
2997 return rtt; | 2999 return rtt; |
2998 } | 3000 } |
2999 | 3001 |
3000 } // namespace voe | 3002 } // namespace voe |
3001 } // namespace webrtc | 3003 } // namespace webrtc |