Index: webrtc/voice_engine/channel.cc
diff --git a/webrtc/voice_engine/channel.cc b/webrtc/voice_engine/channel.cc
index 1d73db6d396a29ab1bc3ca12beee0c8ce1106ae0..eeaaa368f03219a3017a2b6334c9ef9fe52609a7 100644
--- a/webrtc/voice_engine/channel.cc
+++ b/webrtc/voice_engine/channel.cc
@@ -649,8 +649,10 @@ MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
     // own mixing/dynamic processing.
     rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
+      // TODO(yujo): switch to passing audioFrame->data() once
+      // AudioSinkInterface::Data holds a const int16_t*.
       AudioSinkInterface::Data data(
-          &audioFrame->data_[0], audioFrame->samples_per_channel_,
+          audioFrame->mutable_data(), audioFrame->samples_per_channel_,
           audioFrame->sample_rate_hz_, audioFrame->num_channels_,
           audioFrame->timestamp_);
       audio_sink_->OnData(data);
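
Note on the hunk above: AudioSinkInterface::Data still stores a plain int16_t*, so the channel has to hand it audioFrame->mutable_data() even though the sink only reads the samples; the TODO tracks switching to data() once the struct takes a const pointer. The sketch below is a self-contained stand-in, not the real webrtc::AudioFrame (class name and buffer size are assumptions), showing the const/non-const accessor split this patch migrates call sites onto.

// sketch_audio_frame_accessors.cc -- illustrative stand-in, not WebRTC source.
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Simplified AudioFrame-like class: reads go through a const accessor and
// writes through a mutable one, mirroring the data()/mutable_data() pair the
// patch switches to. The buffer size is an assumed placeholder value.
class FrameSketch {
 public:
  static const size_t kMaxDataSizeSamples = 3840;
  const int16_t* data() const { return data_; }  // read-only access
  int16_t* mutable_data() { return data_; }      // writable access

 private:
  int16_t data_[kMaxDataSizeSamples] = {0};
};

int main() {
  FrameSketch frame;
  frame.mutable_data()[0] = 1234;         // producer writes via mutable_data()
  const int16_t* samples = frame.data();  // a sink-style consumer only reads
  std::printf("first sample: %d\n", samples[0]);
  return 0;
}
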
@@ -2621,12 +2623,12 @@ uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
   if (_includeAudioLevelIndication) {
     size_t length =
         _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
-    RTC_CHECK_LE(length, sizeof(_audioFrame.data_));
+    RTC_CHECK_LE(length, AudioFrame::kMaxDataSizeBytes);
     if (is_muted && previous_frame_muted_) {
       rms_level_.AnalyzeMuted(length);
     } else {
       rms_level_.Analyze(
-          rtc::ArrayView<const int16_t>(_audioFrame.data_, length));
+          rtc::ArrayView<const int16_t>(_audioFrame.data(), length));
     }
   }
   previous_frame_muted_ = is_muted;
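
Note on the RTC_CHECK_LE above: length is a sample count (samples_per_channel_ * num_channels_), while AudioFrame::kMaxDataSizeBytes is the byte size of the backing buffer, i.e. the same value sizeof(_audioFrame.data_) produced before, so the patched check keeps the existing byte-based bound rather than tightening it to a sample bound. A small self-contained sketch of that arithmetic, with the constants assumed rather than taken from this patch:

// sketch_length_check.cc -- illustrative only; constants are assumptions.
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Stand-ins for the AudioFrame buffer limits (WebRTC defines these on the class).
const size_t kMaxDataSizeSamples = 3840;
const size_t kMaxDataSizeBytes = kMaxDataSizeSamples * sizeof(int16_t);

int main() {
  // Same quantity the RTC_CHECK_LE above computes: per-channel samples times
  // the channel count, i.e. a sample count.
  const size_t samples_per_channel = 480;  // e.g. 10 ms at 48 kHz
  const size_t num_channels = 2;
  const size_t length = samples_per_channel * num_channels;

  // The patched check compares this sample count against the buffer's byte
  // capacity, matching what sizeof(_audioFrame.data_) evaluated to before.
  std::printf("length=%zu samples, bound=%zu bytes, ok=%d\n", length,
              kMaxDataSizeBytes, static_cast<int>(length <= kMaxDataSizeBytes));
  return 0;
}
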
@@ -2803,8 +2805,8 @@ int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
   if (_mixFileWithMicrophone) {
     // Currently file stream is always mono.
     // TODO(xians): Change the code when FilePlayer supports real stereo.
-    MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
-               1, fileSamples);
+    MixWithSat(_audioFrame.mutable_data(), _audioFrame.num_channels_,
+               fileBuffer.get(), 1, fileSamples);
   } else {
     // Replace ACM audio with file.
     // Currently file stream is always mono.
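
Note on the MixWithSat() call above (the next hunk makes the identical change in MixAudioWithFile()): the file samples are mixed into the frame's buffer in place, so the destination pointer now has to come from mutable_data(). Below is a self-contained sketch of a saturating mix of that shape; the name and signature are assumptions for illustration, not the real MixWithSat() helper in channel.cc.

// sketch_mix_with_sat.cc -- illustrative stand-in for an in-place saturating
// mix into a writable frame buffer; not the real MixWithSat() helper.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Mixes |length| mono samples from |src| into |dst|, clamping to int16_t range.
void MixWithSatSketch(int16_t* dst, const int16_t* src, size_t length) {
  for (size_t i = 0; i < length; ++i) {
    int32_t sum = static_cast<int32_t>(dst[i]) + src[i];
    dst[i] = static_cast<int16_t>(
        std::max<int32_t>(std::min<int32_t>(sum, INT16_MAX), INT16_MIN));
  }
}

int main() {
  int16_t frame[4] = {30000, -30000, 100, -100};     // pretend mutable_data()
  const int16_t file[4] = {10000, -10000, 50, -50};  // pretend file buffer
  MixWithSatSketch(frame, file, 4);
  for (int16_t s : frame) std::printf("%d ", s);     // 32767 -32768 150 -150
  std::printf("\n");
  return 0;
}
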
@@ -2843,8 +2845,8 @@ int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
   if (audioFrame.samples_per_channel_ == fileSamples) {
     // Currently file stream is always mono.
     // TODO(xians): Change the code when FilePlayer supports real stereo.
-    MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
-               fileSamples);
+    MixWithSat(audioFrame.mutable_data(), audioFrame.num_channels_,
+               fileBuffer.get(), 1, fileSamples);
   } else {
     WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                  "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS