| Index: webrtc/modules/audio_coding/neteq/neteq_impl.cc
|
| diff --git a/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/webrtc/modules/audio_coding/neteq/neteq_impl.cc
|
| index fc74f2de8bcdffbd855071ea61e155a7f55eeae0..fce6f5acae74ccaaef81bbba587661db5d787cc4 100644
|
| --- a/webrtc/modules/audio_coding/neteq/neteq_impl.cc
|
| +++ b/webrtc/modules/audio_coding/neteq/neteq_impl.cc
|
| @@ -150,33 +150,33 @@ int NetEqImpl::InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
|
|
|
| namespace {
|
| void SetAudioFrameActivityAndType(bool vad_enabled,
|
| - NetEqOutputType type,
|
| + NetEqImpl::OutputType type,
|
| AudioFrame::VADActivity last_vad_activity,
|
| AudioFrame* audio_frame) {
|
| switch (type) {
|
| - case kOutputNormal: {
|
| + case NetEqImpl::kOutputNormal: {
|
| audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
|
| audio_frame->vad_activity_ = AudioFrame::kVadActive;
|
| break;
|
| }
|
| - case kOutputVADPassive: {
|
| + case NetEqImpl::kOutputVADPassive: {
|
| // This should only be reached if the VAD is enabled.
|
| RTC_DCHECK(vad_enabled);
|
| audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
|
| audio_frame->vad_activity_ = AudioFrame::kVadPassive;
|
| break;
|
| }
|
| - case kOutputCNG: {
|
| + case NetEqImpl::kOutputCNG: {
|
| audio_frame->speech_type_ = AudioFrame::kCNG;
|
| audio_frame->vad_activity_ = AudioFrame::kVadPassive;
|
| break;
|
| }
|
| - case kOutputPLC: {
|
| + case NetEqImpl::kOutputPLC: {
|
| audio_frame->speech_type_ = AudioFrame::kPLC;
|
| audio_frame->vad_activity_ = last_vad_activity;
|
| break;
|
| }
|
| - case kOutputPLCtoCNG: {
|
| + case NetEqImpl::kOutputPLCtoCNG: {
|
| audio_frame->speech_type_ = AudioFrame::kPLCCNG;
|
| audio_frame->vad_activity_ = AudioFrame::kVadPassive;
|
| break;
|
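The hunk above qualifies every enumerator with NetEqImpl::, which implies the free NetEqOutputType enum has moved into the NetEqImpl class. A minimal sketch of how the nested declaration in neteq_impl.h could look; only the enumerator names visible above come from the patch, while the ordering and the surrounding class layout are assumptions:

    // Hypothetical excerpt from neteq_impl.h; not part of this diff.
    class NetEqImpl : public NetEq {
     public:
      enum OutputType {
        kOutputNormal,
        kOutputPLC,
        kOutputCNG,
        kOutputPLCtoCNG,
        kOutputVADPassive,
      };
      // ...
    };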
| @@ -191,7 +191,7 @@ void SetAudioFrameActivityAndType(bool vad_enabled,
|
| }
|
| }
|
|
|
| -int NetEqImpl::GetAudio(AudioFrame* audio_frame, NetEqOutputType* type) {
|
| +int NetEqImpl::GetAudio(AudioFrame* audio_frame) {
|
| TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio");
|
| rtc::CritScope lock(&crit_sect_);
|
| int error = GetAudioInternal(audio_frame);
|
| @@ -202,9 +202,6 @@ int NetEqImpl::GetAudio(AudioFrame* audio_frame, NetEqOutputType* type) {
|
| error_code_ = error;
|
| return kFail;
|
| }
|
| - if (type) {
|
| - *type = LastOutputType();
|
| - }
|
| SetAudioFrameActivityAndType(vad_->enabled(), LastOutputType(),
|
| last_vad_activity_, audio_frame);
|
| last_vad_activity_ = audio_frame->vad_activity_;
|
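With the second parameter gone, GetAudio() no longer reports the output type through a NetEqOutputType out-parameter; the classification is written onto the AudioFrame by SetAudioFrameActivityAndType() instead. A caller-side sketch under that assumption; the neteq pointer and the comfort-noise check are illustrative, not taken from this patch:

    AudioFrame frame;
    if (neteq->GetAudio(&frame) != NetEq::kOK) {  // kOK/kFail per the NetEq return codes.
      // Error details are reported through the error-code path seen above.
      return;
    }
    // Speech type and VAD activity now arrive on the frame itself.
    const bool comfort_noise = frame.speech_type_ == AudioFrame::kCNG ||
                               frame.speech_type_ == AudioFrame::kPLCCNG;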
| @@ -2068,7 +2065,7 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
|
| decision_logic_->SetSampleRate(fs_hz_, output_size_samples_);
|
| }
|
|
|
| -NetEqOutputType NetEqImpl::LastOutputType() {
|
| +NetEqImpl::OutputType NetEqImpl::LastOutputType() {
|
| assert(vad_.get());
|
| assert(expand_.get());
|
| if (last_mode_ == kModeCodecInternalCng || last_mode_ == kModeRfc3389Cng) {
|
|
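The last hunk is cut off after the CNG condition, but together with the switch in SetAudioFrameActivityAndType() it suggests LastOutputType() maps last_mode_ onto the nested enumerators. A hedged reconstruction of that mapping; only the signature change and the CNG branch are visible in the patch, and the remaining branches and the MuteFactor()/VAD calls are assumptions:

    NetEqImpl::OutputType NetEqImpl::LastOutputType() {
      assert(vad_.get());
      assert(expand_.get());
      if (last_mode_ == kModeCodecInternalCng || last_mode_ == kModeRfc3389Cng) {
        return kOutputCNG;
      } else if (last_mode_ == kModeExpand && expand_->MuteFactor(0) == 0) {
        // A long expand that has faded down to background noise only.
        return kOutputPLCtoCNG;
      } else if (last_mode_ == kModeExpand) {
        return kOutputPLC;
      } else if (vad_->running() && !vad_->active_speech()) {
        return kOutputVADPassive;
      } else {
        return kOutputNormal;
      }
    }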
|