Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 97 matching lines...) | |
| 108 NewAudioConferenceMixerImpl::NewAudioConferenceMixerImpl(int id) | 108 NewAudioConferenceMixerImpl::NewAudioConferenceMixerImpl(int id) |
| 109 : _id(id), | 109 : _id(id), |
| 110 _minimumMixingFreq(kLowestPossible), | 110 _minimumMixingFreq(kLowestPossible), |
| 111 _outputFrequency(kDefaultFrequency), | 111 _outputFrequency(kDefaultFrequency), |
| 112 _sampleSize(0), | 112 _sampleSize(0), |
| 113 _audioFramePool(NULL), | 113 _audioFramePool(NULL), |
| 114 _participantList(), | 114 _participantList(), |
| 115 _additionalParticipantList(), | 115 _additionalParticipantList(), |
| 116 _numMixedParticipants(0), | 116 _numMixedParticipants(0), |
| 117 use_limiter_(true), | 117 use_limiter_(true), |
| 118 _timeStamp(0), | 118 _timeStamp(0) {} |
| 119 _timeScheduler(kProcessPeriodicityInMs), | |
| 120 _processCalls(0) {} | |
| 121 | 119 |
| 122 bool NewAudioConferenceMixerImpl::Init() { | 120 bool NewAudioConferenceMixerImpl::Init() { |
| 123 _crit.reset(CriticalSectionWrapper::CreateCriticalSection()); | 121 _crit.reset(CriticalSectionWrapper::CreateCriticalSection()); |
| 124 if (_crit.get() == NULL) | 122 if (_crit.get() == NULL) |
| 125 return false; | 123 return false; |
| 126 | 124 |
| 127 _cbCrit.reset(CriticalSectionWrapper::CreateCriticalSection()); | 125 _cbCrit.reset(CriticalSectionWrapper::CreateCriticalSection()); |
| 128 if (_cbCrit.get() == NULL) | 126 if (_cbCrit.get() == NULL) |
| 129 return false; | 127 return false; |
| 130 | 128 |
| (...skipping 32 matching lines...) | |
| 163 return false; | 161 return false; |
| 164 | 162 |
| 165 return true; | 163 return true; |
| 166 } | 164 } |
| 167 | 165 |
| 168 NewAudioConferenceMixerImpl::~NewAudioConferenceMixerImpl() { | 166 NewAudioConferenceMixerImpl::~NewAudioConferenceMixerImpl() { |
| 169 MemoryPool<AudioFrame>::DeleteMemoryPool(_audioFramePool); | 167 MemoryPool<AudioFrame>::DeleteMemoryPool(_audioFramePool); |
| 170 assert(_audioFramePool == NULL); | 168 assert(_audioFramePool == NULL); |
| 171 } | 169 } |
| 172 | 170 |
| 173 // Process should be called every kProcessPeriodicityInMs ms | |
| 174 int64_t NewAudioConferenceMixerImpl::TimeUntilNextProcess() { | |
| 175 int64_t timeUntilNextProcess = 0; | |
| 176 CriticalSectionScoped cs(_crit.get()); | |
| 177 if (_timeScheduler.TimeToNextUpdate(timeUntilNextProcess) != 0) { | |
| 178 WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id, | |
| 179 "failed in TimeToNextUpdate() call"); | |
| 180 // Sanity check | |
| 181 assert(false); | |
| 182 return -1; | |
| 183 } | |
| 184 return timeUntilNextProcess; | |
| 185 } | |
| 186 | |
| 187 void NewAudioConferenceMixerImpl::Process() { | |
| 188 RTC_NOTREACHED(); | |
| 189 } | |
| 190 | |
| 191 void NewAudioConferenceMixerImpl::Mix(AudioFrame* audio_frame_for_mixing) { | 171 void NewAudioConferenceMixerImpl::Mix(AudioFrame* audio_frame_for_mixing) { |
| 192 size_t remainingParticipantsAllowedToMix = kMaximumAmountOfMixedParticipants; | 172 size_t remainingParticipantsAllowedToMix = kMaximumAmountOfMixedParticipants; |
| 193 { | |
| 194 CriticalSectionScoped cs(_crit.get()); | |
| 195 assert(_processCalls == 0); | |

ivoc (2016/07/04 13:11:52):
It seems like this code checks that this function

aleloi (2016/07/06 10:20:32):
Wow! This was important and I missed it. I thought

ivoc (2016/07/06 14:43:26):
Although I agree that it's good to check for this,

aleloi (2016/07/06 15:17:19):
Good idea! Done.
| 196 _processCalls++; | |
| 197 | |
| 198 // Let the scheduler know that we are running one iteration. | |
| 199 _timeScheduler.UpdateScheduler(); | |
| 200 } | |
| 201 | 173 |
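Note on the thread above (not part of this CL; the comments are cut off here, so the actual resolution isn't visible): if the intent is to keep catching overlapping Mix() calls after dropping the _processCalls/_timeScheduler bookkeeping, one minimal sketch is a debug-only flag guarded by the existing lock. The member mix_in_progress_ below is invented for illustration and is not necessarily what the CL landed.

```cpp
// Hypothetical sketch only; |mix_in_progress_| is an invented member.
void NewAudioConferenceMixerImpl::Mix(AudioFrame* audio_frame_for_mixing) {
  {
    CriticalSectionScoped cs(_crit.get());
    RTC_DCHECK(!mix_in_progress_);  // Catch concurrent/reentrant Mix() calls.
    mix_in_progress_ = true;
  }

  // ... existing mixing work from this function ...

  {
    CriticalSectionScoped cs(_crit.get());
    mix_in_progress_ = false;
  }
}
```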
| 202 AudioFrameList mixList; | 174 AudioFrameList mixList; |
| 203 AudioFrameList rampOutList; | 175 AudioFrameList rampOutList; |
| 204 AudioFrameList additionalFramesList; | 176 AudioFrameList additionalFramesList; |
| 205 std::map<int, MixerAudioSource*> mixedParticipantsMap; | 177 std::map<int, MixerAudioSource*> mixedParticipantsMap; |
| 206 { | 178 { |
| 207 CriticalSectionScoped cs(_cbCrit.get()); | 179 CriticalSectionScoped cs(_cbCrit.get()); |
| 208 | 180 |
| 209 int32_t lowFreq = GetLowestMixingFrequency(); | 181 int32_t lowFreq = GetLowestMixingFrequency(); |
| 210 // SILK can run in 12 kHz and 24 kHz. These frequencies are not | 182 // SILK can run in 12 kHz and 24 kHz. These frequencies are not |
| 211 // supported so use the closest higher frequency to not lose any | 183 // supported so use the closest higher frequency to not lose any |
| 212 // information. | 184 // information. |
| 213 // TODO(henrike): this is probably more appropriate to do in | 185 // TODO(henrike): this is probably more appropriate to do in |
| 214 // GetLowestMixingFrequency(). | 186 // GetLowestMixingFrequency(). |
| 215 if (lowFreq == 12000) { | 187 if (lowFreq == 12000) { |
| 216 lowFreq = 16000; | 188 lowFreq = 16000; |
| 217 } else if (lowFreq == 24000) { | 189 } else if (lowFreq == 24000) { |
| 218 lowFreq = 32000; | 190 lowFreq = 32000; |
| 219 } | 191 } |
| 220 if (lowFreq <= 0) { | 192 if (lowFreq <= 0) { |
| 221 CriticalSectionScoped cs(_crit.get()); | 193 CriticalSectionScoped cs(_crit.get()); |
| 222 _processCalls--; | |
| 223 return; | 194 return; |
| 224 } else { | 195 } else { |
| 225 switch (lowFreq) { | 196 switch (lowFreq) { |
| 226 case 8000: | 197 case 8000: |
| 227 if (OutputFrequency() != kNbInHz) { | 198 if (OutputFrequency() != kNbInHz) { |
| 228 SetOutputFrequency(kNbInHz); | 199 SetOutputFrequency(kNbInHz); |
| 229 } | 200 } |
| 230 break; | 201 break; |
| 231 case 16000: | 202 case 16000: |
| 232 if (OutputFrequency() != kWbInHz) { | 203 if (OutputFrequency() != kWbInHz) { |
| 233 SetOutputFrequency(kWbInHz); | 204 SetOutputFrequency(kWbInHz); |
| 234 } | 205 } |
| 235 break; | 206 break; |
| 236 case 32000: | 207 case 32000: |
| 237 if (OutputFrequency() != kSwbInHz) { | 208 if (OutputFrequency() != kSwbInHz) { |
| 238 SetOutputFrequency(kSwbInHz); | 209 SetOutputFrequency(kSwbInHz); |
| 239 } | 210 } |
| 240 break; | 211 break; |
| 241 case 48000: | 212 case 48000: |
| 242 if (OutputFrequency() != kFbInHz) { | 213 if (OutputFrequency() != kFbInHz) { |
| 243 SetOutputFrequency(kFbInHz); | 214 SetOutputFrequency(kFbInHz); |
| 244 } | 215 } |
| 245 break; | 216 break; |
| 246 default: | 217 default: |
| 247 assert(false); | 218 assert(false); |
| 248 | 219 |
| 249 CriticalSectionScoped cs(_crit.get()); | 220 CriticalSectionScoped cs(_crit.get()); |
| 250 _processCalls--; | |
| 251 return; | 221 return; |
| 252 } | 222 } |
| 253 } | 223 } |
| 254 | 224 |
| 255 UpdateToMix(&mixList, &rampOutList, &mixedParticipantsMap, | 225 UpdateToMix(&mixList, &rampOutList, &mixedParticipantsMap, |
| 256 &remainingParticipantsAllowedToMix); | 226 &remainingParticipantsAllowedToMix); |
| 257 | 227 |
| 258 GetAdditionalAudio(&additionalFramesList); | 228 GetAdditionalAudio(&additionalFramesList); |
| 259 UpdateMixedStatus(mixedParticipantsMap); | 229 UpdateMixedStatus(mixedParticipantsMap); |
| 260 } | 230 } |
| (...skipping 31 matching lines...) | |
| 292 audio_frame_for_mixing->Mute(); | 262 audio_frame_for_mixing->Mute(); |
| 293 } else { | 263 } else { |
| 294 // Only call the limiter if we have something to mix. | 264 // Only call the limiter if we have something to mix. |
| 295 LimitMixedAudio(audio_frame_for_mixing); | 265 LimitMixedAudio(audio_frame_for_mixing); |
| 296 } | 266 } |
| 297 } | 267 } |
| 298 | 268 |
| 299 ClearAudioFrameList(&mixList); | 269 ClearAudioFrameList(&mixList); |
| 300 ClearAudioFrameList(&rampOutList); | 270 ClearAudioFrameList(&rampOutList); |
| 301 ClearAudioFrameList(&additionalFramesList); | 271 ClearAudioFrameList(&additionalFramesList); |
| 302 { | |
| 303 CriticalSectionScoped cs(_crit.get()); | |
| 304 _processCalls--; | |
| 305 } | |
| 306 return; | 272 return; |
| 307 } | 273 } |
| 308 | 274 |
| 309 int32_t NewAudioConferenceMixerImpl::SetOutputFrequency( | 275 int32_t NewAudioConferenceMixerImpl::SetOutputFrequency( |
| 310 const Frequency& frequency) { | 276 const Frequency& frequency) { |
| 311 CriticalSectionScoped cs(_crit.get()); | 277 CriticalSectionScoped cs(_crit.get()); |
| 312 | 278 |
| 313 _outputFrequency = frequency; | 279 _outputFrequency = frequency; |
| 314 _sampleSize = | 280 _sampleSize = |
| 315 static_cast<size_t>((_outputFrequency * kProcessPeriodicityInMs) / 1000); | 281 static_cast<size_t>((_outputFrequency * kProcessPeriodicityInMs) / 1000); |
| (...skipping 535 matching lines...) | |
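For reference (an illustrative note, not part of the CL): the _sampleSize set in SetOutputFrequency() is the per-channel sample count produced by one mixing pass. Assuming kProcessPeriodicityInMs is the usual 10 ms period used by this mixer, the arithmetic works out as in this sketch; the helper name is invented.

```cpp
#include <cstddef>

// Worked example of the _sampleSize formula, assuming a 10 ms period
// (kProcessPeriodicityInMs == 10).
constexpr size_t SamplesPerMixingPass(int frequency_hz, int period_ms) {
  return static_cast<size_t>(frequency_hz * period_ms) / 1000;
}
static_assert(SamplesPerMixingPass(16000, 10) == 160, "16 kHz -> 160 samples");
static_assert(SamplesPerMixingPass(48000, 10) == 480, "48 kHz -> 480 samples");
```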
| 851 | 817 |
| 852 if (error != _limiter->kNoError) { | 818 if (error != _limiter->kNoError) { |
| 853 WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id, | 819 WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id, |
| 854 "Error from AudioProcessing: %d", error); | 820 "Error from AudioProcessing: %d", error); |
| 855 assert(false); | 821 assert(false); |
| 856 return false; | 822 return false; |
| 857 } | 823 } |
| 858 return true; | 824 return true; |
| 859 } | 825 } |
| 860 } // namespace webrtc | 826 } // namespace webrtc |
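A usage note on the overall change (an inference from this diff, not something stated in it): with TimeUntilNextProcess()/Process() removed and the scheduler/process-call bookkeeping gone from the constructor and Mix(), the mixer is no longer driven by the module process thread; its owner is expected to call Mix() directly once per audio frame. A minimal caller sketch, with the pacing and surrounding loop entirely hypothetical:

```cpp
// Hypothetical caller sketch; headers omitted. NewAudioConferenceMixerImpl
// and AudioFrame come from this CL; the 10 ms pacing is an assumption.
void RunOneMixingPass(webrtc::NewAudioConferenceMixerImpl* mixer) {
  webrtc::AudioFrame mixed;
  // One call produces one mixed frame at the mixer's current output
  // frequency; there is no internal scheduling left in the mixer.
  mixer->Mix(&mixed);
  // ... forward |mixed| to playout or further processing ...
}
```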