Chromium Code Reviews

| OLD | NEW |
|---|---|
| (Empty) | |
| 1 /* | |
| 2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. | |
| 3 * | |
| 4 * Use of this source code is governed by a BSD-style license | |
| 5 * that can be found in the LICENSE file in the root of the source | |
| 6 * tree. An additional intellectual property rights grant can be found | |
| 7 * in the file PATENTS. All contributing project authors may | |
| 8 * be found in the AUTHORS file in the root of the source tree. | |
| 9 */ | |
| 10 | |
| 11 #include "webrtc/modules/audio_mixer/audio_mixer.h" | |
| 12 | |
| 13 #include "webrtc/base/format_macros.h" | |
| 14 #include "webrtc/modules/audio_processing/include/audio_processing.h" | |
| 15 #include "webrtc/modules/utility/include/audio_frame_operations.h" | |
| 16 #include "webrtc/system_wrappers/include/file_wrapper.h" | |
| 17 #include "webrtc/system_wrappers/include/trace.h" | |
| 18 #include "webrtc/voice_engine/include/voe_external_media.h" | |
| 19 #include "webrtc/voice_engine/statistics.h" | |
| 20 #include "webrtc/voice_engine/utility.h" | |
| 21 | |
| 22 namespace webrtc { | |
| 23 namespace voe { | |
| 24 | |
| 25 void AudioMixer::PlayNotification(int32_t id, uint32_t durationMs) { | |
| 26 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
| 27 "AudioMixer::PlayNotification(id=%d, durationMs=%d)", id, | |
| 28 durationMs); | |
| 29 // Not implemented yet | |
| 30 } | |
| 31 | |
| 32 void AudioMixer::RecordNotification(int32_t id, uint32_t durationMs) { | |
| 33 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
| 34 "AudioMixer::RecordNotification(id=%d, durationMs=%d)", id, | |
| 35 durationMs); | |
| 36 | |
| 37 // Not implemented yet | |
| 38 } | |
| 39 | |
| 40 void AudioMixer::PlayFileEnded(int32_t id) { | |
| 41 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
| 42 "AudioMixer::PlayFileEnded(id=%d)", id); | |
| 43 | |
| 44 // not needed | |
| 45 } | |
| 46 | |
| 47 void AudioMixer::RecordFileEnded(int32_t id) { | |
| 48 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
| 49 "AudioMixer::RecordFileEnded(id=%d)", id); | |
| 50 assert(id == _instanceId); | |

ivoc, 2016/07/07 13:46:16: Please convert asserts to DCHECKs in this file as well.

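A minimal sketch of the conversion the reviewer is asking for, assuming the RTC_DCHECK_* macros from webrtc/base/checks.h (the file already calls RTC_DCHECK(false) further down, so at most the include may need to be added); the exact change in the final patch may differ:

    // Sketch only, not the actual patch:
    #include "webrtc/base/checks.h"  // provides RTC_DCHECK / RTC_DCHECK_EQ

    // Before: compiled out in release builds, no diagnostic output on failure.
    assert(id == _instanceId);

    // After: still debug-only, but reports both operand values when it fires.
    RTC_DCHECK_EQ(id, _instanceId);

RTC_DCHECK_EQ is generally preferable to a plain RTC_DCHECK(id == _instanceId) because it logs the two operands on failure, which makes the resulting crash report easier to read.
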
| 51 | |
| 52 rtc::CritScope cs(&_fileCritSect); | |
| 53 _outputFileRecording = false; | |
| 54 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 55 "AudioMixer::RecordFileEnded() =>" | |
| 56 "output file recorder module is shutdown"); | |
| 57 } | |
| 58 | |
| 59 int32_t AudioMixer::Create(AudioMixer*& mixer, uint32_t instanceId) { | |
| 60 WEBRTC_TRACE(kTraceMemory, kTraceVoice, instanceId, | |
| 61 "AudioMixer::Create(instanceId=%d)", instanceId); | |
| 62 mixer = new AudioMixer(instanceId); | |
| 63 if (mixer == NULL) { | |
| 64 WEBRTC_TRACE(kTraceMemory, kTraceVoice, instanceId, | |
| 65 "AudioMixer::Create() unable to allocate memory for" | |
| 66 "mixer"); | |
| 67 return -1; | |
| 68 } | |
| 69 return 0; | |
| 70 } | |
| 71 | |
| 72 AudioMixer::AudioMixer(uint32_t instanceId) | |
| 73 : _mixerModule(*NewAudioConferenceMixer::Create(instanceId)), | |
| 74 _audioLevel(), | |
| 75 _instanceId(instanceId), | |
| 76 _externalMediaCallbackPtr(NULL), | |
| 77 _externalMedia(false), | |
| 78 _panLeft(1.0f), | |
| 79 _panRight(1.0f), | |
| 80 _mixingFrequencyHz(8000), | |
| 81 _outputFileRecorderPtr(NULL), | |
| 82 _outputFileRecording(false) { | |
| 83 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1), | |
| 84 "AudioMixer::AudioMixer() - ctor"); | |
| 85 } | |
| 86 | |
| 87 void AudioMixer::Destroy(AudioMixer*& mixer) { | |
| 88 if (mixer) { | |
| 89 delete mixer; | |
| 90 mixer = NULL; | |
| 91 } | |
| 92 } | |
| 93 | |
| 94 AudioMixer::~AudioMixer() { | |
| 95 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1), | |
| 96 "AudioMixer::~AudioMixer() - dtor"); | |
| 97 if (_externalMedia) { | |
| 98 DeRegisterExternalMediaProcessing(); | |
| 99 } | |
| 100 { | |
| 101 rtc::CritScope cs(&_fileCritSect); | |
| 102 if (_outputFileRecorderPtr) { | |
| 103 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | |
| 104 _outputFileRecorderPtr->StopRecording(); | |
| 105 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
| 106 _outputFileRecorderPtr = NULL; | |
| 107 } | |
| 108 } | |
| 109 delete &_mixerModule; | |
| 110 } | |
| 111 | |
| 112 int32_t AudioMixer::SetEngineInformation(voe::Statistics& engineStatistics) { | |
| 113 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 114 "AudioMixer::SetEngineInformation()"); | |
| 115 _engineStatisticsPtr = &engineStatistics; | |
| 116 return 0; | |
| 117 } | |
| 118 | |
| 119 int32_t AudioMixer::SetAudioProcessingModule( | |
| 120 AudioProcessing* audioProcessingModule) { | |
| 121 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 122 "AudioMixer::SetAudioProcessingModule(" | |
| 123 "audioProcessingModule=0x%x)", | |
| 124 audioProcessingModule); | |
| 125 _audioProcessingModulePtr = audioProcessingModule; | |
| 126 return 0; | |
| 127 } | |
| 128 | |
| 129 int AudioMixer::RegisterExternalMediaProcessing( | |
| 130 VoEMediaProcess& process_object) { | |
| 131 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 132 "AudioMixer::RegisterExternalMediaProcessing()"); | |
| 133 | |
| 134 rtc::CritScope cs(&_callbackCritSect); | |
| 135 _externalMediaCallbackPtr = &process_object; | |
| 136 _externalMedia = true; | |
| 137 | |
| 138 return 0; | |
| 139 } | |
| 140 | |
| 141 int AudioMixer::DeRegisterExternalMediaProcessing() { | |
| 142 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 143 "AudioMixer::DeRegisterExternalMediaProcessing()"); | |
| 144 | |
| 145 rtc::CritScope cs(&_callbackCritSect); | |
| 146 _externalMedia = false; | |
| 147 _externalMediaCallbackPtr = NULL; | |
| 148 | |
| 149 return 0; | |
| 150 } | |
| 151 | |
| 152 int32_t AudioMixer::SetMixabilityStatus(MixerAudioSource& participant, | |
| 153 bool mixable) { | |
| 154 return _mixerModule.SetMixabilityStatus(&participant, mixable); | |
| 155 } | |
| 156 | |
| 157 int32_t AudioMixer::SetAnonymousMixabilityStatus(MixerAudioSource& participant, | |
| 158 bool mixable) { | |
| 159 return _mixerModule.SetAnonymousMixabilityStatus(&participant, mixable); | |
| 160 } | |
| 161 | |
| 162 int32_t AudioMixer::MixActiveChannels() { | |
| 163 _mixerModule.Mix(&_audioFrame); | |
| 164 return 0; | |
| 165 } | |
| 166 | |
| 167 int AudioMixer::GetSpeechOutputLevel(uint32_t& level) { | |
| 168 int8_t currentLevel = _audioLevel.Level(); | |
| 169 level = static_cast<uint32_t>(currentLevel); | |
| 170 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 171 "GetSpeechOutputLevel() => level=%u", level); | |
| 172 return 0; | |
| 173 } | |
| 174 | |
| 175 int AudioMixer::GetSpeechOutputLevelFullRange(uint32_t& level) { | |
| 176 int16_t currentLevel = _audioLevel.LevelFullRange(); | |
| 177 level = static_cast<uint32_t>(currentLevel); | |
| 178 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 179 "GetSpeechOutputLevelFullRange() => level=%u", level); | |
| 180 return 0; | |
| 181 } | |
| 182 | |
| 183 int AudioMixer::SetOutputVolumePan(float left, float right) { | |
| 184 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 185 "AudioMixer::SetOutputVolumePan()"); | |
| 186 _panLeft = left; | |
| 187 _panRight = right; | |
| 188 return 0; | |
| 189 } | |
| 190 | |
| 191 int AudioMixer::GetOutputVolumePan(float& left, float& right) { | |
| 192 left = _panLeft; | |
| 193 right = _panRight; | |
| 194 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 195 "GetOutputVolumePan() => left=%2.1f, right=%2.1f", left, right); | |
| 196 return 0; | |
| 197 } | |
| 198 | |
| 199 int AudioMixer::StartRecordingPlayout(const char* fileName, | |
| 200 const CodecInst* codecInst) { | |
| 201 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 202 "AudioMixer::StartRecordingPlayout(fileName=%s)", fileName); | |
| 203 | |
| 204 if (_outputFileRecording) { | |
| 205 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1), | |
| 206 "StartRecordingPlayout() is already recording"); | |
| 207 return 0; | |
| 208 } | |
| 209 | |
| 210 FileFormats format; | |
| 211 const uint32_t notificationTime(0); | |
| 212 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000}; | |
| 213 | |
| 214 if ((codecInst != NULL) && | |
| 215 ((codecInst->channels < 1) || (codecInst->channels > 2))) { | |
| 216 _engineStatisticsPtr->SetLastError( | |
| 217 VE_BAD_ARGUMENT, kTraceError, | |
| 218 "StartRecordingPlayout() invalid compression"); | |
| 219 return (-1); | |
| 220 } | |
| 221 if (codecInst == NULL) { | |
| 222 format = kFileFormatPcm16kHzFile; | |
| 223 codecInst = &dummyCodec; | |
| 224 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) || | |
| 225 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) || | |
| 226 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) { | |
| 227 format = kFileFormatWavFile; | |
| 228 } else { | |
| 229 format = kFileFormatCompressedFile; | |
| 230 } | |
| 231 | |
| 232 rtc::CritScope cs(&_fileCritSect); | |
| 233 | |
| 234 // Destroy the old instance | |
| 235 if (_outputFileRecorderPtr) { | |
| 236 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | |
| 237 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
| 238 _outputFileRecorderPtr = NULL; | |
| 239 } | |
| 240 | |
| 241 _outputFileRecorderPtr = | |
| 242 FileRecorder::CreateFileRecorder(_instanceId, (const FileFormats)format); | |
| 243 if (_outputFileRecorderPtr == NULL) { | |
| 244 _engineStatisticsPtr->SetLastError( | |
| 245 VE_INVALID_ARGUMENT, kTraceError, | |
| 246 "StartRecordingPlayout() fileRecorder format isnot correct"); | |
| 247 return -1; | |
| 248 } | |
| 249 | |
| 250 if (_outputFileRecorderPtr->StartRecordingAudioFile( | |
| 251 fileName, (const CodecInst&)*codecInst, notificationTime) != 0) { | |
| 252 _engineStatisticsPtr->SetLastError( | |
| 253 VE_BAD_FILE, kTraceError, | |
| 254 "StartRecordingAudioFile() failed to start file recording"); | |
| 255 _outputFileRecorderPtr->StopRecording(); | |
| 256 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
| 257 _outputFileRecorderPtr = NULL; | |
| 258 return -1; | |
| 259 } | |
| 260 _outputFileRecorderPtr->RegisterModuleFileCallback(this); | |
| 261 _outputFileRecording = true; | |
| 262 | |
| 263 return 0; | |
| 264 } | |
| 265 | |
| 266 int AudioMixer::StartRecordingPlayout(OutStream* stream, | |
| 267 const CodecInst* codecInst) { | |
| 268 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 269 "AudioMixer::StartRecordingPlayout()"); | |
| 270 | |
| 271 if (_outputFileRecording) { | |
| 272 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1), | |
| 273 "StartRecordingPlayout() is already recording"); | |
| 274 return 0; | |
| 275 } | |
| 276 | |
| 277 FileFormats format; | |
| 278 const uint32_t notificationTime(0); | |
| 279 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000}; | |
| 280 | |
| 281 if (codecInst != NULL && codecInst->channels != 1) { | |
| 282 _engineStatisticsPtr->SetLastError( | |
| 283 VE_BAD_ARGUMENT, kTraceError, | |
| 284 "StartRecordingPlayout() invalid compression"); | |
| 285 return (-1); | |
| 286 } | |
| 287 if (codecInst == NULL) { | |
| 288 format = kFileFormatPcm16kHzFile; | |
| 289 codecInst = &dummyCodec; | |
| 290 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) || | |
| 291 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) || | |
| 292 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) { | |
| 293 format = kFileFormatWavFile; | |
| 294 } else { | |
| 295 format = kFileFormatCompressedFile; | |
| 296 } | |
| 297 | |
| 298 rtc::CritScope cs(&_fileCritSect); | |
| 299 | |
| 300 // Destroy the old instance | |
| 301 if (_outputFileRecorderPtr) { | |
| 302 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | |
| 303 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
| 304 _outputFileRecorderPtr = NULL; | |
| 305 } | |
| 306 | |
| 307 _outputFileRecorderPtr = | |
| 308 FileRecorder::CreateFileRecorder(_instanceId, (const FileFormats)format); | |
| 309 if (_outputFileRecorderPtr == NULL) { | |
| 310 _engineStatisticsPtr->SetLastError( | |
| 311 VE_INVALID_ARGUMENT, kTraceError, | |
| 312 "StartRecordingPlayout() fileRecorder format isnot correct"); | |
| 313 return -1; | |
| 314 } | |
| 315 | |
| 316 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst, | |
| 317 notificationTime) != 0) { | |
| 318 _engineStatisticsPtr->SetLastError( | |
| 319 VE_BAD_FILE, kTraceError, | |
| 320 "StartRecordingAudioFile() failed to start file recording"); | |
| 321 _outputFileRecorderPtr->StopRecording(); | |
| 322 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
| 323 _outputFileRecorderPtr = NULL; | |
| 324 return -1; | |
| 325 } | |
| 326 | |
| 327 _outputFileRecorderPtr->RegisterModuleFileCallback(this); | |
| 328 _outputFileRecording = true; | |
| 329 | |
| 330 return 0; | |
| 331 } | |
| 332 | |
| 333 int AudioMixer::StopRecordingPlayout() { | |
| 334 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
| 335 "AudioMixer::StopRecordingPlayout()"); | |
| 336 | |
| 337 if (!_outputFileRecording) { | |
| 338 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1), | |
| 339 "StopRecordingPlayout() file isnot recording"); | |
| 340 return -1; | |
| 341 } | |
| 342 | |
| 343 rtc::CritScope cs(&_fileCritSect); | |
| 344 | |
| 345 if (_outputFileRecorderPtr->StopRecording() != 0) { | |
| 346 _engineStatisticsPtr->SetLastError( | |
| 347 VE_STOP_RECORDING_FAILED, kTraceError, | |
| 348 "StopRecording(), could not stop recording"); | |
| 349 return -1; | |
| 350 } | |
| 351 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | |
| 352 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
| 353 _outputFileRecorderPtr = NULL; | |
| 354 _outputFileRecording = false; | |
| 355 | |
| 356 return 0; | |
| 357 } | |
| 358 | |
| 359 int AudioMixer::GetMixedAudio(int sample_rate_hz, | |
| 360 size_t num_channels, | |
| 361 AudioFrame* frame) { | |
| 362 WEBRTC_TRACE( | |
| 363 kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
| 364 "AudioMixer::GetMixedAudio(sample_rate_hz=%d, num_channels=%" PRIuS ")", | |
| 365 sample_rate_hz, num_channels); | |
| 366 | |
| 367 // --- Record playout if enabled | |
| 368 { | |
| 369 rtc::CritScope cs(&_fileCritSect); | |
| 370 if (_outputFileRecording && _outputFileRecorderPtr) | |
| 371 _outputFileRecorderPtr->RecordAudioToFile(_audioFrame); | |
| 372 } | |
| 373 | |
| 374 frame->num_channels_ = num_channels; | |
| 375 frame->sample_rate_hz_ = sample_rate_hz; | |
| 376 // TODO(andrew): Ideally the downmixing would occur much earlier, in | |
| 377 // AudioCodingModule. | |
| 378 RemixAndResample(_audioFrame, &resampler_, frame); | |
| 379 return 0; | |
| 380 } | |
| 381 | |
| 382 int32_t AudioMixer::DoOperationsOnCombinedSignal(bool feed_data_to_apm) { | |
| 383 if (_audioFrame.sample_rate_hz_ != _mixingFrequencyHz) { | |
| 384 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
| 385 "AudioMixer::DoOperationsOnCombinedSignal() => " | |
| 386 "mixing frequency = %d", | |
| 387 _audioFrame.sample_rate_hz_); | |
| 388 _mixingFrequencyHz = _audioFrame.sample_rate_hz_; | |
| 389 } | |
| 390 | |
| 391 // Scale left and/or right channel(s) if balance is active | |
| 392 if (_panLeft != 1.0 || _panRight != 1.0) { | |
| 393 if (_audioFrame.num_channels_ == 1) { | |
| 394 AudioFrameOperations::MonoToStereo(&_audioFrame); | |
| 395 } else { | |
| 396 // Pure stereo mode (we are receiving a stereo signal). | |
| 397 } | |
| 398 | |
| 399 assert(_audioFrame.num_channels_ == 2); | |
| 400 AudioFrameOperations::Scale(_panLeft, _panRight, _audioFrame); | |
| 401 } | |
| 402 | |
| 403 // --- Far-end Voice Quality Enhancement (AudioProcessing Module) | |
| 404 if (feed_data_to_apm) { | |
| 405 if (_audioProcessingModulePtr->ProcessReverseStream(&_audioFrame) != 0) { | |
| 406 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1), | |
| 407 "AudioProcessingModule::ProcessReverseStream() => error"); | |
| 408 RTC_DCHECK(false); | |
| 409 } | |
| 410 } | |
| 411 | |
| 412 // --- External media processing | |
| 413 { | |
| 414 rtc::CritScope cs(&_callbackCritSect); | |
| 415 if (_externalMedia) { | |
| 416 const bool is_stereo = (_audioFrame.num_channels_ == 2); | |
| 417 if (_externalMediaCallbackPtr) { | |
| 418 _externalMediaCallbackPtr->Process( | |
| 419 -1, kPlaybackAllChannelsMixed, | |
| 420 reinterpret_cast<int16_t*>(_audioFrame.data_), | |
| 421 _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_, | |
| 422 is_stereo); | |
| 423 } | |
| 424 } | |
| 425 } | |
| 426 | |
| 427 // --- Measure audio level (0-9) for the combined signal | |
| 428 _audioLevel.ComputeLevel(_audioFrame); | |
| 429 | |
| 430 return 0; | |
| 431 } | |
| 432 } // namespace voe | |
| 433 } // namespace webrtc | |