OLD | NEW |
---|---|
(Empty) | |
1 /* | |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | |
3 * | |
4 * Use of this source code is governed by a BSD-style license | |
5 * that can be found in the LICENSE file in the root of the source | |
6 * tree. An additional intellectual property rights grant can be found | |
7 * in the file PATENTS. All contributing project authors may | |
8 * be found in the AUTHORS file in the root of the source tree. | |
9 */ | |
10 | |
11 #include "webrtc/modules/audio_mixer/audio_mixer.h" | |
12 | |
13 #include "webrtc/base/format_macros.h" | |
14 #include "webrtc/modules/audio_processing/include/audio_processing.h" | |
15 #include "webrtc/modules/utility/include/audio_frame_operations.h" | |
16 #include "webrtc/system_wrappers/include/file_wrapper.h" | |
17 #include "webrtc/system_wrappers/include/trace.h" | |
18 #include "webrtc/voice_engine/include/voe_external_media.h" | |
19 #include "webrtc/voice_engine/statistics.h" | |
20 #include "webrtc/voice_engine/utility.h" | |
21 | |
22 namespace webrtc { | |
23 namespace voe { | |
24 | |
25 void AudioMixer::NewMixedAudio(int32_t id, | |
26 const AudioFrame& generalAudioFrame, | |
27 const AudioFrame** uniqueAudioFrames, | |
28 uint32_t size) { | |
29 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
30 "AudioMixer::NewMixedAudio(id=%d, size=%u)", id, size); | |
31 | |
32 _audioFrame.CopyFrom(generalAudioFrame); | |
33 _audioFrame.id_ = id; | |
34 } | |
35 | |
36 void AudioMixer::PlayNotification(int32_t id, uint32_t durationMs) { | |
37 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
38 "AudioMixer::PlayNotification(id=%d, durationMs=%d)", id, | |
39 durationMs); | |
40 // Not implement yet | |
ivoc 2016/07/04 12:07:23: Not implemented yet. (see also below)
aleloi 2016/07/04 12:34:45: Acknowledged.
41 } | |
42 | |
43 void AudioMixer::RecordNotification(int32_t id, uint32_t durationMs) { | |
44 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
45 "AudioMixer::RecordNotification(id=%d, durationMs=%d)", id, | |
46 durationMs); | |
47 | |
48 // Not implement yet | |
49 } | |
50 | |
51 void AudioMixer::PlayFileEnded(int32_t id) { | |
52 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
53 "AudioMixer::PlayFileEnded(id=%d)", id); | |
54 | |
55 // not needed | |
ivoc 2016/07/04 12:07:23: Not needed.
aleloi 2016/07/04 12:34:45: Acknowledged.
56 } | |
57 | |
58 void AudioMixer::RecordFileEnded(int32_t id) { | |
59 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
60 "AudioMixer::RecordFileEnded(id=%d)", id); | |
61 assert(id == _instanceId); | |
62 | |
63 rtc::CritScope cs(&_fileCritSect); | |
64 _outputFileRecording = false; | |
65 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), | |
66 "AudioMixer::RecordFileEnded() =>" | |
67 "output file recorder module is shutdown"); | |
68 } | |
69 | |
70 int32_t AudioMixer::Create(AudioMixer*& mixer, uint32_t instanceId) { | |
71 WEBRTC_TRACE(kTraceMemory, kTraceVoice, instanceId, | |
72 "AudioMixer::Create(instanceId=%d)", instanceId); | |
73 mixer = new AudioMixer(instanceId); | |
74 if (mixer == NULL) { | |
75 WEBRTC_TRACE(kTraceMemory, kTraceVoice, instanceId, | |
76 "AudioMixer::Create() unable to allocate memory for" | |
77 "mixer"); | |
78 return -1; | |
79 } | |
80 return 0; | |
81 } | |
82 | |
83 AudioMixer::AudioMixer(uint32_t instanceId) | |
84 : _mixerModule(*NewAudioConferenceMixer::Create(instanceId)), | |
85 _audioLevel(), | |
86 _instanceId(instanceId), | |
87 _externalMediaCallbackPtr(NULL), | |
88 _externalMedia(false), | |
89 _panLeft(1.0f), | |
90 _panRight(1.0f), | |
91 _mixingFrequencyHz(8000), | |
92 _outputFileRecorderPtr(NULL), | |
93 _outputFileRecording(false) { | |
94 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1), | |
95 "AudioMixer::AudioMixer() - ctor"); | |
96 | |
97 if (_mixerModule.RegisterMixedStreamCallback(this) == -1) { | |
98 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1), | |
99 "AudioMixer::AudioMixer() failed to register mixer" | |
100 "callbacks"); | |
101 } | |
102 } | |
103 | |
104 void AudioMixer::Destroy(AudioMixer*& mixer) { | |
105 if (mixer) { | |
106 delete mixer; | |
107 mixer = NULL; | |
108 } | |
109 } | |
110 | |
111 AudioMixer::~AudioMixer() { | |
112 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1), | |
113 "AudioMixer::~AudioMixer() - dtor"); | |
114 if (_externalMedia) { | |
115 DeRegisterExternalMediaProcessing(); | |
116 } | |
117 { | |
118 rtc::CritScope cs(&_fileCritSect); | |
119 if (_outputFileRecorderPtr) { | |
120 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | |
121 _outputFileRecorderPtr->StopRecording(); | |
122 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
123 _outputFileRecorderPtr = NULL; | |
124 } | |
125 } | |
126 _mixerModule.UnRegisterMixedStreamCallback(); | |
127 delete &_mixerModule; | |
128 } | |
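A minimal caller-side sketch of the lifecycle functions above (Create, the constructor/destructor, Destroy), assuming they are publicly callable as their definitions suggest; the instance id and error handling are illustrative, not taken from this patch.

```cpp
// Caller-side sketch; uses only AudioMixer::Create/Destroy as defined above.
// The instance id (1) is an arbitrary example value.
#include "webrtc/modules/audio_mixer/audio_mixer.h"

int CreateAndDestroyMixer() {
  webrtc::voe::AudioMixer* mixer = NULL;
  if (webrtc::voe::AudioMixer::Create(mixer, 1 /* instanceId */) != 0) {
    return -1;  // Create() logs and returns -1 on allocation failure.
  }
  // ... configure and use the mixer here ...
  webrtc::voe::AudioMixer::Destroy(mixer);  // Deletes the object and nulls mixer.
  return 0;
}
```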
129 | |
130 int32_t AudioMixer::SetEngineInformation(voe::Statistics& engineStatistics) { | |
131 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
132 "AudioMixer::SetEngineInformation()"); | |
133 _engineStatisticsPtr = &engineStatistics; | |
134 return 0; | |
135 } | |
136 | |
137 int32_t AudioMixer::SetAudioProcessingModule( | |
138 AudioProcessing* audioProcessingModule) { | |
139 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
140 "AudioMixer::SetAudioProcessingModule(" | |
141 "audioProcessingModule=0x%x)", | |
142 audioProcessingModule); | |
143 _audioProcessingModulePtr = audioProcessingModule; | |
144 return 0; | |
145 } | |
146 | |
147 int AudioMixer::RegisterExternalMediaProcessing( | |
148 VoEMediaProcess& proccess_object) { | |
149 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
150 "AudioMixer::RegisterExternalMediaProcessing()"); | |
151 | |
152 rtc::CritScope cs(&_callbackCritSect); | |
153 _externalMediaCallbackPtr = &proccess_object; | |
154 _externalMedia = true; | |
155 | |
156 return 0; | |
157 } | |
158 | |
159 int AudioMixer::DeRegisterExternalMediaProcessing() { | |
160 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
161 "AudioMixer::DeRegisterExternalMediaProcessing()"); | |
162 | |
163 rtc::CritScope cs(&_callbackCritSect); | |
164 _externalMedia = false; | |
165 _externalMediaCallbackPtr = NULL; | |
166 | |
167 return 0; | |
168 } | |
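A hedged sketch of how the external-media hook above could be used. VoEMediaProcess comes from voe_external_media.h (included at the top of this file); the Process() parameter list below is inferred from the call made in DoOperationsOnCombinedSignal() further down and may not match the header exactly.

```cpp
#include "webrtc/voice_engine/include/voe_external_media.h"

// Example observer. The Process() signature mirrors the arguments passed in
// DoOperationsOnCombinedSignal() below (channel, type, samples, samples per
// channel, sample rate, stereo flag); treat it as an approximation of the
// real interface, not a quotation of it.
class InspectingMediaProcess : public webrtc::VoEMediaProcess {
 public:
  void Process(int channel,
               webrtc::ProcessingTypes type,
               int16_t audio10ms[],
               size_t length,
               int samplingFreq,
               bool isStereo) override {
    // Read or modify the mixed 10 ms block in place here.
  }
};

// Registration against an existing mixer instance:
//   InspectingMediaProcess observer;
//   mixer->RegisterExternalMediaProcessing(observer);
//   ...
//   mixer->DeRegisterExternalMediaProcessing();
```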
169 | |
170 int32_t AudioMixer::SetMixabilityStatus(MixerAudioSource& participant, | |
171 bool mixable) { | |
172 return _mixerModule.SetMixabilityStatus(&participant, mixable); | |
173 } | |
174 | |
175 int32_t AudioMixer::SetAnonymousMixabilityStatus(MixerAudioSource& participant, | |
176 bool mixable) { | |
177 return _mixerModule.SetAnonymousMixabilityStatus(&participant, mixable); | |
178 } | |
179 | |
180 int32_t AudioMixer::MixActiveChannels() { | |
181 _mixerModule.Process(); | |
182 return 0; | |
183 } | |
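For orientation, a short sketch of wiring a participant into the mix with the setters and MixActiveChannels() above. The `source` argument stands for whatever MixerAudioSource implementation the caller owns (a placeholder, not something defined in this patch), and the webrtc::MixerAudioSource qualification is an assumption.

```cpp
// Hypothetical usage; `source` is a caller-owned MixerAudioSource
// implementation (placeholder name).
void AddParticipantAndMixOnce(webrtc::voe::AudioMixer* mixer,
                              webrtc::MixerAudioSource* source) {
  // Opt the participant into the mix; pass false later to remove it again.
  mixer->SetMixabilityStatus(*source, true);

  // Run one mixing pass; the result is delivered through NewMixedAudio().
  mixer->MixActiveChannels();
}
```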
184 | |
185 int AudioMixer::GetSpeechOutputLevel(uint32_t& level) { | |
186 int8_t currentLevel = _audioLevel.Level(); | |
187 level = static_cast<uint32_t>(currentLevel); | |
188 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), | |
189 "GetSpeechOutputLevel() => level=%u", level); | |
190 return 0; | |
191 } | |
192 | |
193 int AudioMixer::GetSpeechOutputLevelFullRange(uint32_t& level) { | |
194 int16_t currentLevel = _audioLevel.LevelFullRange(); | |
195 level = static_cast<uint32_t>(currentLevel); | |
196 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), | |
197 "GetSpeechOutputLevelFullRange() => level=%u", level); | |
198 return 0; | |
199 } | |
200 | |
201 int AudioMixer::SetOutputVolumePan(float left, float right) { | |
202 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
203 "AudioMixer::SetOutputVolumePan()"); | |
204 _panLeft = left; | |
205 _panRight = right; | |
206 return 0; | |
207 } | |
208 | |
209 int AudioMixer::GetOutputVolumePan(float& left, float& right) { | |
210 left = _panLeft; | |
211 right = _panRight; | |
212 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), | |
213 "GetOutputVolumePan() => left=%2.1f, right=%2.1f", left, right); | |
214 return 0; | |
215 } | |
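A small illustrative sketch of the pan and level accessors above; the values are examples only.

```cpp
// Illustrative only: pan playout hard left, then read back pan and level.
void PanHardLeft(webrtc::voe::AudioMixer* mixer) {
  mixer->SetOutputVolumePan(1.0f /* left */, 0.0f /* right */);

  float left = 0.0f;
  float right = 0.0f;
  mixer->GetOutputVolumePan(left, right);  // Expect left == 1.0f, right == 0.0f.

  uint32_t level = 0;
  mixer->GetSpeechOutputLevel(level);  // 0-9 scale, per the comment further down.
}
```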
216 | |
217 int AudioMixer::StartRecordingPlayout(const char* fileName, | |
218 const CodecInst* codecInst) { | |
219 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
220 "AudioMixer::StartRecordingPlayout(fileName=%s)", fileName); | |
221 | |
222 if (_outputFileRecording) { | |
223 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1), | |
224 "StartRecordingPlayout() is already recording"); | |
225 return 0; | |
226 } | |
227 | |
228 FileFormats format; | |
229 const uint32_t notificationTime(0); | |
230 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000}; | |
231 | |
232 if ((codecInst != NULL) && | |
233 ((codecInst->channels < 1) || (codecInst->channels > 2))) { | |
234 _engineStatisticsPtr->SetLastError( | |
235 VE_BAD_ARGUMENT, kTraceError, | |
236 "StartRecordingPlayout() invalid compression"); | |
237 return (-1); | |
238 } | |
239 if (codecInst == NULL) { | |
240 format = kFileFormatPcm16kHzFile; | |
241 codecInst = &dummyCodec; | |
242 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) || | |
243 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) || | |
244 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) { | |
245 format = kFileFormatWavFile; | |
246 } else { | |
247 format = kFileFormatCompressedFile; | |
248 } | |
249 | |
250 rtc::CritScope cs(&_fileCritSect); | |
251 | |
252 // Destroy the old instance | |
253 if (_outputFileRecorderPtr) { | |
254 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | |
255 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
256 _outputFileRecorderPtr = NULL; | |
257 } | |
258 | |
259 _outputFileRecorderPtr = | |
260 FileRecorder::CreateFileRecorder(_instanceId, (const FileFormats)format); | |
261 if (_outputFileRecorderPtr == NULL) { | |
262 _engineStatisticsPtr->SetLastError( | |
263 VE_INVALID_ARGUMENT, kTraceError, | |
264 "StartRecordingPlayout() fileRecorder format isnot correct"); | |
ivoc
2016/07/04 12:07:23
isnot -> is not (see also several times below)
aleloi
2016/07/04 12:34:45
Acknowledged.
| |
265 return -1; | |
266 } | |
267 | |
268 if (_outputFileRecorderPtr->StartRecordingAudioFile( | |
269 fileName, (const CodecInst&)*codecInst, notificationTime) != 0) { | |
270 _engineStatisticsPtr->SetLastError( | |
271 VE_BAD_FILE, kTraceError, | |
272 "StartRecordingAudioFile() failed to start file recording"); | |
273 _outputFileRecorderPtr->StopRecording(); | |
274 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
275 _outputFileRecorderPtr = NULL; | |
276 return -1; | |
277 } | |
278 _outputFileRecorderPtr->RegisterModuleFileCallback(this); | |
279 _outputFileRecording = true; | |
280 | |
281 return 0; | |
282 } | |
283 | |
284 int AudioMixer::StartRecordingPlayout(OutStream* stream, | |
285 const CodecInst* codecInst) { | |
286 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
287 "AudioMixer::StartRecordingPlayout()"); | |
288 | |
289 if (_outputFileRecording) { | |
290 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1), | |
291 "StartRecordingPlayout() is already recording"); | |
292 return 0; | |
293 } | |
294 | |
295 FileFormats format; | |
296 const uint32_t notificationTime(0); | |
297 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000}; | |
298 | |
299 if (codecInst != NULL && codecInst->channels != 1) { | |
300 _engineStatisticsPtr->SetLastError( | |
301 VE_BAD_ARGUMENT, kTraceError, | |
302 "StartRecordingPlayout() invalid compression"); | |
303 return (-1); | |
304 } | |
305 if (codecInst == NULL) { | |
306 format = kFileFormatPcm16kHzFile; | |
307 codecInst = &dummyCodec; | |
308 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) || | |
309 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) || | |
310 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) { | |
311 format = kFileFormatWavFile; | |
312 } else { | |
313 format = kFileFormatCompressedFile; | |
314 } | |
315 | |
316 rtc::CritScope cs(&_fileCritSect); | |
317 | |
318 // Destroy the old instance | |
319 if (_outputFileRecorderPtr) { | |
320 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | |
321 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
322 _outputFileRecorderPtr = NULL; | |
323 } | |
324 | |
325 _outputFileRecorderPtr = | |
326 FileRecorder::CreateFileRecorder(_instanceId, (const FileFormats)format); | |
327 if (_outputFileRecorderPtr == NULL) { | |
328 _engineStatisticsPtr->SetLastError( | |
329 VE_INVALID_ARGUMENT, kTraceError, | |
330 "StartRecordingPlayout() fileRecorder format isnot correct"); | |
331 return -1; | |
332 } | |
333 | |
334 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst, | |
335 notificationTime) != 0) { | |
336 _engineStatisticsPtr->SetLastError( | |
337 VE_BAD_FILE, kTraceError, | |
338 "StartRecordingAudioFile() failed to start file recording"); | |
339 _outputFileRecorderPtr->StopRecording(); | |
340 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
341 _outputFileRecorderPtr = NULL; | |
342 return -1; | |
343 } | |
344 | |
345 _outputFileRecorderPtr->RegisterModuleFileCallback(this); | |
346 _outputFileRecording = true; | |
347 | |
348 return 0; | |
349 } | |
350 | |
351 int AudioMixer::StopRecordingPlayout() { | |
352 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), | |
353 "AudioMixer::StopRecordingPlayout()"); | |
354 | |
355 if (!_outputFileRecording) { | |
356 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1), | |
357 "StopRecordingPlayout() file isnot recording"); | |
358 return -1; | |
359 } | |
360 | |
361 rtc::CritScope cs(&_fileCritSect); | |
362 | |
363 if (_outputFileRecorderPtr->StopRecording() != 0) { | |
364 _engineStatisticsPtr->SetLastError( | |
365 VE_STOP_RECORDING_FAILED, kTraceError, | |
366 "StopRecording(), could not stop recording"); | |
367 return -1; | |
368 } | |
369 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); | |
370 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); | |
371 _outputFileRecorderPtr = NULL; | |
372 _outputFileRecording = false; | |
373 | |
374 return 0; | |
375 } | |
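A hedged sketch of the record-to-file flow implemented above. The CodecInst initializer copies the field order of the dummyCodec literal used in these functions ({pltype, plname, plfreq, pacsize, channels, rate}); the output path is a placeholder.

```cpp
// Record the mixed playout to a 16 kHz mono L16 WAV file, then stop.
int RecordPlayoutToFile(webrtc::voe::AudioMixer* mixer) {
  webrtc::CodecInst l16 = {100, "L16", 16000, 320, 1, 320000};
  if (mixer->StartRecordingPlayout("/tmp/playout.wav" /* placeholder path */,
                                   &l16) != 0) {
    return -1;  // SetLastError() has already recorded the reason.
  }
  // ... keep the mixing loop running for the duration of the recording ...
  return mixer->StopRecordingPlayout();
}
```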
376 | |
377 int AudioMixer::GetMixedAudio(int sample_rate_hz, | |
378 size_t num_channels, | |
379 AudioFrame* frame) { | |
380 WEBRTC_TRACE( | |
381 kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
382 "AudioMixer::GetMixedAudio(sample_rate_hz=%d, num_channels=%" PRIuS ")", | |
383 sample_rate_hz, num_channels); | |
384 | |
385 // --- Record playout if enabled | |
386 { | |
387 rtc::CritScope cs(&_fileCritSect); | |
388 if (_outputFileRecording && _outputFileRecorderPtr) | |
389 _outputFileRecorderPtr->RecordAudioToFile(_audioFrame); | |
390 } | |
391 | |
392 frame->num_channels_ = num_channels; | |
393 frame->sample_rate_hz_ = sample_rate_hz; | |
394 // TODO(andrew): Ideally the downmixing would occur much earlier, in | |
395 // AudioCodingModule. | |
396 RemixAndResample(_audioFrame, &resampler_, frame); | |
397 return 0; | |
398 } | |
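To make the playout data path easier to follow, here is one plausible 10 ms tick combining the functions above. The mix -> DoOperationsOnCombinedSignal -> GetMixedAudio ordering is assumed from the legacy VoE output path, not stated in this file.

```cpp
// One hypothetical 10 ms playout tick: mix, run the combined-signal
// operations (APM reverse stream, pan, level), then fetch the result at the
// device rate. Ordering is assumed, not mandated by this patch.
void OnePlayoutTick(webrtc::voe::AudioMixer* mixer,
                    int device_sample_rate_hz,
                    size_t device_channels,
                    webrtc::AudioFrame* out_frame) {
  mixer->MixActiveChannels();                 // Runs the conference mixer.
  mixer->DoOperationsOnCombinedSignal(true);  // true => feed the APM reverse stream.
  mixer->GetMixedAudio(device_sample_rate_hz, device_channels, out_frame);
}
```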
399 | |
400 int32_t AudioMixer::DoOperationsOnCombinedSignal(bool feed_data_to_apm) { | |
401 if (_audioFrame.sample_rate_hz_ != _mixingFrequencyHz) { | |
402 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1), | |
403 "AudioMixer::DoOperationsOnCombinedSignal() => " | |
404 "mixing frequency = %d", | |
405 _audioFrame.sample_rate_hz_); | |
406 _mixingFrequencyHz = _audioFrame.sample_rate_hz_; | |
407 } | |
408 | |
409 // Scale left and/or right channel(s) if balance is active | |
410 if (_panLeft != 1.0 || _panRight != 1.0) { | |
411 if (_audioFrame.num_channels_ == 1) { | |
412 AudioFrameOperations::MonoToStereo(&_audioFrame); | |
413 } else { | |
414 // Pure stereo mode (we are receiving a stereo signal). | |
415 } | |
416 | |
417 assert(_audioFrame.num_channels_ == 2); | |
418 AudioFrameOperations::Scale(_panLeft, _panRight, _audioFrame); | |
419 } | |
420 | |
421 // --- Far-end Voice Quality Enhancement (AudioProcessing Module) | |
422 if (feed_data_to_apm) { | |
423 if (_audioProcessingModulePtr->ProcessReverseStream(&_audioFrame) != 0) { | |
424 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1), | |
425 "AudioProcessingModule::ProcessReverseStream() => error"); | |
426 RTC_DCHECK(false); | |
427 } | |
428 } | |
429 | |
430 // --- External media processing | |
431 { | |
432 rtc::CritScope cs(&_callbackCritSect); | |
433 if (_externalMedia) { | |
434 const bool is_stereo = (_audioFrame.num_channels_ == 2); | |
435 if (_externalMediaCallbackPtr) { | |
436 _externalMediaCallbackPtr->Process( | |
437 -1, kPlaybackAllChannelsMixed, (int16_t*)_audioFrame.data_, | |
438 _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_, | |
439 is_stereo); | |
440 } | |
441 } | |
442 } | |
443 | |
444 // --- Measure audio level (0-9) for the combined signal | |
445 _audioLevel.ComputeLevel(_audioFrame); | |
446 | |
447 return 0; | |
448 } | |
449 } // namespace voe | |
450 } // namespace webrtc | |