/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>

#include "webrtc/base/format_macros.h"
#include "webrtc/modules/media_file/source/media_file_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"

namespace webrtc {
MediaFile* MediaFile::CreateMediaFile(const int32_t id)
{
    return new MediaFileImpl(id);
}

void MediaFile::DestroyMediaFile(MediaFile* module)
{
    delete static_cast<MediaFileImpl*>(module);
}

MediaFileImpl::MediaFileImpl(const int32_t id)
    : _id(id),
      _crit(CriticalSectionWrapper::CreateCriticalSection()),
      _callbackCrit(CriticalSectionWrapper::CreateCriticalSection()),
      _ptrFileUtilityObj(NULL),
      codec_info_(),
      _ptrInStream(NULL),
      _ptrOutStream(NULL),
      _fileFormat((FileFormats)-1),
      _recordDurationMs(0),
      _playoutPositionMs(0),
      _notificationMs(0),
      _playingActive(false),
      _recordingActive(false),
      _isStereo(false),
      _openFile(false),
      _fileName(),
      _ptrCallback(NULL)
{
    WEBRTC_TRACE(kTraceMemory, kTraceFile, id, "Created");

    codec_info_.plname[0] = '\0';
    _fileName[0] = '\0';
}

MediaFileImpl::~MediaFileImpl()
{
    WEBRTC_TRACE(kTraceMemory, kTraceFile, _id, "~MediaFileImpl()");
    {
        CriticalSectionScoped lock(_crit);

        if(_playingActive)
        {
            StopPlaying();
        }

        if(_recordingActive)
        {
            StopRecording();
        }

        delete _ptrFileUtilityObj;

        if(_openFile)
        {
            delete _ptrInStream;
            _ptrInStream = NULL;
            delete _ptrOutStream;
            _ptrOutStream = NULL;
        }
    }

    delete _crit;
    delete _callbackCrit;
}

int64_t MediaFileImpl::TimeUntilNextProcess()
{
    WEBRTC_TRACE(
        kTraceWarning,
        kTraceFile,
        _id,
        "TimeUntilNextProcess: This method is not used by MediaFile class.");
    return -1;
}

int32_t MediaFileImpl::Process()
{
    WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
                 "Process: This method is not used by MediaFile class.");
    return -1;
}

int32_t MediaFileImpl::PlayoutAudioData(int8_t* buffer,
                                        size_t& dataLengthInBytes)
{
    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
                 "MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %" PRIuS ")",
                 buffer, dataLengthInBytes);

    const size_t bufferLengthInBytes = dataLengthInBytes;
    dataLengthInBytes = 0;

    if(buffer == NULL || bufferLengthInBytes == 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Buffer pointer or length is NULL!");
        return -1;
    }

    int32_t bytesRead = 0;
    {
        CriticalSectionScoped lock(_crit);

        if(!_playingActive)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
                         "Not currently playing!");
            return -1;
        }

        if(!_ptrFileUtilityObj)
        {
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "Playing, but no FileUtility object!");
            StopPlaying();
            return -1;
        }

        switch(_fileFormat)
        {
            case kFileFormatPcm32kHzFile:
            case kFileFormatPcm16kHzFile:
            case kFileFormatPcm8kHzFile:
                bytesRead = _ptrFileUtilityObj->ReadPCMData(
                    *_ptrInStream,
                    buffer,
                    bufferLengthInBytes);
                break;
            case kFileFormatCompressedFile:
                bytesRead = _ptrFileUtilityObj->ReadCompressedData(
                    *_ptrInStream,
                    buffer,
                    bufferLengthInBytes);
                break;
            case kFileFormatWavFile:
                bytesRead = _ptrFileUtilityObj->ReadWavDataAsMono(
                    *_ptrInStream,
                    buffer,
                    bufferLengthInBytes);
                break;
            case kFileFormatPreencodedFile:
                bytesRead = _ptrFileUtilityObj->ReadPreEncodedData(
                    *_ptrInStream,
                    buffer,
                    bufferLengthInBytes);
                if(bytesRead > 0)
                {
                    dataLengthInBytes = static_cast<size_t>(bytesRead);
                    return 0;
                }
                break;
            default:
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Invalid file format: %d", _fileFormat);
                assert(false);
                break;
            }
        }

        if(bytesRead > 0)
        {
            dataLengthInBytes = static_cast<size_t>(bytesRead);
        }
    }
    HandlePlayCallbacks(bytesRead);
    return 0;
}

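// Dispatches the PlayNotification()/PlayFileEnded() callbacks based on the
// result of the last read. Called after _crit has been released; only
// _callbackCrit is held while the callback is invoked.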
void MediaFileImpl::HandlePlayCallbacks(int32_t bytesRead)
{
    bool playEnded = false;
    uint32_t callbackNotifyMs = 0;

    if(bytesRead > 0)
    {
        // Check if it's time for PlayNotification(..).
        _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
        if(_notificationMs)
        {
            if(_playoutPositionMs >= _notificationMs)
            {
                _notificationMs = 0;
                callbackNotifyMs = _playoutPositionMs;
            }
        }
    }
    else
    {
        // If no bytes were read assume end of file.
        StopPlaying();
        playEnded = true;
    }

    // Only _callbackCrit may and should be taken when making callbacks.
    CriticalSectionScoped lock(_callbackCrit);
    if(_ptrCallback)
    {
        if(callbackNotifyMs)
        {
            _ptrCallback->PlayNotification(_id, callbackNotifyMs);
        }
        if(playEnded)
        {
            _ptrCallback->PlayFileEnded(_id);
        }
    }
}

int32_t MediaFileImpl::PlayoutStereoData(
    int8_t* bufferLeft,
    int8_t* bufferRight,
    size_t& dataLengthInBytes)
{
    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
                 "MediaFileImpl::PlayoutStereoData(Left = 0x%x, Right = 0x%x,"
                 " Len= %" PRIuS ")",
                 bufferLeft,
                 bufferRight,
                 dataLengthInBytes);

    const size_t bufferLengthInBytes = dataLengthInBytes;
    dataLengthInBytes = 0;

    if(bufferLeft == NULL || bufferRight == NULL || bufferLengthInBytes == 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "A buffer pointer or the length is NULL!");
        return -1;
    }

    bool playEnded = false;
    uint32_t callbackNotifyMs = 0;
    {
        CriticalSectionScoped lock(_crit);

        if(!_playingActive || !_isStereo)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
                         "Not currently playing stereo!");
            return -1;
        }

        if(!_ptrFileUtilityObj)
        {
            WEBRTC_TRACE(
                kTraceError,
                kTraceFile,
                _id,
                "Playing stereo, but the FileUtility object is NULL!");
            StopPlaying();
            return -1;
        }

        // Stereo playout is only supported for WAV files.
        int32_t bytesRead = 0;
        switch(_fileFormat)
        {
            case kFileFormatWavFile:
                bytesRead = _ptrFileUtilityObj->ReadWavDataAsStereo(
                    *_ptrInStream,
                    bufferLeft,
                    bufferRight,
                    bufferLengthInBytes);
                break;
            default:
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Trying to read non-WAV as stereo audio"
                             " (not supported)");
                break;
        }

        if(bytesRead > 0)
        {
            dataLengthInBytes = static_cast<size_t>(bytesRead);

            // Check if it's time for PlayNotification(..).
            _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
            if(_notificationMs)
            {
                if(_playoutPositionMs >= _notificationMs)
                {
                    _notificationMs = 0;
                    callbackNotifyMs = _playoutPositionMs;
                }
            }
        }
        else
        {
            // If no bytes were read assume end of file.
            StopPlaying();
            playEnded = true;
        }
    }

    CriticalSectionScoped lock(_callbackCrit);
    if(_ptrCallback)
    {
        if(callbackNotifyMs)
        {
            _ptrCallback->PlayNotification(_id, callbackNotifyMs);
        }
        if(playEnded)
        {
            _ptrCallback->PlayFileEnded(_id);
        }
    }
    return 0;
}

int32_t MediaFileImpl::StartPlayingAudioFile(
    const char* fileName,
    const uint32_t notificationTimeMs,
    const bool loop,
    const FileFormats format,
    const CodecInst* codecInst,
    const uint32_t startPointMs,
    const uint32_t stopPointMs)
{
    if(!ValidFileName(fileName))
    {
        return -1;
    }
    if(!ValidFileFormat(format, codecInst))
    {
        return -1;
    }
    if(!ValidFilePositions(startPointMs, stopPointMs))
    {
        return -1;
    }

    // Check that the file will play longer than notificationTimeMs ms.
    if((startPointMs && stopPointMs && !loop) &&
       (notificationTimeMs > (stopPointMs - startPointMs)))
    {
        WEBRTC_TRACE(
            kTraceError,
            kTraceFile,
            _id,
            "The specified notification time is longer than the amount of ms"
            " that will be played");
        return -1;
    }

    FileWrapper* inputStream = FileWrapper::Create();
    if(inputStream == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
                     "Failed to allocate input stream for file %s", fileName);
        return -1;
    }

    if(inputStream->OpenFile(fileName, true, loop) != 0)
    {
        delete inputStream;
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Could not open input file %s", fileName);
        return -1;
    }

    if(StartPlayingStream(*inputStream, loop, notificationTimeMs,
                          format, codecInst, startPointMs, stopPointMs) == -1)
    {
        inputStream->CloseFile();
        delete inputStream;
        return -1;
    }

    CriticalSectionScoped lock(_crit);
    _openFile = true;
    strncpy(_fileName, fileName, sizeof(_fileName));
    _fileName[sizeof(_fileName) - 1] = '\0';
    return 0;
}

int32_t MediaFileImpl::StartPlayingAudioStream(
    InStream& stream,
    const uint32_t notificationTimeMs,
    const FileFormats format,
    const CodecInst* codecInst,
    const uint32_t startPointMs,
    const uint32_t stopPointMs)
{
    return StartPlayingStream(stream, false, notificationTimeMs, format,
                              codecInst, startPointMs, stopPointMs);
}

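// Common playout setup used by StartPlayingAudioFile() and
// StartPlayingAudioStream(): validates the arguments, creates the
// ModuleFileUtility helper and initializes it for reading the requested
// format from |stream|.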
int32_t MediaFileImpl::StartPlayingStream(
    InStream& stream,
    bool loop,
    const uint32_t notificationTimeMs,
    const FileFormats format,
    const CodecInst* codecInst,
    const uint32_t startPointMs,
    const uint32_t stopPointMs)
{
    if(!ValidFileFormat(format, codecInst))
    {
        return -1;
    }

    if(!ValidFilePositions(startPointMs, stopPointMs))
    {
        return -1;
    }

    CriticalSectionScoped lock(_crit);
    if(_playingActive || _recordingActive)
    {
        WEBRTC_TRACE(
            kTraceError,
            kTraceFile,
            _id,
            "StartPlaying called, but already playing or recording file %s",
            (_fileName[0] == '\0') ? "(name not set)" : _fileName);
        return -1;
    }

    if(_ptrFileUtilityObj != NULL)
    {
        WEBRTC_TRACE(kTraceError,
                     kTraceFile,
                     _id,
                     "StartPlaying called, but FileUtilityObj already exists!");
        StopPlaying();
        return -1;
    }

    _ptrFileUtilityObj = new ModuleFileUtility(_id);
    if(_ptrFileUtilityObj == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
                     "Failed to create FileUtilityObj!");
        return -1;
    }

    switch(format)
    {
        case kFileFormatWavFile:
        {
            if(_ptrFileUtilityObj->InitWavReading(stream, startPointMs,
                                                  stopPointMs) == -1)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Not a valid WAV file!");
                StopPlaying();
                return -1;
            }
            _fileFormat = kFileFormatWavFile;
            break;
        }
        case kFileFormatCompressedFile:
        {
            if(_ptrFileUtilityObj->InitCompressedReading(stream, startPointMs,
                                                         stopPointMs) == -1)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Not a valid Compressed file!");
                StopPlaying();
                return -1;
            }
            _fileFormat = kFileFormatCompressedFile;
            break;
        }
        case kFileFormatPcm8kHzFile:
        case kFileFormatPcm16kHzFile:
        case kFileFormatPcm32kHzFile:
        {
            // ValidFileFormat() called at the beginning of this function
            // prevents codecInst from being NULL here.
            assert(codecInst != NULL);
            if(!ValidFrequency(codecInst->plfreq) ||
               _ptrFileUtilityObj->InitPCMReading(stream, startPointMs,
                                                  stopPointMs,
                                                  codecInst->plfreq) == -1)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Not a valid raw 8, 16 or 32 kHz PCM file!");
                StopPlaying();
                return -1;
            }

            _fileFormat = format;
            break;
        }
        case kFileFormatPreencodedFile:
        {
            // ValidFileFormat() called at the beginning of this function
            // prevents codecInst from being NULL here.
            assert(codecInst != NULL);
            if(_ptrFileUtilityObj->InitPreEncodedReading(stream, *codecInst) ==
               -1)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Not a valid PreEncoded file!");
                StopPlaying();
                return -1;
            }

            _fileFormat = kFileFormatPreencodedFile;
            break;
        }
        default:
        {
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "Invalid file format: %d", format);
            assert(false);
            break;
        }
    }
    if(_ptrFileUtilityObj->codec_info(codec_info_) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Failed to retrieve codec info!");
        StopPlaying();
        return -1;
    }

    _isStereo = (codec_info_.channels == 2);
    if(_isStereo && (_fileFormat != kFileFormatWavFile))
    {
        WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
                     "Stereo is only allowed for WAV files");
        StopPlaying();
        return -1;
    }
    _playingActive = true;
    _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
    _ptrInStream = &stream;
    _notificationMs = notificationTimeMs;

    return 0;
}

int32_t MediaFileImpl::StopPlaying()
{
    CriticalSectionScoped lock(_crit);
    _isStereo = false;
    if(_ptrFileUtilityObj)
    {
        delete _ptrFileUtilityObj;
        _ptrFileUtilityObj = NULL;
    }
    if(_ptrInStream)
    {
        // If MediaFileImpl opened the InStream it must be reclaimed here.
        if(_openFile)
        {
            delete _ptrInStream;
            _openFile = false;
        }
        _ptrInStream = NULL;
    }

    codec_info_.pltype = 0;
    codec_info_.plname[0] = '\0';

    if(!_playingActive)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
                     "playing is not active!");
        return -1;
    }

    _playingActive = false;
    return 0;
}

bool MediaFileImpl::IsPlaying()
{
    WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsPlaying()");
    CriticalSectionScoped lock(_crit);
    return _playingActive;
}

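// Writes one block of recorded audio to the active output stream and updates
// the recorded duration. After _crit has been released, fires the
// RecordNotification()/RecordFileEnded() callbacks when a notification point
// is reached or when the write fails.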
int32_t MediaFileImpl::IncomingAudioData(
    const int8_t* buffer,
    const size_t bufferLengthInBytes)
{
    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
                 "MediaFile::IncomingData(buffer= 0x%x, bufLen= %" PRIuS ")",
                 buffer, bufferLengthInBytes);

    if(buffer == NULL || bufferLengthInBytes == 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Buffer pointer or length is NULL!");
        return -1;
    }

    bool recordingEnded = false;
    uint32_t callbackNotifyMs = 0;
    {
        CriticalSectionScoped lock(_crit);

        if(!_recordingActive)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
                         "Not currently recording!");
            return -1;
        }
        if(_ptrOutStream == NULL)
        {
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "Recording is active, but output stream is NULL!");
            assert(false);
            return -1;
        }

        int32_t bytesWritten = 0;
        uint32_t samplesWritten = codec_info_.pacsize;
        if(_ptrFileUtilityObj)
        {
            switch(_fileFormat)
            {
                case kFileFormatPcm8kHzFile:
                case kFileFormatPcm16kHzFile:
                case kFileFormatPcm32kHzFile:
                    bytesWritten = _ptrFileUtilityObj->WritePCMData(
                        *_ptrOutStream,
                        buffer,
                        bufferLengthInBytes);

                    // Sample size is 2 bytes.
                    if(bytesWritten > 0)
                    {
                        samplesWritten = bytesWritten / sizeof(int16_t);
                    }
                    break;
                case kFileFormatCompressedFile:
                    bytesWritten = _ptrFileUtilityObj->WriteCompressedData(
                        *_ptrOutStream, buffer, bufferLengthInBytes);
                    break;
                case kFileFormatWavFile:
                    bytesWritten = _ptrFileUtilityObj->WriteWavData(
                        *_ptrOutStream,
                        buffer,
                        bufferLengthInBytes);
                    if(bytesWritten > 0 && STR_NCASE_CMP(codec_info_.plname,
                                                         "L16", 4) == 0)
                    {
                        // Sample size is 2 bytes.
                        samplesWritten = bytesWritten / sizeof(int16_t);
                    }
                    break;
                case kFileFormatPreencodedFile:
                    bytesWritten = _ptrFileUtilityObj->WritePreEncodedData(
                        *_ptrOutStream, buffer, bufferLengthInBytes);
                    break;
                default:
                    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                                 "Invalid file format: %d", _fileFormat);
                    assert(false);
                    break;
            }
        } else {
            // TODO(hellner): a quick look at the code makes me think that this
            // branch is never executed. Remove?
            if(_ptrOutStream)
            {
                if(_ptrOutStream->Write(buffer, bufferLengthInBytes))
                {
                    bytesWritten = static_cast<int32_t>(bufferLengthInBytes);
                }
            }
        }

        _recordDurationMs += samplesWritten / (codec_info_.plfreq / 1000);

        // Check if it's time for RecordNotification(..).
        if(_notificationMs)
        {
            if(_recordDurationMs >= _notificationMs)
            {
                _notificationMs = 0;
                callbackNotifyMs = _recordDurationMs;
            }
        }
        if(bytesWritten < (int32_t)bufferLengthInBytes)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
                         "Failed to write all requested bytes!");
            StopRecording();
            recordingEnded = true;
        }
    }

    // Only _callbackCrit may and should be taken when making callbacks.
    CriticalSectionScoped lock(_callbackCrit);
    if(_ptrCallback)
    {
        if(callbackNotifyMs)
        {
            _ptrCallback->RecordNotification(_id, callbackNotifyMs);
        }
        if(recordingEnded)
        {
            _ptrCallback->RecordFileEnded(_id);
            return -1;
        }
    }
    return 0;
}

int32_t MediaFileImpl::StartRecordingAudioFile(
    const char* fileName,
    const FileFormats format,
    const CodecInst& codecInst,
    const uint32_t notificationTimeMs,
    const uint32_t maxSizeBytes)
{
    if(!ValidFileName(fileName))
    {
        return -1;
    }
    if(!ValidFileFormat(format, &codecInst))
    {
        return -1;
    }

    FileWrapper* outputStream = FileWrapper::Create();
    if(outputStream == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
                     "Failed to allocate memory for output stream");
        return -1;
    }

    if(outputStream->OpenFile(fileName, false) != 0)
    {
        delete outputStream;
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Could not open output file '%s' for writing!",
                     fileName);
        return -1;
    }

    if(maxSizeBytes)
    {
        outputStream->SetMaxFileSize(maxSizeBytes);
    }

    if(StartRecordingAudioStream(*outputStream, format, codecInst,
                                 notificationTimeMs) == -1)
    {
        outputStream->CloseFile();
        delete outputStream;
        return -1;
    }

    CriticalSectionScoped lock(_crit);
    _openFile = true;
    strncpy(_fileName, fileName, sizeof(_fileName));
    _fileName[sizeof(_fileName) - 1] = '\0';
    return 0;
}

int32_t MediaFileImpl::StartRecordingAudioStream(
    OutStream& stream,
    const FileFormats format,
    const CodecInst& codecInst,
    const uint32_t notificationTimeMs)
{
    // Check the codec info.
    if(!ValidFileFormat(format, &codecInst))
    {
        return -1;
    }

    CriticalSectionScoped lock(_crit);
    if(_recordingActive || _playingActive)
    {
        WEBRTC_TRACE(
            kTraceError,
            kTraceFile,
            _id,
            "StartRecording called, but already recording or playing file %s!",
            _fileName);
        return -1;
    }

    if(_ptrFileUtilityObj != NULL)
    {
        WEBRTC_TRACE(
            kTraceError,
            kTraceFile,
            _id,
            "StartRecording called, but fileUtilityObj already exists!");
        StopRecording();
        return -1;
    }

    _ptrFileUtilityObj = new ModuleFileUtility(_id);
    if(_ptrFileUtilityObj == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
                     "Cannot allocate fileUtilityObj!");
        return -1;
    }

    CodecInst tmpAudioCodec;
    memcpy(&tmpAudioCodec, &codecInst, sizeof(CodecInst));
    switch(format)
    {
        case kFileFormatWavFile:
        {
            if(_ptrFileUtilityObj->InitWavWriting(stream, codecInst) == -1)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Failed to initialize WAV file!");
                delete _ptrFileUtilityObj;
                _ptrFileUtilityObj = NULL;
                return -1;
            }
            _fileFormat = kFileFormatWavFile;
            break;
        }
        case kFileFormatCompressedFile:
        {
            // Write the compression codec name at the beginning of the file.
            if(_ptrFileUtilityObj->InitCompressedWriting(stream, codecInst) ==
               -1)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Failed to initialize Compressed file!");
                delete _ptrFileUtilityObj;
                _ptrFileUtilityObj = NULL;
                return -1;
            }
            _fileFormat = kFileFormatCompressedFile;
            break;
        }
        case kFileFormatPcm8kHzFile:
        case kFileFormatPcm16kHzFile:
        {
            if(!ValidFrequency(codecInst.plfreq) ||
               _ptrFileUtilityObj->InitPCMWriting(stream, codecInst.plfreq) ==
               -1)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Failed to initialize 8 or 16 kHz PCM file!");
                delete _ptrFileUtilityObj;
                _ptrFileUtilityObj = NULL;
                return -1;
            }
            _fileFormat = format;
            break;
        }
        case kFileFormatPreencodedFile:
        {
            if(_ptrFileUtilityObj->InitPreEncodedWriting(stream, codecInst) ==
               -1)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "Failed to initialize Pre-Encoded file!");
                delete _ptrFileUtilityObj;
                _ptrFileUtilityObj = NULL;
                return -1;
            }

            _fileFormat = kFileFormatPreencodedFile;
            break;
        }
        default:
        {
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "Invalid file format %d specified!", format);
            delete _ptrFileUtilityObj;
            _ptrFileUtilityObj = NULL;
            return -1;
        }
    }
    _isStereo = (tmpAudioCodec.channels == 2);
    if(_isStereo)
    {
        if(_fileFormat != kFileFormatWavFile)
        {
            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
                         "Stereo is only allowed for WAV files");
            StopRecording();
            return -1;
        }
        if((STR_NCASE_CMP(tmpAudioCodec.plname, "L16", 4) != 0) &&
           (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMU", 5) != 0) &&
           (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMA", 5) != 0))
        {
            WEBRTC_TRACE(
                kTraceWarning,
                kTraceFile,
                _id,
                "Stereo is only allowed for the codecs PCMU, PCMA and L16");
            StopRecording();
            return -1;
        }
    }
    memcpy(&codec_info_, &tmpAudioCodec, sizeof(CodecInst));
    _recordingActive = true;
    _ptrOutStream = &stream;
    _notificationMs = notificationTimeMs;
    _recordDurationMs = 0;
    return 0;
}

int32_t MediaFileImpl::StopRecording()
{
    CriticalSectionScoped lock(_crit);
    if(!_recordingActive)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
                     "recording is not active!");
        return -1;
    }

    _isStereo = false;

    if(_ptrFileUtilityObj != NULL)
    {
        // The WAV header has to be updated before closing the stream
        // because it contains size information.
        if((_fileFormat == kFileFormatWavFile) &&
           (_ptrOutStream != NULL))
        {
            _ptrFileUtilityObj->UpdateWavHeader(*_ptrOutStream);
        }
        delete _ptrFileUtilityObj;
        _ptrFileUtilityObj = NULL;
    }

    if(_ptrOutStream != NULL)
    {
        // If MediaFileImpl opened the OutStream it must be reclaimed here.
        if(_openFile)
        {
            delete _ptrOutStream;
            _openFile = false;
        }
        _ptrOutStream = NULL;
    }

    _recordingActive = false;
    codec_info_.pltype = 0;
    codec_info_.plname[0] = '\0';

    return 0;
}

bool MediaFileImpl::IsRecording()
{
    WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsRecording()");
    CriticalSectionScoped lock(_crit);
    return _recordingActive;
}

int32_t MediaFileImpl::RecordDurationMs(uint32_t& durationMs)
{
    CriticalSectionScoped lock(_crit);
    if(!_recordingActive)
    {
        durationMs = 0;
        return -1;
    }
    durationMs = _recordDurationMs;
    return 0;
}

bool MediaFileImpl::IsStereo()
{
    WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsStereo()");
    CriticalSectionScoped lock(_crit);
    return _isStereo;
}

int32_t MediaFileImpl::SetModuleFileCallback(FileCallback* callback)
{
    CriticalSectionScoped lock(_callbackCrit);

    _ptrCallback = callback;
    return 0;
}

int32_t MediaFileImpl::FileDurationMs(const char* fileName,
                                      uint32_t& durationMs,
                                      const FileFormats format,
                                      const uint32_t freqInHz)
{
    if(!ValidFileName(fileName))
    {
        return -1;
    }
    if(!ValidFrequency(freqInHz))
    {
        return -1;
    }

    ModuleFileUtility* utilityObj = new ModuleFileUtility(_id);
    if(utilityObj == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "failed to allocate utility object!");
        return -1;
    }

    const int32_t duration = utilityObj->FileDurationMs(fileName, format,
                                                        freqInHz);
    delete utilityObj;
    if(duration == -1)
    {
        durationMs = 0;
        return -1;
    }

    durationMs = duration;
    return 0;
}

int32_t MediaFileImpl::PlayoutPositionMs(uint32_t& positionMs) const
{
    CriticalSectionScoped lock(_crit);
    if(!_playingActive)
    {
        positionMs = 0;
        return -1;
    }
    positionMs = _playoutPositionMs;
    return 0;
}

int32_t MediaFileImpl::codec_info(CodecInst& codecInst) const
{
    CriticalSectionScoped lock(_crit);
    if(!_playingActive && !_recordingActive)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Neither playout nor recording has been initialized!");
        return -1;
    }
    if (codec_info_.pltype == 0 && codec_info_.plname[0] == '\0')
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "The CodecInst for %s is unknown!",
                     _playingActive ? "Playback" : "Recording");
        return -1;
    }
    memcpy(&codecInst, &codec_info_, sizeof(CodecInst));
    return 0;
}

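// Codec information is required for raw PCM and pre-encoded formats; for the
// other formats a NULL codecInst is accepted.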
bool MediaFileImpl::ValidFileFormat(const FileFormats format,
                                    const CodecInst* codecInst)
{
    if(codecInst == NULL)
    {
        if(format == kFileFormatPreencodedFile ||
           format == kFileFormatPcm8kHzFile ||
           format == kFileFormatPcm16kHzFile ||
           format == kFileFormatPcm32kHzFile)
        {
            WEBRTC_TRACE(kTraceError, kTraceFile, -1,
                         "Codec info required for file format specified!");
            return false;
        }
    }
    return true;
}

bool MediaFileImpl::ValidFileName(const char* fileName)
{
    if((fileName == NULL) || (fileName[0] == '\0'))
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, -1, "FileName not specified!");
        return false;
    }
    return true;
}

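// Start and stop positions of 0 mean "play the whole file"; otherwise the
// stop point must lie after the start point and leave at least 20 ms to play.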
bool MediaFileImpl::ValidFilePositions(const uint32_t startPointMs,
                                       const uint32_t stopPointMs)
{
    if(startPointMs == 0 && stopPointMs == 0) // Default values.
    {
        return true;
    }
    if(stopPointMs && (startPointMs >= stopPointMs))
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
                     "startPointMs must be less than stopPointMs!");
        return false;
    }
    if(stopPointMs && ((stopPointMs - startPointMs) < 20))
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
                     "minimum play duration for files is 20 ms!");
        return false;
    }
    return true;
}

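// Only the 8, 16 and 32 kHz sample rates supported by the raw PCM code paths
// are accepted.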
bool MediaFileImpl::ValidFrequency(const uint32_t frequency)
{
    if((frequency == 8000) || (frequency == 16000) || (frequency == 32000))
    {
        return true;
    }
    WEBRTC_TRACE(kTraceError, kTraceFile, -1,
                 "Frequency should be 8000, 16000 or 32000 (Hz)");
    return false;
}
}  // namespace webrtc