Chromium Code Reviews

Side by Side Diff: webrtc/modules/media_file/source/media_file_utility.h

Issue 1435093002: Remove include dirs from modules/{media_file,pacing} (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Rebase fix Created 5 years, 1 month ago
1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 // Note: the class cannot be used for reading and writing at the same time.
12 #ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
13 #define WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
14
15 #include <stdio.h>
16
17 #include "webrtc/common_types.h"
18 #include "webrtc/modules/media_file/include/media_file_defines.h"
19
20 namespace webrtc {
21 class InStream;
22 class OutStream;
23
24 class ModuleFileUtility
25 {
26 public:
27
28 ModuleFileUtility(const int32_t id);
29 ~ModuleFileUtility();
30
31 // Prepare for playing audio from stream.
32 // startPointMs and stopPointMs, unless zero, specify what part of the file
33 // should be read. From startPointMs ms to stopPointMs ms.
34 int32_t InitWavReading(InStream& stream,
35 const uint32_t startPointMs = 0,
36 const uint32_t stopPointMs = 0);
37
38 // Put 10-60ms of audio data from stream into the audioBuffer depending on
39 // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
40 // The return value is the number of bytes written to audioBuffer.
41 // Note: This API only plays mono audio but can be used on files containing
42 // audio with more channels (in which case the audio will be converted to
43 // mono).
44 int32_t ReadWavDataAsMono(InStream& stream, int8_t* audioBuffer,
45 const size_t dataLengthInBytes);
46
47 // Put 10-60ms, depending on codec frame size, of audio data from file into
48 // audioBufferLeft and audioBufferRight. The buffers contain the left and
49 // right channel of played out stereo audio.
50 // dataLengthInBytes indicates the size of both audioBufferLeft and
51 // audioBufferRight.
52 // The return value is the number of bytes read for each buffer.
53 // Note: This API can only be successfully called for WAV files with stereo
54 // audio.
55 int32_t ReadWavDataAsStereo(InStream& wav,
56 int8_t* audioBufferLeft,
57 int8_t* audioBufferRight,
58 const size_t bufferLength);
59
60 // Prepare for recording audio to stream.
61 // codecInst specifies the encoding of the audio data.
62 // Note: codecInst.channels should be set to 2 for stereo (and 1 for
63 // mono). Stereo is only supported for WAV files.
64 int32_t InitWavWriting(OutStream& stream, const CodecInst& codecInst);
65
66 // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
67 // to file. The audio frame size is determined by the codecInst.pacsize
68 // parameter of the last successful StartRecordingAudioFile(..) call.
69 // The return value is the number of bytes written to stream.
70 int32_t WriteWavData(OutStream& stream,
71 const int8_t* audioBuffer,
72 const size_t bufferLength);
73
74 // Finalizes the WAV header so that it is correct if nothing more will be
75 // written to stream.
76 // Note: this API must be called before closing stream to ensure that the
77 // WAVE header is updated with the file size. Don't call this API
78 // if more samples are to be written to stream.
79 int32_t UpdateWavHeader(OutStream& stream);
80
81 // Prepare for playing audio from stream.
82 // startPointMs and stopPointMs, unless zero, specify what part of the file
83 // should be read. From startPointMs ms to stopPointMs ms.
84 // freqInHz is the PCM sampling frequency.
85 // NOTE, allowed frequencies are 8000, 16000 and 32000 (Hz)
86 int32_t InitPCMReading(InStream& stream,
87 const uint32_t startPointMs = 0,
88 const uint32_t stopPointMs = 0,
89 const uint32_t freqInHz = 16000);
90
91 // Put 10-60ms of audio data from stream into the audioBuffer depending on
92 // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
93 // The return value is the number of bytes written to audioBuffer.
94 int32_t ReadPCMData(InStream& stream, int8_t* audioBuffer,
95 const size_t dataLengthInBytes);
96
97 // Prepare for recording audio to stream.
98 // freqInHz is the PCM sampling frequency.
99 // NOTE, allowed frequencies are 8000, 16000 and 32000 (Hz)
100 int32_t InitPCMWriting(OutStream& stream, const uint32_t freqInHz = 16000);
101
102 // Write one 10ms audio frame, i.e. the bufferLength first bytes of
103 // audioBuffer, to file. The audio frame size is determined by the freqInHz
104 // parameter of the last successful InitPCMWriting(..) call.
105 // The return value is the number of bytes written to stream.
106 int32_t WritePCMData(OutStream& stream,
107 const int8_t* audioBuffer,
108 size_t bufferLength);
109
110 // Prepare for playing audio from stream.
111 // startPointMs and stopPointMs, unless zero, specify what part of the file
112 // should be read. From startPointMs ms to stopPointMs ms.
113 int32_t InitCompressedReading(InStream& stream,
114 const uint32_t startPointMs = 0,
115 const uint32_t stopPointMs = 0);
116
117 // Put 10-60ms of audio data from stream into the audioBuffer depending on
118 // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
119 // The return value is the number of bytes written to audioBuffer.
120 int32_t ReadCompressedData(InStream& stream,
121 int8_t* audioBuffer,
122 const size_t dataLengthInBytes);
123
124 // Prepare for recording audio to stream.
125 // codecInst specifies the encoding of the audio data.
126 int32_t InitCompressedWriting(OutStream& stream,
127 const CodecInst& codecInst);
128
129 // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
130 // to file. The audio frame size is determined by the codecInst.pacsize
131 // parameter of the last successful InitCompressedWriting(..) call.
132 // The return value is the number of bytes written to stream.
133 // Note: bufferLength must be exactly one frame.
134 int32_t WriteCompressedData(OutStream& stream,
135 const int8_t* audioBuffer,
136 const size_t bufferLength);
137
138 // Prepare for playing audio from stream.
139 // codecInst specifies the encoding of the audio data.
140 int32_t InitPreEncodedReading(InStream& stream,
141 const CodecInst& codecInst);
142
143 // Put 10-60ms of audio data from stream into the audioBuffer depending on
144 // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
145 // The return value is the number of bytes written to audioBuffer.
146 int32_t ReadPreEncodedData(InStream& stream,
147 int8_t* audioBuffer,
148 const size_t dataLengthInBytes);
149
150 // Prepare for recording audio to stream.
151 // codecInst specifies the encoding of the audio data.
152 int32_t InitPreEncodedWriting(OutStream& stream,
153 const CodecInst& codecInst);
154
155 // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
156 // to stream. The audio frame size is determined by the codecInst.pacsize
157 // parameter of the last successful InitPreEncodedWriting(..) call.
158 // The return value is the number of bytes written to stream.
159 // Note: bufferLength must be exactly one frame.
160 int32_t WritePreEncodedData(OutStream& stream,
161 const int8_t* inData,
162 const size_t dataLengthInBytes);
163
164 // Set durationMs to the size of the file (in ms) specified by fileName.
165 // freqInHz specifies the sampling frequency of the file.
166 int32_t FileDurationMs(const char* fileName,
167 const FileFormats fileFormat,
168 const uint32_t freqInHz = 16000);
169
170 // Return the number of ms that have been played so far.
171 uint32_t PlayoutPositionMs();
172
173 // Update codecInst according to the current audio codec being used for
174 // reading or writing.
175 int32_t codec_info(CodecInst& codecInst);
176
177 private:
178 // Biggest WAV frame supported is 10 ms at 48kHz of 2 channel, 16 bit audio.
179 enum{WAV_MAX_BUFFER_SIZE = 480*2*2};
180
181
182 int32_t InitWavCodec(uint32_t samplesPerSec,
183 uint32_t channels,
184 uint32_t bitsPerSample,
185 uint32_t formatTag);
186
187 // Parse the WAV header in stream.
188 int32_t ReadWavHeader(InStream& stream);
189
190 // Update the WAV header. freqInHz, bytesPerSample, channels, format,
191 // lengthInBytes specify characteristics of the audio data.
192 // freqInHz is the sampling frequency. bytesPerSample is the sample size in
193 // bytes. channels is the number of channels, e.g. 1 is mono and 2 is
194 // stereo. format is the encoding format (e.g. PCMU, PCMA, PCM, etc.).
195 // lengthInBytes is the total number of bytes occupied by the audio samples.
196 int32_t WriteWavHeader(OutStream& stream,
197 const uint32_t freqInHz,
198 const uint32_t bytesPerSample,
199 const uint32_t channels,
200 const uint32_t format,
201 const uint32_t lengthInBytes);
202
203 // Put dataLengthInBytes of audio data from stream into the audioBuffer.
204 // The return value is the number of bytes written to audioBuffer.
205 int32_t ReadWavData(InStream& stream, uint8_t* audioBuffer,
206 const uint32_t dataLengthInBytes);
207
208 // Update the current audio codec being used for reading or writing
209 // according to codecInst.
210 int32_t set_codec_info(const CodecInst& codecInst);
211
212 struct WAVE_FMTINFO_header
213 {
214 int16_t formatTag;
215 int16_t nChannels;
216 int32_t nSamplesPerSec;
217 int32_t nAvgBytesPerSec;
218 int16_t nBlockAlign;
219 int16_t nBitsPerSample;
220 };
221 // Identifiers for preencoded files.
222 enum MediaFileUtility_CodecType
223 {
224 kCodecNoCodec = 0,
225 kCodecIsac,
226 kCodecIsacSwb,
227 kCodecIsacLc,
228 kCodecL16_8Khz,
229 kCodecL16_16kHz,
230 kCodecL16_32Khz,
231 kCodecPcmu,
232 kCodecPcma,
233 kCodecIlbc20Ms,
234 kCodecIlbc30Ms,
235 kCodecG722,
236 kCodecG722_1_32Kbps,
237 kCodecG722_1_24Kbps,
238 kCodecG722_1_16Kbps,
239 kCodecG722_1c_48,
240 kCodecG722_1c_32,
241 kCodecG722_1c_24,
242 kCodecAmr,
243 kCodecAmrWb,
244 kCodecG729,
245 kCodecG729_1,
246 kCodecG726_40,
247 kCodecG726_32,
248 kCodecG726_24,
249 kCodecG726_16,
250 kCodecSpeex8Khz,
251 kCodecSpeex16Khz
252 };
253
254 // TODO (hellner): why store multiple formats. Just store either codec_info_
255 // or _wavFormatObj and supply conversion functions.
256 WAVE_FMTINFO_header _wavFormatObj;
257 int32_t _dataSize; // Chunk size if reading a WAV file
258 // Number of bytes to read. I.e. frame size in bytes. May be multiple
259 // chunks if reading WAV.
260 int32_t _readSizeBytes;
261
262 int32_t _id;
263
264 uint32_t _stopPointInMs;
265 uint32_t _startPointInMs;
266 uint32_t _playoutPositionMs;
267 size_t _bytesWritten;
268
269 CodecInst codec_info_;
270 MediaFileUtility_CodecType _codecId;
271
273 // The average number of bytes used for one audio sample.
273 int32_t _bytesPerSample;
274 int32_t _readPos;
275
276 // Only reading or writing can be enabled, not both.
277 bool _reading;
278 bool _writing;
279
280 // Scratch buffer used for turning stereo audio to mono.
281 uint8_t _tempData[WAV_MAX_BUFFER_SIZE];
282 };
283 } // namespace webrtc
284 #endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
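
For readers new to this class, here is a minimal, hypothetical usage sketch of the WAV reading path documented above. It is not part of this change; it assumes an already-opened InStream implementation and that a non-positive return value from ReadWavDataAsMono signals end-of-data or an error. The helper name DrainWavAsMono and the buffer size are illustrative only.

#include "webrtc/modules/media_file/source/media_file_utility.h"

namespace webrtc {

// Hypothetical helper: reads an entire WAV stream as mono frames and
// returns the total number of bytes delivered (or -1 on init failure).
int32_t DrainWavAsMono(InStream& stream) {
  ModuleFileUtility utility(0);  // The id value is arbitrary for this sketch.

  // startPointMs and stopPointMs default to 0, i.e. read the whole file.
  if (utility.InitWavReading(stream) != 0)
    return -1;

  // 10 ms of 2-channel, 16-bit audio at 48 kHz is the largest frame the
  // utility handles (see WAV_MAX_BUFFER_SIZE), so this buffer is big enough.
  int8_t audio_buffer[480 * 2 * 2];
  int32_t total_bytes = 0;
  for (;;) {
    int32_t bytes = utility.ReadWavDataAsMono(stream, audio_buffer,
                                              sizeof(audio_buffer));
    if (bytes <= 0)
      break;  // Assumed: end of the readable range, or an error.
    total_bytes += bytes;
  }
  return total_bytes;
}

}  // namespace webrtc

Writing follows the mirrored flow documented above (InitWavWriting, repeated WriteWavData calls, then UpdateWavHeader before the stream is closed), and per the note at the top of the header, a single ModuleFileUtility instance should not be used for reading and writing at the same time.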
