OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #ifndef WEBRTC_COMMON_AUDIO_INCLUDE_AUDIO_UTIL_H_ | 11 #ifndef WEBRTC_COMMON_AUDIO_INCLUDE_AUDIO_UTIL_H_ |
12 #define WEBRTC_COMMON_AUDIO_INCLUDE_AUDIO_UTIL_H_ | 12 #define WEBRTC_COMMON_AUDIO_INCLUDE_AUDIO_UTIL_H_ |
13 | 13 |
#include <cassert>
#include <cstring>
#include <limits>

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/typedefs.h"
18 | 19 |
19 namespace webrtc { | 20 namespace webrtc { |
20 | 21 |
// Convenience alias for the numeric limits of the canonical 16-bit sample
// type (min/max used by the S16 conversion routines described below).
typedef std::numeric_limits<int16_t> limits_int16;
22 | 23 |
23 // The conversion functions use the following naming convention: | 24 // The conversion functions use the following naming convention: |
24 // S16: int16_t [-32768, 32767] | 25 // S16: int16_t [-32768, 32767] |
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
90 for (int i = 0; i < num_channels; ++i) { | 91 for (int i = 0; i < num_channels; ++i) { |
91 const T* channel = deinterleaved[i]; | 92 const T* channel = deinterleaved[i]; |
92 int interleaved_idx = i; | 93 int interleaved_idx = i; |
93 for (int j = 0; j < samples_per_channel; ++j) { | 94 for (int j = 0; j < samples_per_channel; ++j) { |
94 interleaved[interleaved_idx] = channel[j]; | 95 interleaved[interleaved_idx] = channel[j]; |
95 interleaved_idx += num_channels; | 96 interleaved_idx += num_channels; |
96 } | 97 } |
97 } | 98 } |
98 } | 99 } |
99 | 100 |
// Averages a stereo pair into a mono buffer.
// |left| and |right| each hold |num_frames| samples; each output sample is
// the per-frame average, summed in Intermediate (pass a type wider than T
// to avoid overflow during the addition) and written to |out|.
template <typename T, typename Intermediate>
void DownmixStereoToMono(const T* left,
                         const T* right,
                         int num_frames,
                         T* out) {
  for (int n = 0; n < num_frames; ++n) {
    const Intermediate sum = static_cast<Intermediate>(left[n]) + right[n];
    out[n] = sum / 2;
  }
}
| 110 |
// Downmixes |num_channels| deinterleaved channels of |num_frames| samples
// each into |out| by averaging. Sums are accumulated in Intermediate (pass
// a type wider than T to avoid overflow). The stereo case is handled by a
// dedicated, simpler loop.
template <typename T, typename Intermediate>
void DownmixToMono(const T* const* input_channels,
                   int num_frames,
                   int num_channels,
                   T* out) {
  if (num_channels == 2) {
    const T* left = input_channels[0];
    const T* right = input_channels[1];
    for (int n = 0; n < num_frames; ++n)
      out[n] = (static_cast<Intermediate>(left[n]) + right[n]) / 2;
    return;
  }
  for (int n = 0; n < num_frames; ++n) {
    Intermediate acc = input_channels[0][n];
    for (int ch = 1; ch < num_channels; ++ch)
      acc += input_channels[ch][n];
    out[n] = acc / num_channels;
  }
}
| 129 |
// Downmixes an interleaved multichannel signal to a single channel by
// averaging all channels. Intermediate is the accumulator type for the
// per-frame sum (pass a type wider than T to avoid overflow).
template <typename T, typename Intermediate>
void DownmixInterleavedToMonoImpl(const T* interleaved,
                                  int num_multichannel_frames,
                                  int num_channels,
                                  T* deinterleaved) {
  assert(num_channels > 0);
  assert(num_multichannel_frames > 0);

  if (num_channels == 1) {
    // Mono in, mono out: a straight copy. memmove tolerates overlapping
    // source and destination buffers.
    std::memmove(deinterleaved, interleaved,
                 num_multichannel_frames * sizeof(*deinterleaved));
    return;
  }

  const T* src = interleaved;
  const T* const src_end =
      interleaved + num_multichannel_frames * num_channels;
  T* dst = deinterleaved;

  if (num_channels == 2) {
    // Explicitly unrolled path for the common stereo case.
    for (; src < src_end; src += 2)
      *dst++ = (static_cast<Intermediate>(src[0]) + src[1]) / 2;
  } else {
    // General case: sum each frame's channels, then divide.
    while (src < src_end) {
      const T* const frame_end = src + num_channels;
      Intermediate acc = *src++;
      while (src < frame_end)
        acc += *src++;
      *dst++ = acc / num_channels;
    }
  }
}
| 165 |
// Downmixes an interleaved multichannel signal to a single channel by
// averaging all channels, using T itself as the accumulator type.
// NOTE(review): for narrow integer T the in-type accumulation can overflow;
// the int16_t specialization declared below is presumably provided to widen
// the accumulator — confirm against its out-of-line definition.
template <typename T>
void DownmixInterleavedToMono(const T* interleaved,
                              int num_multichannel_frames,
                              int num_channels,
                              T* deinterleaved) {
  DownmixInterleavedToMonoImpl<T, T>(interleaved, num_multichannel_frames,
                                     num_channels, deinterleaved);
}
| 174 |
// Explicit specialization for int16_t, declared here so callers use it
// instead of instantiating the generic template; its definition lives out
// of line (not in this header).
template <>
void DownmixInterleavedToMono<int16_t>(const int16_t* interleaved,
                                       int num_multichannel_frames,
                                       int num_channels,
                                       int16_t* deinterleaved);
| 180 |
100 } // namespace webrtc | 181 } // namespace webrtc |
101 | 182 |
102 #endif // WEBRTC_COMMON_AUDIO_INCLUDE_AUDIO_UTIL_H_ | 183 #endif // WEBRTC_COMMON_AUDIO_INCLUDE_AUDIO_UTIL_H_ |
OLD | NEW |