OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 45 matching lines...)
56 | 56 |
57 // Transfer input signal to an AudioMultiVector. | 57 // Transfer input signal to an AudioMultiVector. |
58 AudioMultiVector input_vector(num_channels_); | 58 AudioMultiVector input_vector(num_channels_); |
59 input_vector.PushBackInterleaved(input, input_length); | 59 input_vector.PushBackInterleaved(input, input_length); |
60 size_t input_length_per_channel = input_vector.Size(); | 60 size_t input_length_per_channel = input_vector.Size(); |
61 assert(input_length_per_channel == input_length / num_channels_); | 61 assert(input_length_per_channel == input_length / num_channels_); |
62 | 62 |
63 size_t best_correlation_index = 0; | 63 size_t best_correlation_index = 0; |
64 size_t output_length = 0; | 64 size_t output_length = 0; |
65 | 65 |
| 66 std::unique_ptr<int16_t[]> input_channel( |
| 67 new int16_t[input_length_per_channel]); |
| 68 std::unique_ptr<int16_t[]> expanded_channel(new int16_t[expanded_length]); |
66 for (size_t channel = 0; channel < num_channels_; ++channel) { | 69 for (size_t channel = 0; channel < num_channels_; ++channel) { |
67 int16_t* input_channel = &input_vector[channel][0]; | 70 input_vector[channel].CopyTo( |
68 int16_t* expanded_channel = &expanded_[channel][0]; | 71 input_length_per_channel, 0, input_channel.get()); |
| 72 expanded_[channel].CopyTo(expanded_length, 0, expanded_channel.get()); |
| 73 |
69 int16_t new_mute_factor = SignalScaling( | 74 int16_t new_mute_factor = SignalScaling( |
70 input_channel, input_length_per_channel, expanded_channel); | 75 input_channel.get(), input_length_per_channel, expanded_channel.get()); |
71 | 76 |
72 // Adjust muting factor (product of "main" muting factor and expand muting | 77 // Adjust muting factor (product of "main" muting factor and expand muting |
73 // factor). | 78 // factor). |
74 int16_t* external_mute_factor = &external_mute_factor_array[channel]; | 79 int16_t* external_mute_factor = &external_mute_factor_array[channel]; |
75 *external_mute_factor = | 80 *external_mute_factor = |
76 (*external_mute_factor * expand_->MuteFactor(channel)) >> 14; | 81 (*external_mute_factor * expand_->MuteFactor(channel)) >> 14; |
77 | 82 |
78 // Update |external_mute_factor| if it is lower than |new_mute_factor|. | 83 // Update |external_mute_factor| if it is lower than |new_mute_factor|. |
79 if (new_mute_factor > *external_mute_factor) { | 84 if (new_mute_factor > *external_mute_factor) { |
80 *external_mute_factor = std::min(new_mute_factor, | 85 *external_mute_factor = std::min(new_mute_factor, |
81 static_cast<int16_t>(16384)); | 86 static_cast<int16_t>(16384)); |
82 } | 87 } |
83 | 88 |
84 if (channel == 0) { | 89 if (channel == 0) { |
85 // Downsample, correlate, and find strongest correlation period for the | 90 // Downsample, correlate, and find strongest correlation period for the |
86 // master (i.e., first) channel only. | 91 // master (i.e., first) channel only. |
87 // Downsample to 4kHz sample rate. | 92 // Downsample to 4kHz sample rate. |
88 Downsample(input_channel, input_length_per_channel, expanded_channel, | 93 Downsample(input_channel.get(), input_length_per_channel, |
89 expanded_length); | 94 expanded_channel.get(), expanded_length); |
90 | 95 |
91 // Calculate the lag of the strongest correlation period. | 96 // Calculate the lag of the strongest correlation period. |
92 best_correlation_index = CorrelateAndPeakSearch( | 97 best_correlation_index = CorrelateAndPeakSearch( |
93 old_length, input_length_per_channel, expand_period); | 98 old_length, input_length_per_channel, expand_period); |
94 } | 99 } |
95 | 100 |
96 temp_data_.resize(input_length_per_channel + best_correlation_index); | 101 temp_data_.resize(input_length_per_channel + best_correlation_index); |
97 int16_t* decoded_output = temp_data_.data() + best_correlation_index; | 102 int16_t* decoded_output = temp_data_.data() + best_correlation_index; |
98 | 103 |
99 // Mute the new decoded data if needed (and unmute it linearly). | 104 // Mute the new decoded data if needed (and unmute it linearly). |
100 // This is the overlapping part of expanded_signal. | 105 // This is the overlapping part of expanded_signal. |
101 size_t interpolation_length = std::min( | 106 size_t interpolation_length = std::min( |
102 kMaxCorrelationLength * fs_mult_, | 107 kMaxCorrelationLength * fs_mult_, |
103 expanded_length - best_correlation_index); | 108 expanded_length - best_correlation_index); |
104 interpolation_length = std::min(interpolation_length, | 109 interpolation_length = std::min(interpolation_length, |
105 input_length_per_channel); | 110 input_length_per_channel); |
106 if (*external_mute_factor < 16384) { | 111 if (*external_mute_factor < 16384) { |
107 // Set a suitable muting slope (Q20). 0.004 for NB, 0.002 for WB, | 112 // Set a suitable muting slope (Q20). 0.004 for NB, 0.002 for WB, |
108 // and so on. | 113 // and so on. |
109 int increment = 4194 / fs_mult_; | 114 int increment = 4194 / fs_mult_; |
110 *external_mute_factor = | 115 *external_mute_factor = |
111 static_cast<int16_t>(DspHelper::RampSignal(input_channel, | 116 static_cast<int16_t>(DspHelper::RampSignal(input_channel.get(), |
112 interpolation_length, | 117 interpolation_length, |
113 *external_mute_factor, | 118 *external_mute_factor, |
114 increment)); | 119 increment)); |
115 DspHelper::UnmuteSignal(&input_channel[interpolation_length], | 120 DspHelper::UnmuteSignal(&input_channel[interpolation_length], |
116 input_length_per_channel - interpolation_length, | 121 input_length_per_channel - interpolation_length, |
117 external_mute_factor, increment, | 122 external_mute_factor, increment, |
118 &decoded_output[interpolation_length]); | 123 &decoded_output[interpolation_length]); |
119 } else { | 124 } else { |
120 // No muting needed. | 125 // No muting needed. |
121 memmove( | 126 memmove( |
122 &decoded_output[interpolation_length], | 127 &decoded_output[interpolation_length], |
123 &input_channel[interpolation_length], | 128 &input_channel[interpolation_length], |
124 sizeof(int16_t) * (input_length_per_channel - interpolation_length)); | 129 sizeof(int16_t) * (input_length_per_channel - interpolation_length)); |
125 } | 130 } |
126 | 131 |
127 // Do overlap and mix linearly. | 132 // Do overlap and mix linearly. |
128 int16_t increment = | 133 int16_t increment = |
129 static_cast<int16_t>(16384 / (interpolation_length + 1)); // In Q14. | 134 static_cast<int16_t>(16384 / (interpolation_length + 1)); // In Q14. |
130 int16_t mute_factor = 16384 - increment; | 135 int16_t mute_factor = 16384 - increment; |
131 memmove(temp_data_.data(), expanded_channel, | 136 memmove(temp_data_.data(), expanded_channel.get(), |
132 sizeof(int16_t) * best_correlation_index); | 137 sizeof(int16_t) * best_correlation_index); |
133 DspHelper::CrossFade(&expanded_channel[best_correlation_index], | 138 DspHelper::CrossFade(&expanded_channel[best_correlation_index], |
134 input_channel, interpolation_length, | 139 input_channel.get(), interpolation_length, |
135 &mute_factor, increment, decoded_output); | 140 &mute_factor, increment, decoded_output); |
136 | 141 |
137 output_length = best_correlation_index + input_length_per_channel; | 142 output_length = best_correlation_index + input_length_per_channel; |
138 if (channel == 0) { | 143 if (channel == 0) { |
139 assert(output->Empty()); // Output should be empty at this point. | 144 assert(output->Empty()); // Output should be empty at this point. |
140 output->AssertSize(output_length); | 145 output->AssertSize(output_length); |
141 } else { | 146 } else { |
142 assert(output->Size() == output_length); | 147 assert(output->Size() == output_length); |
143 } | 148 } |
144 memcpy(&(*output)[channel][0], temp_data_.data(), | 149 (*output)[channel].OverwriteAt(temp_data_.data(), output_length, 0); |
145 sizeof(temp_data_[0]) * output_length); | |
146 } | 150 } |
147 | 151 |
148 // Copy back the first part of the data to |sync_buffer_| and remove it from | 152 // Copy back the first part of the data to |sync_buffer_| and remove it from |
149 // |output|. | 153 // |output|. |
150 sync_buffer_->ReplaceAtIndex(*output, old_length, sync_buffer_->next_index()); | 154 sync_buffer_->ReplaceAtIndex(*output, old_length, sync_buffer_->next_index()); |
151 output->PopFront(old_length); | 155 output->PopFront(old_length); |
152 | 156 |
153 // Return new added length. |old_length| samples were borrowed from | 157 // Return new added length. |old_length| samples were borrowed from |
154 // |sync_buffer_|. | 158 // |sync_buffer_|. |
155 return output_length - old_length; | 159 return output_length - old_length; |
(...skipping 209 matching lines...)
365 } | 369 } |
366 return best_correlation_index; | 370 return best_correlation_index; |
367 } | 371 } |
368 | 372 |
369 size_t Merge::RequiredFutureSamples() { | 373 size_t Merge::RequiredFutureSamples() { |
370 return fs_hz_ / 100 * num_channels_; // 10 ms. | 374 return fs_hz_ / 100 * num_channels_; // 10 ms. |
371 } | 375 } |
372 | 376 |
373 | 377 |
374 } // namespace webrtc | 378 } // namespace webrtc |
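
Note on the Q14 arithmetic used in the hunk above: the mute-factor product and the linear overlap-and-mix are easier to follow in isolation. The sketch below is an illustration only, not the NetEq DspHelper code; MultiplyQ14 mirrors `(*external_mute_factor * expand_->MuteFactor(channel)) >> 14`, and CrossFadeQ14 mirrors the "overlap and mix linearly" step with its increment of `16384 / (interpolation_length + 1)`. Both helper names are invented for this example, and the exact rounding in DspHelper::CrossFade may differ.

#include <cstddef>
#include <cstdint>

// Q14 product: 16384 represents 1.0, so multiplying two Q14 factors and
// shifting right by 14 keeps the result in Q14, as in
//   (*external_mute_factor * expand_->MuteFactor(channel)) >> 14.
int16_t MultiplyQ14(int16_t a_q14, int16_t b_q14) {
  return static_cast<int16_t>((a_q14 * b_q14) >> 14);
}

// Linear cross-fade from |from| to |to| over |length| samples: the weight
// starts just below 16384 and drops by 16384 / (length + 1) per sample, which
// is the shape of the "overlap and mix linearly" step above.
void CrossFadeQ14(const int16_t* from, const int16_t* to, size_t length,
                  int16_t* out) {
  const int16_t increment = static_cast<int16_t>(16384 / (length + 1));
  int16_t weight = 16384 - increment;  // In Q14.
  for (size_t i = 0; i < length; ++i) {
    out[i] = static_cast<int16_t>(
        (weight * from[i] + (16384 - weight) * to[i]) >> 14);
    weight -= increment;
  }
}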