| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 90 matching lines...) |
| 101 // This is the overlapping part of expanded_signal. | 101 // This is the overlapping part of expanded_signal. |
| 102 int interpolation_length = std::min( | 102 int interpolation_length = std::min( |
| 103 kMaxCorrelationLength * fs_mult_, | 103 kMaxCorrelationLength * fs_mult_, |
| 104 expanded_length - best_correlation_index); | 104 expanded_length - best_correlation_index); |
| 105 interpolation_length = std::min(interpolation_length, | 105 interpolation_length = std::min(interpolation_length, |
| 106 static_cast<int>(input_length_per_channel)); | 106 static_cast<int>(input_length_per_channel)); |
| 107 if (*external_mute_factor < 16384) { | 107 if (*external_mute_factor < 16384) { |
| 108 // Set a suitable muting slope (Q20). 0.004 for NB, 0.002 for WB, | 108 // Set a suitable muting slope (Q20). 0.004 for NB, 0.002 for WB, |
| 109 // and so on. | 109 // and so on. |
| 110 int increment = 4194 / fs_mult_; | 110 int increment = 4194 / fs_mult_; |
| 111 *external_mute_factor = DspHelper::RampSignal(input_channel, | 111 *external_mute_factor = |
| 112 interpolation_length, | 112 static_cast<int16_t>(DspHelper::RampSignal(input_channel, |
| 113 *external_mute_factor, | 113 interpolation_length, |
| 114 increment); | 114 *external_mute_factor, |
| | 115 increment)); |
| 115 DspHelper::UnmuteSignal(&input_channel[interpolation_length], | 116 DspHelper::UnmuteSignal(&input_channel[interpolation_length], |
| 116 input_length_per_channel - interpolation_length, | 117 input_length_per_channel - interpolation_length, |
| 117 external_mute_factor, increment, | 118 external_mute_factor, increment, |
| 118 &decoded_output[interpolation_length]); | 119 &decoded_output[interpolation_length]); |
| 119 } else { | 120 } else { |
| 120 // No muting needed. | 121 // No muting needed. |
| 121 memmove( | 122 memmove( |
| 122 &decoded_output[interpolation_length], | 123 &decoded_output[interpolation_length], |
| 123 &input_channel[interpolation_length], | 124 &input_channel[interpolation_length], |
| 124 sizeof(int16_t) * (input_length_per_channel - interpolation_length)); | 125 sizeof(int16_t) * (input_length_per_channel - interpolation_length)); |
| 125 } | 126 } |
| 126 | 127 |
| 127 // Do overlap and mix linearly. | 128 // Do overlap and mix linearly. |
| 128 int increment = 16384 / (interpolation_length + 1); // In Q14. | 129 int16_t increment = |
| | 130 static_cast<int16_t>(16384 / (interpolation_length + 1)); // In Q14. |
| 129 int16_t mute_factor = 16384 - increment; | 131 int16_t mute_factor = 16384 - increment; |
| 130 memmove(temp_data, expanded_channel, | 132 memmove(temp_data, expanded_channel, |
| 131 sizeof(int16_t) * best_correlation_index); | 133 sizeof(int16_t) * best_correlation_index); |
| 132 DspHelper::CrossFade(&expanded_channel[best_correlation_index], | 134 DspHelper::CrossFade(&expanded_channel[best_correlation_index], |
| 133 input_channel, interpolation_length, | 135 input_channel, interpolation_length, |
| 134 &mute_factor, increment, decoded_output); | 136 &mute_factor, increment, decoded_output); |
| 135 | 137 |
| 136 output_length = best_correlation_index + input_length_per_channel; | 138 output_length = best_correlation_index + input_length_per_channel; |
| 137 if (channel == 0) { | 139 if (channel == 0) { |
| 138 assert(output->Empty()); // Output should be empty at this point. | 140 assert(output->Empty()); // Output should be empty at this point. |
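Note on the overlap-add above: `increment` is chosen so that the Q14 weight 16384 (i.e. 1.0) is consumed linearly over `interpolation_length` samples, so the expanded signal fades out while the newly decoded input fades in with the complementary weight. The sketch below illustrates that Q14 arithmetic only; it is an assumption about what `DspHelper::CrossFade` does at this call site, not the library implementation.

```cpp
#include <cstddef>
#include <cstdint>

// Illustrative Q14 linear cross-fade: |fade_out| is weighted by mute_factor,
// |fade_in| by its complement, and the weight ramps down by |increment| per
// sample. A simplified sketch, not the DspHelper code.
void CrossFadeQ14Sketch(const int16_t* fade_out, const int16_t* fade_in,
                        size_t length, int16_t mute_factor, int16_t increment,
                        int16_t* output) {
  for (size_t i = 0; i < length; ++i) {
    // Weights sum to 16384 (1.0 in Q14); products fit comfortably in 32 bits.
    int32_t mixed = fade_out[i] * mute_factor +
                    fade_in[i] * (16384 - mute_factor);
    output[i] = static_cast<int16_t>((mixed + 8192) >> 14);  // Round, drop Q14.
    mute_factor = static_cast<int16_t>(mute_factor - increment);
    if (mute_factor < 0) mute_factor = 0;
  }
}
```

With `mute_factor` starting at `16384 - increment` and `increment = 16384 / (interpolation_length + 1)`, the weight on the expanded signal reaches approximately zero at the last overlap sample.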
| (...skipping 100 matching lines...) |
| 239 // Calculate muting factor to use for new frame. | 241 // Calculate muting factor to use for new frame. |
| 240 int16_t mute_factor; | 242 int16_t mute_factor; |
| 241 if (energy_input > energy_expanded) { | 243 if (energy_input > energy_expanded) { |
| 242 // Normalize |energy_input| to 14 bits. | 244 // Normalize |energy_input| to 14 bits. |
| 243 int16_t temp_shift = WebRtcSpl_NormW32(energy_input) - 17; | 245 int16_t temp_shift = WebRtcSpl_NormW32(energy_input) - 17; |
| 244 energy_input = WEBRTC_SPL_SHIFT_W32(energy_input, temp_shift); | 246 energy_input = WEBRTC_SPL_SHIFT_W32(energy_input, temp_shift); |
| 245 // Put |energy_expanded| in a domain 14 higher, so that | 247 // Put |energy_expanded| in a domain 14 higher, so that |
| 246 // energy_expanded / energy_input is in Q14. | 248 // energy_expanded / energy_input is in Q14. |
| 247 energy_expanded = WEBRTC_SPL_SHIFT_W32(energy_expanded, temp_shift + 14); | 249 energy_expanded = WEBRTC_SPL_SHIFT_W32(energy_expanded, temp_shift + 14); |
| 248 // Calculate sqrt(energy_expanded / energy_input) in Q14. | 250 // Calculate sqrt(energy_expanded / energy_input) in Q14. |
| 249 mute_factor = WebRtcSpl_SqrtFloor((energy_expanded / energy_input) << 14); | 251 mute_factor = static_cast<int16_t>( |
| | 252 WebRtcSpl_SqrtFloor((energy_expanded / energy_input) << 14)); |
| 250 } else { | 253 } else { |
| 251 // Set to 1 (in Q14) when |expanded| has higher energy than |input|. | 254 // Set to 1 (in Q14) when |expanded| has higher energy than |input|. |
| 252 mute_factor = 16384; | 255 mute_factor = 16384; |
| 253 } | 256 } |
| 254 | 257 |
| 255 return mute_factor; | 258 return mute_factor; |
| 256 } | 259 } |
| 257 | 260 |
| 258 // TODO(hlundin): There are some parameter values in this method that seem | 261 // TODO(hlundin): There are some parameter values in this method that seem |
| 259 // strange. Compare with Expand::Correlation. | 262 // strange. Compare with Expand::Correlation. |
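Note on the mute factor computed above: the target is `mute_factor ≈ sqrt(energy_expanded / energy_input)` in Q14, capped at 16384 (1.0) whenever the expansion is at least as energetic as the new input. Normalizing `energy_input` to 14 significant bits and lifting `energy_expanded` 14 bits higher makes the integer quotient a Q14 ratio, and shifting that quotient up another 14 bits before `WebRtcSpl_SqrtFloor` gives a Q14 result, since sqrt(x * 2^28) = sqrt(x) * 2^14. A floating-point sketch of the same intent (for illustration only; it is not the fixed-point path NetEq runs):

```cpp
#include <cmath>
#include <cstdint>

// Floating-point equivalent of the Q14 mute factor computed above.
int16_t MuteFactorQ14Sketch(double energy_expanded, double energy_input) {
  if (energy_input <= 0.0 || energy_expanded >= energy_input) {
    return 16384;  // 1.0 in Q14: new input is not louder than the expansion.
  }
  // Ratio is < 1.0 here, so the scaled result stays within int16_t range.
  double ratio = std::sqrt(energy_expanded / energy_input);
  return static_cast<int16_t>(ratio * 16384.0);
}
```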
| (...skipping 112 matching lines...) |
| 372 } | 375 } |
| 373 return best_correlation_index; | 376 return best_correlation_index; |
| 374 } | 377 } |
| 375 | 378 |
| 376 int Merge::RequiredFutureSamples() { | 379 int Merge::RequiredFutureSamples() { |
| 377 return static_cast<int>(fs_hz_ / 100 * num_channels_); // 10 ms. | 380 return static_cast<int>(fs_hz_ / 100 * num_channels_); // 10 ms. |
| 378 } | 381 } |
| 379 | 382 |
| 380 | 383 |
| 381 } // namespace webrtc | 384 } // namespace webrtc |
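For reference, `Merge::RequiredFutureSamples()` above asks for 10 ms of audio across all channels: at 48000 Hz with stereo output, for example, that is 48000 / 100 * 2 = 960 samples.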