OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 /* | 11 /* |
12 * Contains the API functions for the AEC. | 12 * Contains the API functions for the AEC. |
13 */ | 13 */ |
14 #include "webrtc/modules/audio_processing/aec/echo_cancellation.h" | 14 #include "webrtc/modules/audio_processing/aec/echo_cancellation.h" |
15 | 15 |
16 #include <math.h> | 16 #include <math.h> |
17 #ifdef WEBRTC_AEC_DEBUG_DUMP | 17 #ifdef WEBRTC_AEC_DEBUG_DUMP |
18 #include <stdio.h> | 18 #include <stdio.h> |
19 #endif | 19 #endif |
20 #include <stdlib.h> | 20 #include <stdlib.h> |
21 #include <string.h> | 21 #include <string.h> |
22 | 22 |
| 23 extern "C" { |
23 #include "webrtc/common_audio/ring_buffer.h" | 24 #include "webrtc/common_audio/ring_buffer.h" |
24 #include "webrtc/common_audio/signal_processing/include/signal_processing_librar
y.h" | 25 #include "webrtc/common_audio/signal_processing/include/signal_processing_librar
y.h" |
| 26 } |
25 #include "webrtc/modules/audio_processing/aec/aec_core.h" | 27 #include "webrtc/modules/audio_processing/aec/aec_core.h" |
| 28 extern "C" { |
26 #include "webrtc/modules/audio_processing/aec/aec_resampler.h" | 29 #include "webrtc/modules/audio_processing/aec/aec_resampler.h" |
| 30 } |
27 #include "webrtc/modules/audio_processing/aec/echo_cancellation_internal.h" | 31 #include "webrtc/modules/audio_processing/aec/echo_cancellation_internal.h" |
28 #include "webrtc/typedefs.h" | 32 #include "webrtc/typedefs.h" |
29 | 33 |
30 // Measured delays [ms] | 34 // Measured delays [ms] |
31 // Device Chrome GTP | 35 // Device Chrome GTP |
32 // MacBook Air 10 | 36 // MacBook Air 10 |
33 // MacBook Retina 10 100 | 37 // MacBook Retina 10 100 |
34 // MacPro 30? | 38 // MacPro 30? |
35 // | 39 // |
36 // Win7 Desktop 70 80? | 40 // Win7 Desktop 70 80? |
(...skipping 75 matching lines...)
112 int32_t skew); | 116 int32_t skew); |
113 static void ProcessExtended(Aec* self, | 117 static void ProcessExtended(Aec* self, |
114 const float* const* near, | 118 const float* const* near, |
115 size_t num_bands, | 119 size_t num_bands, |
116 float* const* out, | 120 float* const* out, |
117 size_t num_samples, | 121 size_t num_samples, |
118 int16_t reported_delay_ms, | 122 int16_t reported_delay_ms, |
119 int32_t skew); | 123 int32_t skew); |
120 | 124 |
121 void* WebRtcAec_Create() { | 125 void* WebRtcAec_Create() { |
122 Aec* aecpc = malloc(sizeof(Aec)); | 126 Aec* aecpc = reinterpret_cast<Aec*>(malloc(sizeof(Aec))); |
123 | 127 |
124 if (!aecpc) { | 128 if (!aecpc) { |
125 return NULL; | 129 return NULL; |
126 } | 130 } |
127 | 131 |
128 aecpc->aec = WebRtcAec_CreateAec(); | 132 aecpc->aec = WebRtcAec_CreateAec(); |
129 if (!aecpc->aec) { | 133 if (!aecpc->aec) { |
130 WebRtcAec_Free(aecpc); | 134 WebRtcAec_Free(aecpc); |
131 return NULL; | 135 return NULL; |
132 } | 136 } |
(...skipping 10 matching lines...)
143 if (!aecpc->far_pre_buf) { | 147 if (!aecpc->far_pre_buf) { |
144 WebRtcAec_Free(aecpc); | 148 WebRtcAec_Free(aecpc); |
145 return NULL; | 149 return NULL; |
146 } | 150 } |
147 | 151 |
148 aecpc->initFlag = 0; | 152 aecpc->initFlag = 0; |
149 | 153 |
150 #ifdef WEBRTC_AEC_DEBUG_DUMP | 154 #ifdef WEBRTC_AEC_DEBUG_DUMP |
151 { | 155 { |
152 char filename[64]; | 156 char filename[64]; |
153 sprintf(filename, "aec_buf%d.dat", webrtc_aec_instance_count); | 157 snprintf(filename, sizeof(filename), "aec_buf%d.dat", |
| 158 webrtc_aec_instance_count); |
154 aecpc->bufFile = fopen(filename, "wb"); | 159 aecpc->bufFile = fopen(filename, "wb"); |
155 sprintf(filename, "aec_skew%d.dat", webrtc_aec_instance_count); | 160 snprintf(filename, sizeof(filename), "aec_skew%d.dat", |
| 161 webrtc_aec_instance_count); |
156 aecpc->skewFile = fopen(filename, "wb"); | 162 aecpc->skewFile = fopen(filename, "wb"); |
157 sprintf(filename, "aec_delay%d.dat", webrtc_aec_instance_count); | 163 snprintf(filename, sizeof(filename), "aec_delay%d.dat", |
| 164 webrtc_aec_instance_count); |
158 aecpc->delayFile = fopen(filename, "wb"); | 165 aecpc->delayFile = fopen(filename, "wb"); |
159 webrtc_aec_instance_count++; | 166 webrtc_aec_instance_count++; |
160 } | 167 } |
161 #endif | 168 #endif |
162 | 169 |
163 return aecpc; | 170 return aecpc; |
164 } | 171 } |
165 | 172 |
166 void WebRtcAec_Free(void* aecInst) { | 173 void WebRtcAec_Free(void* aecInst) { |
167 Aec* aecpc = (Aec*)aecInst; | 174 Aec* aecpc = reinterpret_cast<Aec*>(aecInst); |
168 | 175 |
169 if (aecpc == NULL) { | 176 if (aecpc == NULL) { |
170 return; | 177 return; |
171 } | 178 } |
172 | 179 |
173 WebRtc_FreeBuffer(aecpc->far_pre_buf); | 180 WebRtc_FreeBuffer(aecpc->far_pre_buf); |
174 | 181 |
175 #ifdef WEBRTC_AEC_DEBUG_DUMP | 182 #ifdef WEBRTC_AEC_DEBUG_DUMP |
176 fclose(aecpc->bufFile); | 183 fclose(aecpc->bufFile); |
177 fclose(aecpc->skewFile); | 184 fclose(aecpc->skewFile); |
178 fclose(aecpc->delayFile); | 185 fclose(aecpc->delayFile); |
179 #endif | 186 #endif |
180 | 187 |
181 WebRtcAec_FreeAec(aecpc->aec); | 188 WebRtcAec_FreeAec(aecpc->aec); |
182 WebRtcAec_FreeResampler(aecpc->resampler); | 189 WebRtcAec_FreeResampler(aecpc->resampler); |
183 free(aecpc); | 190 free(aecpc); |
184 } | 191 } |
185 | 192 |
186 int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) { | 193 int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) { |
187 Aec* aecpc = (Aec*)aecInst; | 194 Aec* aecpc = reinterpret_cast<Aec*>(aecInst); |
188 AecConfig aecConfig; | 195 AecConfig aecConfig; |
189 | 196 |
190 if (sampFreq != 8000 && sampFreq != 16000 && sampFreq != 32000 && | 197 if (sampFreq != 8000 && sampFreq != 16000 && sampFreq != 32000 && |
191 sampFreq != 48000) { | 198 sampFreq != 48000) { |
192 return AEC_BAD_PARAMETER_ERROR; | 199 return AEC_BAD_PARAMETER_ERROR; |
193 } | 200 } |
194 aecpc->sampFreq = sampFreq; | 201 aecpc->sampFreq = sampFreq; |
195 | 202 |
196 if (scSampFreq < 1 || scSampFreq > 96000) { | 203 if (scSampFreq < 1 || scSampFreq > 96000) { |
197 return AEC_BAD_PARAMETER_ERROR; | 204 return AEC_BAD_PARAMETER_ERROR; |
(...skipping 60 matching lines...)
258 } | 265 } |
259 | 266 |
260 return 0; | 267 return 0; |
261 } | 268 } |
262 | 269 |
263 // Returns any error that is caused when buffering the | 270 // Returns any error that is caused when buffering the |
264 // far-end signal. | 271 // far-end signal. |
265 int32_t WebRtcAec_GetBufferFarendError(void* aecInst, | 272 int32_t WebRtcAec_GetBufferFarendError(void* aecInst, |
266 const float* farend, | 273 const float* farend, |
267 size_t nrOfSamples) { | 274 size_t nrOfSamples) { |
268 Aec* aecpc = (Aec*)aecInst; | 275 Aec* aecpc = reinterpret_cast<Aec*>(aecInst); |
269 | 276 |
270 if (!farend) | 277 if (!farend) |
271 return AEC_NULL_POINTER_ERROR; | 278 return AEC_NULL_POINTER_ERROR; |
272 | 279 |
273 if (aecpc->initFlag != initCheck) | 280 if (aecpc->initFlag != initCheck) |
274 return AEC_UNINITIALIZED_ERROR; | 281 return AEC_UNINITIALIZED_ERROR; |
275 | 282 |
276 // number of samples == 160 for SWB input | 283 // number of samples == 160 for SWB input |
277 if (nrOfSamples != 80 && nrOfSamples != 160) | 284 if (nrOfSamples != 80 && nrOfSamples != 160) |
278 return AEC_BAD_PARAMETER_ERROR; | 285 return AEC_BAD_PARAMETER_ERROR; |
279 | 286 |
280 return 0; | 287 return 0; |
281 } | 288 } |
282 | 289 |
283 // only buffer L band for farend | 290 // only buffer L band for farend |
284 int32_t WebRtcAec_BufferFarend(void* aecInst, | 291 int32_t WebRtcAec_BufferFarend(void* aecInst, |
285 const float* farend, | 292 const float* farend, |
286 size_t nrOfSamples) { | 293 size_t nrOfSamples) { |
287 Aec* aecpc = (Aec*)aecInst; | 294 Aec* aecpc = reinterpret_cast<Aec*>(aecInst); |
288 size_t newNrOfSamples = nrOfSamples; | 295 size_t newNrOfSamples = nrOfSamples; |
289 float new_farend[MAX_RESAMP_LEN]; | 296 float new_farend[MAX_RESAMP_LEN]; |
290 const float* farend_ptr = farend; | 297 const float* farend_ptr = farend; |
291 | 298 |
292 // Get any error caused by buffering the farend signal. | 299 // Get any error caused by buffering the farend signal. |
293 int32_t error_code = | 300 int32_t error_code = |
294 WebRtcAec_GetBufferFarendError(aecInst, farend, nrOfSamples); | 301 WebRtcAec_GetBufferFarendError(aecInst, farend, nrOfSamples); |
295 | 302 |
296 if (error_code != 0) | 303 if (error_code != 0) |
297 return error_code; | 304 return error_code; |
298 | 305 |
299 if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) { | 306 if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) { |
300 // Resample and get a new number of samples | 307 // Resample and get a new number of samples |
301 WebRtcAec_ResampleLinear(aecpc->resampler, farend, nrOfSamples, aecpc->skew, | 308 WebRtcAec_ResampleLinear(aecpc->resampler, farend, nrOfSamples, aecpc->skew, |
302 new_farend, &newNrOfSamples); | 309 new_farend, &newNrOfSamples); |
303 farend_ptr = new_farend; | 310 farend_ptr = new_farend; |
304 } | 311 } |
305 | 312 |
306 aecpc->farend_started = 1; | 313 aecpc->farend_started = 1; |
307 WebRtcAec_SetSystemDelay( | 314 WebRtcAec_SetSystemDelay(aecpc->aec, WebRtcAec_system_delay(aecpc->aec) + |
308 aecpc->aec, WebRtcAec_system_delay(aecpc->aec) + (int)newNrOfSamples); | 315 static_cast<int>(newNrOfSamples)); |
309 | 316 |
310 // Write the time-domain data to |far_pre_buf|. | 317 // Write the time-domain data to |far_pre_buf|. |
311 WebRtc_WriteBuffer(aecpc->far_pre_buf, farend_ptr, newNrOfSamples); | 318 WebRtc_WriteBuffer(aecpc->far_pre_buf, farend_ptr, newNrOfSamples); |
312 | 319 |
313 // TODO(minyue): reduce to |PART_LEN| samples for each buffering, when | 320 // TODO(minyue): reduce to |PART_LEN| samples for each buffering, when |
314 // WebRtcAec_BufferFarendPartition() is changed to take |PART_LEN| samples. | 321 // WebRtcAec_BufferFarendPartition() is changed to take |PART_LEN| samples. |
315 while (WebRtc_available_read(aecpc->far_pre_buf) >= PART_LEN2) { | 322 while (WebRtc_available_read(aecpc->far_pre_buf) >= PART_LEN2) { |
316 // We have enough data to pass to the FFT, hence read PART_LEN2 samples. | 323 // We have enough data to pass to the FFT, hence read PART_LEN2 samples. |
317 { | 324 { |
318 float* ptmp = NULL; | 325 float* ptmp = NULL; |
319 float tmp[PART_LEN2]; | 326 float tmp[PART_LEN2]; |
320 WebRtc_ReadBuffer(aecpc->far_pre_buf, (void**)&ptmp, tmp, PART_LEN2); | 327 WebRtc_ReadBuffer(aecpc->far_pre_buf, |
| 328 reinterpret_cast<void**>(&ptmp), tmp, PART_LEN2); |
321 WebRtcAec_BufferFarendPartition(aecpc->aec, ptmp); | 329 WebRtcAec_BufferFarendPartition(aecpc->aec, ptmp); |
322 } | 330 } |
323 | 331 |
324 // Rewind |far_pre_buf| PART_LEN samples for overlap before continuing. | 332 // Rewind |far_pre_buf| PART_LEN samples for overlap before continuing. |
325 WebRtc_MoveReadPtr(aecpc->far_pre_buf, -PART_LEN); | 333 WebRtc_MoveReadPtr(aecpc->far_pre_buf, -PART_LEN); |
326 } | 334 } |
327 | 335 |
328 return 0; | 336 return 0; |
329 } | 337 } |
330 | 338 |
331 int32_t WebRtcAec_Process(void* aecInst, | 339 int32_t WebRtcAec_Process(void* aecInst, |
332 const float* const* nearend, | 340 const float* const* nearend, |
333 size_t num_bands, | 341 size_t num_bands, |
334 float* const* out, | 342 float* const* out, |
335 size_t nrOfSamples, | 343 size_t nrOfSamples, |
336 int16_t msInSndCardBuf, | 344 int16_t msInSndCardBuf, |
337 int32_t skew) { | 345 int32_t skew) { |
338 Aec* aecpc = (Aec*)aecInst; | 346 Aec* aecpc = reinterpret_cast<Aec*>(aecInst); |
339 int32_t retVal = 0; | 347 int32_t retVal = 0; |
340 | 348 |
341 if (out == NULL) { | 349 if (out == NULL) { |
342 return AEC_NULL_POINTER_ERROR; | 350 return AEC_NULL_POINTER_ERROR; |
343 } | 351 } |
344 | 352 |
345 if (aecpc->initFlag != initCheck) { | 353 if (aecpc->initFlag != initCheck) { |
346 return AEC_UNINITIALIZED_ERROR; | 354 return AEC_UNINITIALIZED_ERROR; |
347 } | 355 } |
348 | 356 |
(...skipping 26 matching lines...)
375 (void)fwrite(&far_buf_size_ms, 2, 1, aecpc->bufFile); | 383 (void)fwrite(&far_buf_size_ms, 2, 1, aecpc->bufFile); |
376 (void)fwrite(&aecpc->knownDelay, sizeof(aecpc->knownDelay), 1, | 384 (void)fwrite(&aecpc->knownDelay, sizeof(aecpc->knownDelay), 1, |
377 aecpc->delayFile); | 385 aecpc->delayFile); |
378 } | 386 } |
379 #endif | 387 #endif |
380 | 388 |
381 return retVal; | 389 return retVal; |
382 } | 390 } |
383 | 391 |
384 int WebRtcAec_set_config(void* handle, AecConfig config) { | 392 int WebRtcAec_set_config(void* handle, AecConfig config) { |
385 Aec* self = (Aec*)handle; | 393 Aec* self = reinterpret_cast<Aec*>(handle); |
386 if (self->initFlag != initCheck) { | 394 if (self->initFlag != initCheck) { |
387 return AEC_UNINITIALIZED_ERROR; | 395 return AEC_UNINITIALIZED_ERROR; |
388 } | 396 } |
389 | 397 |
390 if (config.skewMode != kAecFalse && config.skewMode != kAecTrue) { | 398 if (config.skewMode != kAecFalse && config.skewMode != kAecTrue) { |
391 return AEC_BAD_PARAMETER_ERROR; | 399 return AEC_BAD_PARAMETER_ERROR; |
392 } | 400 } |
393 self->skewMode = config.skewMode; | 401 self->skewMode = config.skewMode; |
394 | 402 |
395 if (config.nlpMode != kAecNlpConservative && | 403 if (config.nlpMode != kAecNlpConservative && |
396 config.nlpMode != kAecNlpModerate && | 404 config.nlpMode != kAecNlpModerate && |
397 config.nlpMode != kAecNlpAggressive) { | 405 config.nlpMode != kAecNlpAggressive) { |
398 return AEC_BAD_PARAMETER_ERROR; | 406 return AEC_BAD_PARAMETER_ERROR; |
399 } | 407 } |
400 | 408 |
401 if (config.metricsMode != kAecFalse && config.metricsMode != kAecTrue) { | 409 if (config.metricsMode != kAecFalse && config.metricsMode != kAecTrue) { |
402 return AEC_BAD_PARAMETER_ERROR; | 410 return AEC_BAD_PARAMETER_ERROR; |
403 } | 411 } |
404 | 412 |
405 if (config.delay_logging != kAecFalse && config.delay_logging != kAecTrue) { | 413 if (config.delay_logging != kAecFalse && config.delay_logging != kAecTrue) { |
406 return AEC_BAD_PARAMETER_ERROR; | 414 return AEC_BAD_PARAMETER_ERROR; |
407 } | 415 } |
408 | 416 |
409 WebRtcAec_SetConfigCore(self->aec, config.nlpMode, config.metricsMode, | 417 WebRtcAec_SetConfigCore(self->aec, config.nlpMode, config.metricsMode, |
410 config.delay_logging); | 418 config.delay_logging); |
411 return 0; | 419 return 0; |
412 } | 420 } |
413 | 421 |
414 int WebRtcAec_get_echo_status(void* handle, int* status) { | 422 int WebRtcAec_get_echo_status(void* handle, int* status) { |
415 Aec* self = (Aec*)handle; | 423 Aec* self = reinterpret_cast<Aec*>(handle); |
416 if (status == NULL) { | 424 if (status == NULL) { |
417 return AEC_NULL_POINTER_ERROR; | 425 return AEC_NULL_POINTER_ERROR; |
418 } | 426 } |
419 if (self->initFlag != initCheck) { | 427 if (self->initFlag != initCheck) { |
420 return AEC_UNINITIALIZED_ERROR; | 428 return AEC_UNINITIALIZED_ERROR; |
421 } | 429 } |
422 | 430 |
423 *status = WebRtcAec_echo_state(self->aec); | 431 *status = WebRtcAec_echo_state(self->aec); |
424 | 432 |
425 return 0; | 433 return 0; |
426 } | 434 } |
427 | 435 |
428 int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) { | 436 int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) { |
429 const float kUpWeight = 0.7f; | 437 const float kUpWeight = 0.7f; |
430 float dtmp; | 438 float dtmp; |
431 int stmp; | 439 int stmp; |
432 Aec* self = (Aec*)handle; | 440 Aec* self = reinterpret_cast<Aec*>(handle); |
433 Stats erl; | 441 Stats erl; |
434 Stats erle; | 442 Stats erle; |
435 Stats a_nlp; | 443 Stats a_nlp; |
436 | 444 |
437 if (handle == NULL) { | 445 if (handle == NULL) { |
438 return -1; | 446 return -1; |
439 } | 447 } |
440 if (metrics == NULL) { | 448 if (metrics == NULL) { |
441 return AEC_NULL_POINTER_ERROR; | 449 return AEC_NULL_POINTER_ERROR; |
442 } | 450 } |
443 if (self->initFlag != initCheck) { | 451 if (self->initFlag != initCheck) { |
444 return AEC_UNINITIALIZED_ERROR; | 452 return AEC_UNINITIALIZED_ERROR; |
445 } | 453 } |
446 | 454 |
447 WebRtcAec_GetEchoStats(self->aec, &erl, &erle, &a_nlp); | 455 WebRtcAec_GetEchoStats(self->aec, &erl, &erle, &a_nlp); |
448 | 456 |
449 // ERL | 457 // ERL |
450 metrics->erl.instant = (int)erl.instant; | 458 metrics->erl.instant = static_cast<int>(erl.instant); |
451 | 459 |
452 if ((erl.himean > kOffsetLevel) && (erl.average > kOffsetLevel)) { | 460 if ((erl.himean > kOffsetLevel) && (erl.average > kOffsetLevel)) { |
453 // Use a mix between regular average and upper part average. | 461 // Use a mix between regular average and upper part average. |
454 dtmp = kUpWeight * erl.himean + (1 - kUpWeight) * erl.average; | 462 dtmp = kUpWeight * erl.himean + (1 - kUpWeight) * erl.average; |
455 metrics->erl.average = (int)dtmp; | 463 metrics->erl.average = static_cast<int>(dtmp); |
456 } else { | 464 } else { |
457 metrics->erl.average = kOffsetLevel; | 465 metrics->erl.average = kOffsetLevel; |
458 } | 466 } |
459 | 467 |
460 metrics->erl.max = (int)erl.max; | 468 metrics->erl.max = static_cast<int>(erl.max); |
461 | 469 |
462 if (erl.min < (kOffsetLevel * (-1))) { | 470 if (erl.min < (kOffsetLevel * (-1))) { |
463 metrics->erl.min = (int)erl.min; | 471 metrics->erl.min = static_cast<int>(erl.min); |
464 } else { | 472 } else { |
465 metrics->erl.min = kOffsetLevel; | 473 metrics->erl.min = kOffsetLevel; |
466 } | 474 } |
467 | 475 |
468 // ERLE | 476 // ERLE |
469 metrics->erle.instant = (int)erle.instant; | 477 metrics->erle.instant = static_cast<int>(erle.instant); |
470 | 478 |
471 if ((erle.himean > kOffsetLevel) && (erle.average > kOffsetLevel)) { | 479 if ((erle.himean > kOffsetLevel) && (erle.average > kOffsetLevel)) { |
472 // Use a mix between regular average and upper part average. | 480 // Use a mix between regular average and upper part average. |
473 dtmp = kUpWeight * erle.himean + (1 - kUpWeight) * erle.average; | 481 dtmp = kUpWeight * erle.himean + (1 - kUpWeight) * erle.average; |
474 metrics->erle.average = (int)dtmp; | 482 metrics->erle.average = static_cast<int>(dtmp); |
475 } else { | 483 } else { |
476 metrics->erle.average = kOffsetLevel; | 484 metrics->erle.average = kOffsetLevel; |
477 } | 485 } |
478 | 486 |
479 metrics->erle.max = (int)erle.max; | 487 metrics->erle.max = static_cast<int>(erle.max); |
480 | 488 |
481 if (erle.min < (kOffsetLevel * (-1))) { | 489 if (erle.min < (kOffsetLevel * (-1))) { |
482 metrics->erle.min = (int)erle.min; | 490 metrics->erle.min = static_cast<int>(erle.min); |
483 } else { | 491 } else { |
484 metrics->erle.min = kOffsetLevel; | 492 metrics->erle.min = kOffsetLevel; |
485 } | 493 } |
486 | 494 |
487 // RERL | 495 // RERL |
488 if ((metrics->erl.average > kOffsetLevel) && | 496 if ((metrics->erl.average > kOffsetLevel) && |
489 (metrics->erle.average > kOffsetLevel)) { | 497 (metrics->erle.average > kOffsetLevel)) { |
490 stmp = metrics->erl.average + metrics->erle.average; | 498 stmp = metrics->erl.average + metrics->erle.average; |
491 } else { | 499 } else { |
492 stmp = kOffsetLevel; | 500 stmp = kOffsetLevel; |
493 } | 501 } |
494 metrics->rerl.average = stmp; | 502 metrics->rerl.average = stmp; |
495 | 503 |
496 // No other statistics needed, but returned for completeness. | 504 // No other statistics needed, but returned for completeness. |
497 metrics->rerl.instant = stmp; | 505 metrics->rerl.instant = stmp; |
498 metrics->rerl.max = stmp; | 506 metrics->rerl.max = stmp; |
499 metrics->rerl.min = stmp; | 507 metrics->rerl.min = stmp; |
500 | 508 |
501 // A_NLP | 509 // A_NLP |
502 metrics->aNlp.instant = (int)a_nlp.instant; | 510 metrics->aNlp.instant = static_cast<int>(a_nlp.instant); |
503 | 511 |
504 if ((a_nlp.himean > kOffsetLevel) && (a_nlp.average > kOffsetLevel)) { | 512 if ((a_nlp.himean > kOffsetLevel) && (a_nlp.average > kOffsetLevel)) { |
505 // Use a mix between regular average and upper part average. | 513 // Use a mix between regular average and upper part average. |
506 dtmp = kUpWeight * a_nlp.himean + (1 - kUpWeight) * a_nlp.average; | 514 dtmp = kUpWeight * a_nlp.himean + (1 - kUpWeight) * a_nlp.average; |
507 metrics->aNlp.average = (int)dtmp; | 515 metrics->aNlp.average = static_cast<int>(dtmp); |
508 } else { | 516 } else { |
509 metrics->aNlp.average = kOffsetLevel; | 517 metrics->aNlp.average = kOffsetLevel; |
510 } | 518 } |
511 | 519 |
512 metrics->aNlp.max = (int)a_nlp.max; | 520 metrics->aNlp.max = static_cast<int>(a_nlp.max); |
513 | 521 |
514 if (a_nlp.min < (kOffsetLevel * (-1))) { | 522 if (a_nlp.min < (kOffsetLevel * (-1))) { |
515 metrics->aNlp.min = (int)a_nlp.min; | 523 metrics->aNlp.min = static_cast<int>(a_nlp.min); |
516 } else { | 524 } else { |
517 metrics->aNlp.min = kOffsetLevel; | 525 metrics->aNlp.min = kOffsetLevel; |
518 } | 526 } |
519 | 527 |
520 return 0; | 528 return 0; |
521 } | 529 } |
522 | 530 |
523 int WebRtcAec_GetDelayMetrics(void* handle, | 531 int WebRtcAec_GetDelayMetrics(void* handle, |
524 int* median, | 532 int* median, |
525 int* std, | 533 int* std, |
526 float* fraction_poor_delays) { | 534 float* fraction_poor_delays) { |
527 Aec* self = (Aec*)handle; | 535 Aec* self = reinterpret_cast<Aec*>(handle); |
528 if (median == NULL) { | 536 if (median == NULL) { |
529 return AEC_NULL_POINTER_ERROR; | 537 return AEC_NULL_POINTER_ERROR; |
530 } | 538 } |
531 if (std == NULL) { | 539 if (std == NULL) { |
532 return AEC_NULL_POINTER_ERROR; | 540 return AEC_NULL_POINTER_ERROR; |
533 } | 541 } |
534 if (self->initFlag != initCheck) { | 542 if (self->initFlag != initCheck) { |
535 return AEC_UNINITIALIZED_ERROR; | 543 return AEC_UNINITIALIZED_ERROR; |
536 } | 544 } |
537 if (WebRtcAec_GetDelayMetricsCore(self->aec, median, std, | 545 if (WebRtcAec_GetDelayMetricsCore(self->aec, median, std, |
538 fraction_poor_delays) == -1) { | 546 fraction_poor_delays) == -1) { |
539 // Logging disabled. | 547 // Logging disabled. |
540 return AEC_UNSUPPORTED_FUNCTION_ERROR; | 548 return AEC_UNSUPPORTED_FUNCTION_ERROR; |
541 } | 549 } |
542 | 550 |
543 return 0; | 551 return 0; |
544 } | 552 } |
545 | 553 |
546 AecCore* WebRtcAec_aec_core(void* handle) { | 554 AecCore* WebRtcAec_aec_core(void* handle) { |
547 if (!handle) { | 555 if (!handle) { |
548 return NULL; | 556 return NULL; |
549 } | 557 } |
550 return ((Aec*)handle)->aec; | 558 return reinterpret_cast<Aec*>(handle)->aec; |
551 } | 559 } |
552 | 560 |
553 static int ProcessNormal(Aec* aecpc, | 561 static int ProcessNormal(Aec* aecpc, |
554 const float* const* nearend, | 562 const float* const* nearend, |
555 size_t num_bands, | 563 size_t num_bands, |
556 float* const* out, | 564 float* const* out, |
557 size_t nrOfSamples, | 565 size_t nrOfSamples, |
558 int16_t msInSndCardBuf, | 566 int16_t msInSndCardBuf, |
559 int32_t skew) { | 567 int32_t skew) { |
560 int retVal = 0; | 568 int retVal = 0; |
(...skipping 227 matching lines...)
788 | 796 |
789 // 3) Compensate for non-causality, if needed, by flushing one block. | 797 // 3) Compensate for non-causality, if needed, by flushing one block. |
790 if (current_delay < PART_LEN) { | 798 if (current_delay < PART_LEN) { |
791 current_delay += WebRtcAec_MoveFarReadPtr(aecpc->aec, 1) * PART_LEN; | 799 current_delay += WebRtcAec_MoveFarReadPtr(aecpc->aec, 1) * PART_LEN; |
792 } | 800 } |
793 | 801 |
794 // We use -1 to signal an initialized state in the "extended" implementation; | 802 // We use -1 to signal an initialized state in the "extended" implementation; |
795 // compensate for that. | 803 // compensate for that. |
796 aecpc->filtDelay = aecpc->filtDelay < 0 ? 0 : aecpc->filtDelay; | 804 aecpc->filtDelay = aecpc->filtDelay < 0 ? 0 : aecpc->filtDelay; |
797 aecpc->filtDelay = | 805 aecpc->filtDelay = |
798 WEBRTC_SPL_MAX(0, (short)(0.8 * aecpc->filtDelay + 0.2 * current_delay)); | 806 WEBRTC_SPL_MAX(0, static_cast<int16_t>(0.8 * |
| 807 aecpc->filtDelay + |
| 808 0.2 * current_delay)); |
799 | 809 |
800 delay_difference = aecpc->filtDelay - aecpc->knownDelay; | 810 delay_difference = aecpc->filtDelay - aecpc->knownDelay; |
801 if (delay_difference > 224) { | 811 if (delay_difference > 224) { |
802 if (aecpc->lastDelayDiff < 96) { | 812 if (aecpc->lastDelayDiff < 96) { |
803 aecpc->timeForDelayChange = 0; | 813 aecpc->timeForDelayChange = 0; |
804 } else { | 814 } else { |
805 aecpc->timeForDelayChange++; | 815 aecpc->timeForDelayChange++; |
806 } | 816 } |
807 } else if (delay_difference < 96 && aecpc->knownDelay > 0) { | 817 } else if (delay_difference < 96 && aecpc->knownDelay > 0) { |
808 if (aecpc->lastDelayDiff > 224) { | 818 if (aecpc->lastDelayDiff > 224) { |
(...skipping 32 matching lines...)
841 | 851 |
842 // 3) Compensate for non-causality, if needed, by flushing two blocks. | 852 // 3) Compensate for non-causality, if needed, by flushing two blocks. |
843 if (current_delay < PART_LEN) { | 853 if (current_delay < PART_LEN) { |
844 current_delay += WebRtcAec_MoveFarReadPtr(self->aec, 2) * PART_LEN; | 854 current_delay += WebRtcAec_MoveFarReadPtr(self->aec, 2) * PART_LEN; |
845 } | 855 } |
846 | 856 |
847 if (self->filtDelay == -1) { | 857 if (self->filtDelay == -1) { |
848 self->filtDelay = WEBRTC_SPL_MAX(0, 0.5 * current_delay); | 858 self->filtDelay = WEBRTC_SPL_MAX(0, 0.5 * current_delay); |
849 } else { | 859 } else { |
850 self->filtDelay = WEBRTC_SPL_MAX( | 860 self->filtDelay = WEBRTC_SPL_MAX( |
851 0, (short)(0.95 * self->filtDelay + 0.05 * current_delay)); | 861 0, static_cast<int16_t>(0.95 * self->filtDelay + 0.05 * current_delay)); |
852 } | 862 } |
853 | 863 |
854 delay_difference = self->filtDelay - self->knownDelay; | 864 delay_difference = self->filtDelay - self->knownDelay; |
855 if (delay_difference > 384) { | 865 if (delay_difference > 384) { |
856 if (self->lastDelayDiff < 128) { | 866 if (self->lastDelayDiff < 128) { |
857 self->timeForDelayChange = 0; | 867 self->timeForDelayChange = 0; |
858 } else { | 868 } else { |
859 self->timeForDelayChange++; | 869 self->timeForDelayChange++; |
860 } | 870 } |
861 } else if (delay_difference < 128 && self->knownDelay > 0) { | 871 } else if (delay_difference < 128 && self->knownDelay > 0) { |
862 if (self->lastDelayDiff > 384) { | 872 if (self->lastDelayDiff > 384) { |
863 self->timeForDelayChange = 0; | 873 self->timeForDelayChange = 0; |
864 } else { | 874 } else { |
865 self->timeForDelayChange++; | 875 self->timeForDelayChange++; |
866 } | 876 } |
867 } else { | 877 } else { |
868 self->timeForDelayChange = 0; | 878 self->timeForDelayChange = 0; |
869 } | 879 } |
870 self->lastDelayDiff = delay_difference; | 880 self->lastDelayDiff = delay_difference; |
871 | 881 |
872 if (self->timeForDelayChange > 25) { | 882 if (self->timeForDelayChange > 25) { |
873 self->knownDelay = WEBRTC_SPL_MAX((int)self->filtDelay - 256, 0); | 883 self->knownDelay = WEBRTC_SPL_MAX((int)self->filtDelay - 256, 0); |
874 } | 884 } |
875 } | 885 } |
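
The pattern running through the changes above is compiling this file as C++: the remaining C-only headers are wrapped in extern "C", C-style casts become reinterpret_cast/static_cast, and sprintf becomes snprintf. A minimal sketch of the include-wrapping idiom, using hypothetical names rather than the headers touched in this CL:

  // caller.cc -- C++ translation unit consuming a plain C header.
  // "legacy_api.h" stands in for a header (like ring_buffer.h above)
  // that declares C functions without its own __cplusplus guards.
  extern "C" {
  #include "legacy_api.h"  // hypothetical: declares int legacy_sum(int a, int b);
  }

  int CallLegacy(int a, int b) {
    // The declaration above has C linkage, so this call resolves to the
    // unmangled symbol emitted by the C-compiled library.
    return legacy_sum(a, b);
  }

Without the wrapper, the C++ compiler would mangle the declared names and the link step would fail to find them in the C object files.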