| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 106 matching lines...) |
| 117 | 117 |
| 118 return 0; | 118 return 0; |
| 119 #else | 119 #else |
| 120 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 120 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 121 "SetNsStatus() Ns is not supported"); | 121 "SetNsStatus() Ns is not supported"); |
| 122 return -1; | 122 return -1; |
| 123 #endif | 123 #endif |
| 124 } | 124 } |
| 125 | 125 |
| 126 int VoEAudioProcessingImpl::GetNsStatus(bool& enabled, NsModes& mode) { | 126 int VoEAudioProcessingImpl::GetNsStatus(bool& enabled, NsModes& mode) { |
| 127 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 128 "GetNsStatus(enabled=?, mode=?)"); | |
| 129 #ifdef WEBRTC_VOICE_ENGINE_NR | 127 #ifdef WEBRTC_VOICE_ENGINE_NR |
| 130 if (!_shared->statistics().Initialized()) { | 128 if (!_shared->statistics().Initialized()) { |
| 131 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 129 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 132 return -1; | 130 return -1; |
| 133 } | 131 } |
| 134 | 132 |
| 135 enabled = _shared->audio_processing()->noise_suppression()->is_enabled(); | 133 enabled = _shared->audio_processing()->noise_suppression()->is_enabled(); |
| 136 NoiseSuppression::Level nsLevel = | 134 NoiseSuppression::Level nsLevel = |
| 137 _shared->audio_processing()->noise_suppression()->level(); | 135 _shared->audio_processing()->noise_suppression()->level(); |
| 138 | 136 |
| 139 switch (nsLevel) { | 137 switch (nsLevel) { |
| 140 case NoiseSuppression::kLow: | 138 case NoiseSuppression::kLow: |
| 141 mode = kNsLowSuppression; | 139 mode = kNsLowSuppression; |
| 142 break; | 140 break; |
| 143 case NoiseSuppression::kModerate: | 141 case NoiseSuppression::kModerate: |
| 144 mode = kNsModerateSuppression; | 142 mode = kNsModerateSuppression; |
| 145 break; | 143 break; |
| 146 case NoiseSuppression::kHigh: | 144 case NoiseSuppression::kHigh: |
| 147 mode = kNsHighSuppression; | 145 mode = kNsHighSuppression; |
| 148 break; | 146 break; |
| 149 case NoiseSuppression::kVeryHigh: | 147 case NoiseSuppression::kVeryHigh: |
| 150 mode = kNsVeryHighSuppression; | 148 mode = kNsVeryHighSuppression; |
| 151 break; | 149 break; |
| 152 } | 150 } |
| 153 | |
| 154 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 155 "GetNsStatus() => enabled=% d, mode=%d", enabled, mode); | |
| 156 return 0; | 151 return 0; |
| 157 #else | 152 #else |
| 158 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 153 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 159 "GetNsStatus() Ns is not supported"); | 154 "GetNsStatus() Ns is not supported"); |
| 160 return -1; | 155 return -1; |
| 161 #endif | 156 #endif |
| 162 } | 157 } |
| 163 | 158 |
| 164 int VoEAudioProcessingImpl::SetAgcStatus(bool enable, AgcModes mode) { | 159 int VoEAudioProcessingImpl::SetAgcStatus(bool enable, AgcModes mode) { |
| 165 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | 160 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), |
| (...skipping 55 matching lines...) |
| 221 | 216 |
| 222 return 0; | 217 return 0; |
| 223 #else | 218 #else |
| 224 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 219 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 225 "SetAgcStatus() Agc is not supported"); | 220 "SetAgcStatus() Agc is not supported"); |
| 226 return -1; | 221 return -1; |
| 227 #endif | 222 #endif |
| 228 } | 223 } |
| 229 | 224 |
| 230 int VoEAudioProcessingImpl::GetAgcStatus(bool& enabled, AgcModes& mode) { | 225 int VoEAudioProcessingImpl::GetAgcStatus(bool& enabled, AgcModes& mode) { |
| 231 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 232 "GetAgcStatus(enabled=?, mode=?)"); | |
| 233 #ifdef WEBRTC_VOICE_ENGINE_AGC | 226 #ifdef WEBRTC_VOICE_ENGINE_AGC |
| 234 if (!_shared->statistics().Initialized()) { | 227 if (!_shared->statistics().Initialized()) { |
| 235 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 228 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 236 return -1; | 229 return -1; |
| 237 } | 230 } |
| 238 | 231 |
| 239 enabled = _shared->audio_processing()->gain_control()->is_enabled(); | 232 enabled = _shared->audio_processing()->gain_control()->is_enabled(); |
| 240 GainControl::Mode agcMode = | 233 GainControl::Mode agcMode = |
| 241 _shared->audio_processing()->gain_control()->mode(); | 234 _shared->audio_processing()->gain_control()->mode(); |
| 242 | 235 |
| 243 switch (agcMode) { | 236 switch (agcMode) { |
| 244 case GainControl::kFixedDigital: | 237 case GainControl::kFixedDigital: |
| 245 mode = kAgcFixedDigital; | 238 mode = kAgcFixedDigital; |
| 246 break; | 239 break; |
| 247 case GainControl::kAdaptiveAnalog: | 240 case GainControl::kAdaptiveAnalog: |
| 248 mode = kAgcAdaptiveAnalog; | 241 mode = kAgcAdaptiveAnalog; |
| 249 break; | 242 break; |
| 250 case GainControl::kAdaptiveDigital: | 243 case GainControl::kAdaptiveDigital: |
| 251 mode = kAgcAdaptiveDigital; | 244 mode = kAgcAdaptiveDigital; |
| 252 break; | 245 break; |
| 253 } | 246 } |
| 254 | 247 |
| 255 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 256 "GetAgcStatus() => enabled=%d, mode=%d", enabled, mode); | |
| 257 return 0; | 248 return 0; |
| 258 #else | 249 #else |
| 259 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 250 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 260 "GetAgcStatus() Agc is not supported"); | 251 "GetAgcStatus() Agc is not supported"); |
| 261 return -1; | 252 return -1; |
| 262 #endif | 253 #endif |
| 263 } | 254 } |
| 264 | 255 |
| 265 int VoEAudioProcessingImpl::SetAgcConfig(AgcConfig config) { | 256 int VoEAudioProcessingImpl::SetAgcConfig(AgcConfig config) { |
| 266 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | 257 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), |
| (...skipping 28 matching lines...) |
| 295 | 286 |
| 296 return 0; | 287 return 0; |
| 297 #else | 288 #else |
| 298 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 289 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 299 "SetAgcConfig() EC is not supported"); | 290 "SetAgcConfig() EC is not supported"); |
| 300 return -1; | 291 return -1; |
| 301 #endif | 292 #endif |
| 302 } | 293 } |
| 303 | 294 |
| 304 int VoEAudioProcessingImpl::GetAgcConfig(AgcConfig& config) { | 295 int VoEAudioProcessingImpl::GetAgcConfig(AgcConfig& config) { |
| 305 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 306 "GetAgcConfig(config=?)"); | |
| 307 #ifdef WEBRTC_VOICE_ENGINE_AGC | 296 #ifdef WEBRTC_VOICE_ENGINE_AGC |
| 308 if (!_shared->statistics().Initialized()) { | 297 if (!_shared->statistics().Initialized()) { |
| 309 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 298 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 310 return -1; | 299 return -1; |
| 311 } | 300 } |
| 312 | 301 |
| 313 config.targetLeveldBOv = | 302 config.targetLeveldBOv = |
| 314 _shared->audio_processing()->gain_control()->target_level_dbfs(); | 303 _shared->audio_processing()->gain_control()->target_level_dbfs(); |
| 315 config.digitalCompressionGaindB = | 304 config.digitalCompressionGaindB = |
| 316 _shared->audio_processing()->gain_control()->compression_gain_db(); | 305 _shared->audio_processing()->gain_control()->compression_gain_db(); |
| 317 config.limiterEnable = | 306 config.limiterEnable = |
| 318 _shared->audio_processing()->gain_control()->is_limiter_enabled(); | 307 _shared->audio_processing()->gain_control()->is_limiter_enabled(); |
| 319 | 308 |
| 320 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 321 "GetAgcConfig() => targetLeveldBOv=%u, " | |
| 322 "digitalCompressionGaindB=%u, limiterEnable=%d", | |
| 323 config.targetLeveldBOv, config.digitalCompressionGaindB, | |
| 324 config.limiterEnable); | |
| 325 | |
| 326 return 0; | 309 return 0; |
| 327 #else | 310 #else |
| 328 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 311 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 329 "GetAgcConfig() EC is not supported"); | 312 "GetAgcConfig() EC is not supported"); |
| 330 return -1; | 313 return -1; |
| 331 #endif | 314 #endif |
| 332 } | 315 } |
| 333 | 316 |
| 334 int VoEAudioProcessingImpl::SetRxNsStatus(int channel, | 317 int VoEAudioProcessingImpl::SetRxNsStatus(int channel, |
| 335 bool enable, | 318 bool enable, |
| (...skipping 16 matching lines...) |
| 352 #else | 335 #else |
| 353 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 336 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 354 "SetRxNsStatus() NS is not supported"); | 337 "SetRxNsStatus() NS is not supported"); |
| 355 return -1; | 338 return -1; |
| 356 #endif | 339 #endif |
| 357 } | 340 } |
| 358 | 341 |
| 359 int VoEAudioProcessingImpl::GetRxNsStatus(int channel, | 342 int VoEAudioProcessingImpl::GetRxNsStatus(int channel, |
| 360 bool& enabled, | 343 bool& enabled, |
| 361 NsModes& mode) { | 344 NsModes& mode) { |
| 362 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 363 "GetRxNsStatus(channel=%d, enable=?, mode=?)", channel); | |
| 364 #ifdef WEBRTC_VOICE_ENGINE_NR | 345 #ifdef WEBRTC_VOICE_ENGINE_NR |
| 365 if (!_shared->statistics().Initialized()) { | 346 if (!_shared->statistics().Initialized()) { |
| 366 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 347 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 367 return -1; | 348 return -1; |
| 368 } | 349 } |
| 369 | 350 |
| 370 voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel); | 351 voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel); |
| 371 voe::Channel* channelPtr = ch.channel(); | 352 voe::Channel* channelPtr = ch.channel(); |
| 372 if (channelPtr == NULL) { | 353 if (channelPtr == NULL) { |
| 373 _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError, | 354 _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError, |
| (...skipping 31 matching lines...) |
| 405 #else | 386 #else |
| 406 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 387 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 407 "SetRxAgcStatus() Agc is not supported"); | 388 "SetRxAgcStatus() Agc is not supported"); |
| 408 return -1; | 389 return -1; |
| 409 #endif | 390 #endif |
| 410 } | 391 } |
| 411 | 392 |
| 412 int VoEAudioProcessingImpl::GetRxAgcStatus(int channel, | 393 int VoEAudioProcessingImpl::GetRxAgcStatus(int channel, |
| 413 bool& enabled, | 394 bool& enabled, |
| 414 AgcModes& mode) { | 395 AgcModes& mode) { |
| 415 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 416 "GetRxAgcStatus(channel=%d, enable=?, mode=?)", channel); | |
| 417 #ifdef WEBRTC_VOICE_ENGINE_AGC | 396 #ifdef WEBRTC_VOICE_ENGINE_AGC |
| 418 if (!_shared->statistics().Initialized()) { | 397 if (!_shared->statistics().Initialized()) { |
| 419 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 398 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 420 return -1; | 399 return -1; |
| 421 } | 400 } |
| 422 | 401 |
| 423 voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel); | 402 voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel); |
| 424 voe::Channel* channelPtr = ch.channel(); | 403 voe::Channel* channelPtr = ch.channel(); |
| 425 if (channelPtr == NULL) { | 404 if (channelPtr == NULL) { |
| 426 _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError, | 405 _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError, |
| (...skipping 26 matching lines...) |
| 453 } | 432 } |
| 454 return channelPtr->SetRxAgcConfig(config); | 433 return channelPtr->SetRxAgcConfig(config); |
| 455 #else | 434 #else |
| 456 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 435 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 457 "SetRxAgcConfig() Agc is not supported"); | 436 "SetRxAgcConfig() Agc is not supported"); |
| 458 return -1; | 437 return -1; |
| 459 #endif | 438 #endif |
| 460 } | 439 } |
| 461 | 440 |
| 462 int VoEAudioProcessingImpl::GetRxAgcConfig(int channel, AgcConfig& config) { | 441 int VoEAudioProcessingImpl::GetRxAgcConfig(int channel, AgcConfig& config) { |
| 463 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 464 "GetRxAgcConfig(channel=%d)", channel); | |
| 465 #ifdef WEBRTC_VOICE_ENGINE_AGC | 442 #ifdef WEBRTC_VOICE_ENGINE_AGC |
| 466 if (!_shared->statistics().Initialized()) { | 443 if (!_shared->statistics().Initialized()) { |
| 467 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 444 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 468 return -1; | 445 return -1; |
| 469 } | 446 } |
| 470 | 447 |
| 471 voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel); | 448 voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel); |
| 472 voe::Channel* channelPtr = ch.channel(); | 449 voe::Channel* channelPtr = ch.channel(); |
| 473 if (channelPtr == NULL) { | 450 if (channelPtr == NULL) { |
| 474 _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError, | 451 _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError, |
| (...skipping 127 matching lines...) |
| 602 | 579 |
| 603 return 0; | 580 return 0; |
| 604 #else | 581 #else |
| 605 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 582 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 606 "SetEcStatus() EC is not supported"); | 583 "SetEcStatus() EC is not supported"); |
| 607 return -1; | 584 return -1; |
| 608 #endif | 585 #endif |
| 609 } | 586 } |
| 610 | 587 |
| 611 int VoEAudioProcessingImpl::GetEcStatus(bool& enabled, EcModes& mode) { | 588 int VoEAudioProcessingImpl::GetEcStatus(bool& enabled, EcModes& mode) { |
| 612 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 613 "GetEcStatus()"); | |
| 614 #ifdef WEBRTC_VOICE_ENGINE_ECHO | 589 #ifdef WEBRTC_VOICE_ENGINE_ECHO |
| 615 if (!_shared->statistics().Initialized()) { | 590 if (!_shared->statistics().Initialized()) { |
| 616 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 591 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 617 return -1; | 592 return -1; |
| 618 } | 593 } |
| 619 | 594 |
| 620 if (_isAecMode == true) { | 595 if (_isAecMode == true) { |
| 621 mode = kEcAec; | 596 mode = kEcAec; |
| 622 enabled = _shared->audio_processing()->echo_cancellation()->is_enabled(); | 597 enabled = _shared->audio_processing()->echo_cancellation()->is_enabled(); |
| 623 } else { | 598 } else { |
| 624 mode = kEcAecm; | 599 mode = kEcAecm; |
| 625 enabled = _shared->audio_processing()->echo_control_mobile()->is_enabled(); | 600 enabled = _shared->audio_processing()->echo_control_mobile()->is_enabled(); |
| 626 } | 601 } |
| 627 | 602 |
| 628 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 629 "GetEcStatus() => enabled=%i, mode=%i", enabled, (int)mode); | |
| 630 return 0; | 603 return 0; |
| 631 #else | 604 #else |
| 632 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 605 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 633 "GetEcStatus() EC is not supported"); | 606 "GetEcStatus() EC is not supported"); |
| 634 return -1; | 607 return -1; |
| 635 #endif | 608 #endif |
| 636 } | 609 } |
| 637 | 610 |
| 638 void VoEAudioProcessingImpl::SetDelayOffsetMs(int offset) { | 611 void VoEAudioProcessingImpl::SetDelayOffsetMs(int offset) { |
| 639 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | 612 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), |
| 640 "SetDelayOffsetMs(offset = %d)", offset); | 613 "SetDelayOffsetMs(offset = %d)", offset); |
| 641 _shared->audio_processing()->set_delay_offset_ms(offset); | 614 _shared->audio_processing()->set_delay_offset_ms(offset); |
| 642 } | 615 } |
| 643 | 616 |
| 644 int VoEAudioProcessingImpl::DelayOffsetMs() { | 617 int VoEAudioProcessingImpl::DelayOffsetMs() { |
| 645 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 646 "DelayOffsetMs()"); | |
| 647 return _shared->audio_processing()->delay_offset_ms(); | 618 return _shared->audio_processing()->delay_offset_ms(); |
| 648 } | 619 } |
| 649 | 620 |
| 650 int VoEAudioProcessingImpl::SetAecmMode(AecmModes mode, bool enableCNG) { | 621 int VoEAudioProcessingImpl::SetAecmMode(AecmModes mode, bool enableCNG) { |
| 651 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | 622 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), |
| 652 "SetAECMMode(mode = %d)", mode); | 623 "SetAECMMode(mode = %d)", mode); |
| 653 #ifdef WEBRTC_VOICE_ENGINE_ECHO | 624 #ifdef WEBRTC_VOICE_ENGINE_ECHO |
| 654 if (!_shared->statistics().Initialized()) { | 625 if (!_shared->statistics().Initialized()) { |
| 655 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 626 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 656 return -1; | 627 return -1; |
| (...skipping 36 matching lines...) |
| 693 | 664 |
| 694 return 0; | 665 return 0; |
| 695 #else | 666 #else |
| 696 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 667 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 697 "SetAECMMode() EC is not supported"); | 668 "SetAECMMode() EC is not supported"); |
| 698 return -1; | 669 return -1; |
| 699 #endif | 670 #endif |
| 700 } | 671 } |
| 701 | 672 |
| 702 int VoEAudioProcessingImpl::GetAecmMode(AecmModes& mode, bool& enabledCNG) { | 673 int VoEAudioProcessingImpl::GetAecmMode(AecmModes& mode, bool& enabledCNG) { |
| 703 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 704 "GetAECMMode(mode=?)"); | |
| 705 #ifdef WEBRTC_VOICE_ENGINE_ECHO | 674 #ifdef WEBRTC_VOICE_ENGINE_ECHO |
| 706 if (!_shared->statistics().Initialized()) { | 675 if (!_shared->statistics().Initialized()) { |
| 707 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 676 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 708 return -1; | 677 return -1; |
| 709 } | 678 } |
| 710 | 679 |
| 711 enabledCNG = false; | 680 enabledCNG = false; |
| 712 | 681 |
| 713 EchoControlMobile::RoutingMode aecmMode = | 682 EchoControlMobile::RoutingMode aecmMode = |
| 714 _shared->audio_processing()->echo_control_mobile()->routing_mode(); | 683 _shared->audio_processing()->echo_control_mobile()->routing_mode(); |
| (...skipping 34 matching lines...) |
| 749 AudioProcessing::kNoError) { | 718 AudioProcessing::kNoError) { |
| 750 _shared->SetLastError(VE_APM_ERROR, kTraceError, | 719 _shared->SetLastError(VE_APM_ERROR, kTraceError, |
| 751 "HighPassFilter::Enable() failed."); | 720 "HighPassFilter::Enable() failed."); |
| 752 return -1; | 721 return -1; |
| 753 } | 722 } |
| 754 | 723 |
| 755 return 0; | 724 return 0; |
| 756 } | 725 } |
| 757 | 726 |
| 758 bool VoEAudioProcessingImpl::IsHighPassFilterEnabled() { | 727 bool VoEAudioProcessingImpl::IsHighPassFilterEnabled() { |
| 759 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 760 "IsHighPassFilterEnabled()"); | |
| 761 return _shared->audio_processing()->high_pass_filter()->is_enabled(); | 728 return _shared->audio_processing()->high_pass_filter()->is_enabled(); |
| 762 } | 729 } |
| 763 | 730 |
| 764 int VoEAudioProcessingImpl::RegisterRxVadObserver(int channel, | 731 int VoEAudioProcessingImpl::RegisterRxVadObserver(int channel, |
| 765 VoERxVadCallback& observer) { | 732 VoERxVadCallback& observer) { |
| 766 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | 733 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), |
| 767 "RegisterRxVadObserver()"); | 734 "RegisterRxVadObserver()"); |
| 768 if (!_shared->statistics().Initialized()) { | 735 if (!_shared->statistics().Initialized()) { |
| 769 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 736 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 770 return -1; | 737 return -1; |
| (...skipping 66 matching lines...) |
| 837 } | 804 } |
| 838 return 0; | 805 return 0; |
| 839 #else | 806 #else |
| 840 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 807 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 841 "SetEcStatus() EC is not supported"); | 808 "SetEcStatus() EC is not supported"); |
| 842 return -1; | 809 return -1; |
| 843 #endif | 810 #endif |
| 844 } | 811 } |
| 845 | 812 |
| 846 int VoEAudioProcessingImpl::GetEcMetricsStatus(bool& enabled) { | 813 int VoEAudioProcessingImpl::GetEcMetricsStatus(bool& enabled) { |
| 847 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 848 "GetEcMetricsStatus(enabled=?)"); | |
| 849 #ifdef WEBRTC_VOICE_ENGINE_ECHO | 814 #ifdef WEBRTC_VOICE_ENGINE_ECHO |
| 850 if (!_shared->statistics().Initialized()) { | 815 if (!_shared->statistics().Initialized()) { |
| 851 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 816 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 852 return -1; | 817 return -1; |
| 853 } | 818 } |
| 854 | 819 |
| 855 bool echo_mode = | 820 bool echo_mode = |
| 856 _shared->audio_processing()->echo_cancellation()->are_metrics_enabled(); | 821 _shared->audio_processing()->echo_cancellation()->are_metrics_enabled(); |
| 857 bool delay_mode = _shared->audio_processing() | 822 bool delay_mode = _shared->audio_processing() |
| 858 ->echo_cancellation() | 823 ->echo_cancellation() |
| 859 ->is_delay_logging_enabled(); | 824 ->is_delay_logging_enabled(); |
| 860 | 825 |
| 861 if (echo_mode != delay_mode) { | 826 if (echo_mode != delay_mode) { |
| 862 _shared->SetLastError( | 827 _shared->SetLastError( |
| 863 VE_APM_ERROR, kTraceError, | 828 VE_APM_ERROR, kTraceError, |
| 864 "GetEcMetricsStatus() delay logging and echo mode are not the same"); | 829 "GetEcMetricsStatus() delay logging and echo mode are not the same"); |
| 865 return -1; | 830 return -1; |
| 866 } | 831 } |
| 867 | 832 |
| 868 enabled = echo_mode; | 833 enabled = echo_mode; |
| 869 | 834 |
| 870 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 871 "GetEcMetricsStatus() => enabled=%d", enabled); | |
| 872 return 0; | 835 return 0; |
| 873 #else | 836 #else |
| 874 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 837 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 875 "SetEcStatus() EC is not supported"); | 838 "SetEcStatus() EC is not supported"); |
| 876 return -1; | 839 return -1; |
| 877 #endif | 840 #endif |
| 878 } | 841 } |
| 879 | 842 |
| 880 int VoEAudioProcessingImpl::GetEchoMetrics(int& ERL, | 843 int VoEAudioProcessingImpl::GetEchoMetrics(int& ERL, |
| 881 int& ERLE, | 844 int& ERLE, |
| 882 int& RERL, | 845 int& RERL, |
| 883 int& A_NLP) { | 846 int& A_NLP) { |
| 884 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 885 "GetEchoMetrics(ERL=?, ERLE=?, RERL=?, A_NLP=?)"); | |
| 886 #ifdef WEBRTC_VOICE_ENGINE_ECHO | 847 #ifdef WEBRTC_VOICE_ENGINE_ECHO |
| 887 if (!_shared->statistics().Initialized()) { | 848 if (!_shared->statistics().Initialized()) { |
| 888 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 849 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 889 return -1; | 850 return -1; |
| 890 } | 851 } |
| 891 if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) { | 852 if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) { |
| 892 _shared->SetLastError( | 853 _shared->SetLastError( |
| 893 VE_APM_ERROR, kTraceWarning, | 854 VE_APM_ERROR, kTraceWarning, |
| 894 "GetEchoMetrics() AudioProcessingModule AEC is not enabled"); | 855 "GetEchoMetrics() AudioProcessingModule AEC is not enabled"); |
| 895 return -1; | 856 return -1; |
| 896 } | 857 } |
| 897 | 858 |
| 898 // Get Echo Metrics from Audio Processing Module. | 859 // Get Echo Metrics from Audio Processing Module. |
| 899 EchoCancellation::Metrics echoMetrics; | 860 EchoCancellation::Metrics echoMetrics; |
| 900 if (_shared->audio_processing()->echo_cancellation()->GetMetrics( | 861 if (_shared->audio_processing()->echo_cancellation()->GetMetrics( |
| 901 &echoMetrics)) { | 862 &echoMetrics)) { |
| 902 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1), | 863 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1), |
| 903 "GetEchoMetrics(), AudioProcessingModule metrics error"); | 864 "GetEchoMetrics(), AudioProcessingModule metrics error"); |
| 904 return -1; | 865 return -1; |
| 905 } | 866 } |
| 906 | 867 |
| 907 // Echo quality metrics. | 868 // Echo quality metrics. |
| 908 ERL = echoMetrics.echo_return_loss.instant; | 869 ERL = echoMetrics.echo_return_loss.instant; |
| 909 ERLE = echoMetrics.echo_return_loss_enhancement.instant; | 870 ERLE = echoMetrics.echo_return_loss_enhancement.instant; |
| 910 RERL = echoMetrics.residual_echo_return_loss.instant; | 871 RERL = echoMetrics.residual_echo_return_loss.instant; |
| 911 A_NLP = echoMetrics.a_nlp.instant; | 872 A_NLP = echoMetrics.a_nlp.instant; |
| 912 | 873 |
| 913 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 914 "GetEchoMetrics() => ERL=%d, ERLE=%d, RERL=%d, A_NLP=%d", ERL, | |
| 915 ERLE, RERL, A_NLP); | |
| 916 return 0; | 874 return 0; |
| 917 #else | 875 #else |
| 918 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 876 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 919 "SetEcStatus() EC is not supported"); | 877 "SetEcStatus() EC is not supported"); |
| 920 return -1; | 878 return -1; |
| 921 #endif | 879 #endif |
| 922 } | 880 } |
| 923 | 881 |
| 924 int VoEAudioProcessingImpl::GetEcDelayMetrics(int& delay_median, | 882 int VoEAudioProcessingImpl::GetEcDelayMetrics(int& delay_median, |
| 925 int& delay_std, | 883 int& delay_std, |
| 926 float& fraction_poor_delays) { | 884 float& fraction_poor_delays) { |
| 927 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 928 "GetEcDelayMetrics(median=?, std=?, fraction_poor_delays=?)"); | |
| 929 #ifdef WEBRTC_VOICE_ENGINE_ECHO | 885 #ifdef WEBRTC_VOICE_ENGINE_ECHO |
| 930 if (!_shared->statistics().Initialized()) { | 886 if (!_shared->statistics().Initialized()) { |
| 931 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 887 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 932 return -1; | 888 return -1; |
| 933 } | 889 } |
| 934 if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) { | 890 if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) { |
| 935 _shared->SetLastError( | 891 _shared->SetLastError( |
| 936 VE_APM_ERROR, kTraceWarning, | 892 VE_APM_ERROR, kTraceWarning, |
| 937 "GetEcDelayMetrics() AudioProcessingModule AEC is not enabled"); | 893 "GetEcDelayMetrics() AudioProcessingModule AEC is not enabled"); |
| 938 return -1; | 894 return -1; |
| 939 } | 895 } |
| 940 | 896 |
| 941 int median = 0; | 897 int median = 0; |
| 942 int std = 0; | 898 int std = 0; |
| 943 float poor_fraction = 0; | 899 float poor_fraction = 0; |
| 944 // Get delay-logging values from Audio Processing Module. | 900 // Get delay-logging values from Audio Processing Module. |
| 945 if (_shared->audio_processing()->echo_cancellation()->GetDelayMetrics( | 901 if (_shared->audio_processing()->echo_cancellation()->GetDelayMetrics( |
| 946 &median, &std, &poor_fraction)) { | 902 &median, &std, &poor_fraction)) { |
| 947 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1), | 903 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1), |
| 948 "GetEcDelayMetrics(), AudioProcessingModule delay-logging " | 904 "GetEcDelayMetrics(), AudioProcessingModule delay-logging " |
| 949 "error"); | 905 "error"); |
| 950 return -1; | 906 return -1; |
| 951 } | 907 } |
| 952 | 908 |
| 953 // EC delay-logging metrics | 909 // EC delay-logging metrics |
| 954 delay_median = median; | 910 delay_median = median; |
| 955 delay_std = std; | 911 delay_std = std; |
| 956 fraction_poor_delays = poor_fraction; | 912 fraction_poor_delays = poor_fraction; |
| 957 | 913 |
| 958 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 959 "GetEcDelayMetrics() => delay_median=%d, delay_std=%d, " | |
| 960 "fraction_poor_delays=%f", | |
| 961 delay_median, delay_std, fraction_poor_delays); | |
| 962 return 0; | 914 return 0; |
| 963 #else | 915 #else |
| 964 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, | 916 _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 965 "SetEcStatus() EC is not supported"); | 917 "SetEcStatus() EC is not supported"); |
| 966 return -1; | 918 return -1; |
| 967 #endif | 919 #endif |
| 968 } | 920 } |
| 969 | 921 |
| 970 int VoEAudioProcessingImpl::StartDebugRecording(const char* fileNameUTF8) { | 922 int VoEAudioProcessingImpl::StartDebugRecording(const char* fileNameUTF8) { |
| 971 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | 923 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), |
| (...skipping 53 matching lines...) |
| 1025 VE_APM_ERROR, kTraceWarning, | 977 VE_APM_ERROR, kTraceWarning, |
| 1026 "SetTypingDetectionStatus() failed to set VAD likelihood to low"); | 978 "SetTypingDetectionStatus() failed to set VAD likelihood to low"); |
| 1027 return -1; | 979 return -1; |
| 1028 } | 980 } |
| 1029 | 981 |
| 1030 return 0; | 982 return 0; |
| 1031 #endif | 983 #endif |
| 1032 } | 984 } |
| 1033 | 985 |
| 1034 int VoEAudioProcessingImpl::GetTypingDetectionStatus(bool& enabled) { | 986 int VoEAudioProcessingImpl::GetTypingDetectionStatus(bool& enabled) { |
| 1035 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 1036 "GetTypingDetectionStatus()"); | |
| 1037 if (!_shared->statistics().Initialized()) { | 987 if (!_shared->statistics().Initialized()) { |
| 1038 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 988 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 1039 return -1; | 989 return -1; |
| 1040 } | 990 } |
| 1041 // Just use the VAD state to determine if we should enable typing | 991 // Just use the VAD state to determine if we should enable typing |
| 1042 // detection or not | 992 // detection or not |
| 1043 | 993 |
| 1044 enabled = _shared->audio_processing()->voice_detection()->is_enabled(); | 994 enabled = _shared->audio_processing()->voice_detection()->is_enabled(); |
| 1045 | 995 |
| 1046 return 0; | 996 return 0; |
| 1047 } | 997 } |
| 1048 | 998 |
| 1049 int VoEAudioProcessingImpl::TimeSinceLastTyping(int& seconds) { | 999 int VoEAudioProcessingImpl::TimeSinceLastTyping(int& seconds) { |
| 1050 WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), | |
| 1051 "TimeSinceLastTyping()"); | |
| 1052 #if !defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION) | 1000 #if !defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION) |
| 1053 NOT_SUPPORTED(_shared->statistics()); | 1001 NOT_SUPPORTED(_shared->statistics()); |
| 1054 #else | 1002 #else |
| 1055 if (!_shared->statistics().Initialized()) { | 1003 if (!_shared->statistics().Initialized()) { |
| 1056 _shared->SetLastError(VE_NOT_INITED, kTraceError); | 1004 _shared->SetLastError(VE_NOT_INITED, kTraceError); |
| 1057 return -1; | 1005 return -1; |
| 1058 } | 1006 } |
| 1059 // Check if typing detection is enabled | 1007 // Check if typing detection is enabled |
| 1060 bool enabled = _shared->audio_processing()->voice_detection()->is_enabled(); | 1008 bool enabled = _shared->audio_processing()->voice_detection()->is_enabled(); |
| 1061 if (enabled) { | 1009 if (enabled) { |
| (...skipping 33 matching lines...) |
| 1095 } | 1043 } |
| 1096 | 1044 |
| 1097 bool VoEAudioProcessingImpl::IsStereoChannelSwappingEnabled() { | 1045 bool VoEAudioProcessingImpl::IsStereoChannelSwappingEnabled() { |
| 1098 LOG_API0(); | 1046 LOG_API0(); |
| 1099 return _shared->transmit_mixer()->IsStereoChannelSwappingEnabled(); | 1047 return _shared->transmit_mixer()->IsStereoChannelSwappingEnabled(); |
| 1100 } | 1048 } |
| 1101 | 1049 |
| 1102 #endif // #ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API | 1050 #endif // #ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API |
| 1103 | 1051 |
| 1104 } // namespace webrtc | 1052 } // namespace webrtc |
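After the trace removal, the only non-trivial logic left in GetNsStatus() is the switch that translates the audio-processing module's NoiseSuppression::Level into the VoE NsModes enum. The sketch below reproduces that mapping in isolation so it can be compiled and checked on its own; the enum stand-ins and the NsLevelToMode helper are illustrative only and are not part of this CL or the WebRTC API.

```cpp
// Standalone sketch of the level-to-mode translation performed by
// VoEAudioProcessingImpl::GetNsStatus(). The enums below are local stand-ins
// for webrtc::NoiseSuppression::Level and webrtc::NsModes; the names and
// values are assumptions made only so this snippet compiles by itself.
#include <cassert>

enum class ApmNsLevel { kLow, kModerate, kHigh, kVeryHigh };
enum class VoeNsMode {
  kNsLowSuppression,
  kNsModerateSuppression,
  kNsHighSuppression,
  kNsVeryHighSuppression
};

// Mirrors the switch in GetNsStatus(): each APM suppression level maps to
// exactly one VoE mode.
VoeNsMode NsLevelToMode(ApmNsLevel level) {
  switch (level) {
    case ApmNsLevel::kLow:
      return VoeNsMode::kNsLowSuppression;
    case ApmNsLevel::kModerate:
      return VoeNsMode::kNsModerateSuppression;
    case ApmNsLevel::kHigh:
      return VoeNsMode::kNsHighSuppression;
    case ApmNsLevel::kVeryHigh:
      return VoeNsMode::kNsVeryHighSuppression;
  }
  assert(false);  // Unreachable for valid levels.
  return VoeNsMode::kNsModerateSuppression;
}
```

The AGC path in GetAgcStatus() follows the same shape, translating GainControl::Mode into AgcModes.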