Chromium Code Reviews

Unified diff: webrtc/modules/audio_device/win/audio_device_core_win.cc

Issue 2712743004: Support 4 channel mic in Windows Core Audio (Closed)
Patch Set: Fix another non-Windows build error (created 3 years, 9 months ago)
 /*
  * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */
 
(...skipping 506 matching lines...)
     _hCaptureStartedEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
     _hSetCaptureVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
 
     _perfCounterFreq.QuadPart = 1;
     _perfCounterFactor = 0.0;
     _avgCPULoad = 0.0;
 
     // list of number of channels to use on recording side
     _recChannelsPrioList[0] = 2;  // stereo is prio 1
     _recChannelsPrioList[1] = 1;  // mono is prio 2
+    _recChannelsPrioList[2] = 4;  // quad is prio 3
 
     // list of number of channels to use on playout side
     _playChannelsPrioList[0] = 2;  // stereo is prio 1
     _playChannelsPrioList[1] = 1;  // mono is prio 2
 
     HRESULT hr;
 
     // We know that this API will work since it has already been verified in
     // CoreAudioIsSupported, hence no need to check for errors here as well.
 
(...skipping 1987 matching lines...)
     }
 
     if (_builtInAecEnabled)
     {
         // The DMO will configure the capture device.
         return InitRecordingDMO();
     }
 
     HRESULT hr = S_OK;
     WAVEFORMATEX* pWfxIn = NULL;
-    WAVEFORMATEX Wfx = WAVEFORMATEX();
+    WAVEFORMATEXTENSIBLE Wfx = WAVEFORMATEXTENSIBLE();
     WAVEFORMATEX* pWfxClosestMatch = NULL;
 
     // Create COM object with IAudioClient interface.
     SAFE_RELEASE(_ptrClientIn);
     hr = _ptrDeviceIn->Activate(
                           __uuidof(IAudioClient),
                           CLSCTX_ALL,
                           NULL,
                           (void**)&_ptrClientIn);
     EXIT_ON_ERROR(hr);
 
(...skipping 13 matching lines...)
         // for buffer estimation
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec: %d", pWfxIn->nAvgBytesPerSec);
         // block size of data
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign     : %d", pWfxIn->nBlockAlign);
         // number of bits per sample of mono data
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample  : %d", pWfxIn->wBitsPerSample);
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize          : %d", pWfxIn->cbSize);
     }
 
     // Set wave format
-    Wfx.wFormatTag = WAVE_FORMAT_PCM;
-    Wfx.wBitsPerSample = 16;
-    Wfx.cbSize = 0;
+    Wfx.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
+    Wfx.Format.wBitsPerSample = 16;
+    Wfx.Format.cbSize = 22;
+    Wfx.dwChannelMask = 0;
+    Wfx.Samples.wValidBitsPerSample = Wfx.Format.wBitsPerSample;
+    Wfx.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
 
     const int freqs[6] = {48000, 44100, 16000, 96000, 32000, 8000};
     hr = S_FALSE;
 
     // Iterate over frequencies and channels, in order of priority
     for (unsigned int freq = 0; freq < sizeof(freqs)/sizeof(freqs[0]); freq++)
     {
         for (unsigned int chan = 0; chan < sizeof(_recChannelsPrioList)/sizeof(_recChannelsPrioList[0]); chan++)
         {
-            Wfx.nChannels = _recChannelsPrioList[chan];
-            Wfx.nSamplesPerSec = freqs[freq];
-            Wfx.nBlockAlign = Wfx.nChannels * Wfx.wBitsPerSample / 8;
-            Wfx.nAvgBytesPerSec = Wfx.nSamplesPerSec * Wfx.nBlockAlign;
+            Wfx.Format.nChannels = _recChannelsPrioList[chan];
+            Wfx.Format.nSamplesPerSec = freqs[freq];
+            Wfx.Format.nBlockAlign = Wfx.Format.nChannels *
+                                     Wfx.Format.wBitsPerSample / 8;
+            Wfx.Format.nAvgBytesPerSec = Wfx.Format.nSamplesPerSec *
+                                         Wfx.Format.nBlockAlign;
             // If the method succeeds and the audio endpoint device supports the specified stream format,
             // it returns S_OK. If the method succeeds and provides a closest match to the specified format,
             // it returns S_FALSE.
             hr = _ptrClientIn->IsFormatSupported(
                                   AUDCLNT_SHAREMODE_SHARED,
-                                  &Wfx,
+                                  (WAVEFORMATEX*)&Wfx,
                                   &pWfxClosestMatch);
             if (hr == S_OK)
             {
                 break;
             }
             else
             {
                 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels=%d, nSamplesPerSec=%d is not supported",
-                    Wfx.nChannels, Wfx.nSamplesPerSec);
+                    Wfx.Format.nChannels, Wfx.Format.nSamplesPerSec);
             }
         }
         if (hr == S_OK)
             break;
     }
 
     if (hr == S_OK)
     {
-        _recAudioFrameSize = Wfx.nBlockAlign;
-        _recSampleRate = Wfx.nSamplesPerSec;
-        _recBlockSize = Wfx.nSamplesPerSec/100;
-        _recChannels = Wfx.nChannels;
+        _recAudioFrameSize = Wfx.Format.nBlockAlign;
+        _recSampleRate = Wfx.Format.nSamplesPerSec;
+        _recBlockSize = Wfx.Format.nSamplesPerSec/100;
+        _recChannels = Wfx.Format.nChannels;
 
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "VoE selected this capturing format:");
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag        : 0x%X (%u)", Wfx.wFormatTag, Wfx.wFormatTag);
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels         : %d", Wfx.nChannels);
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec    : %d", Wfx.nSamplesPerSec);
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec   : %d", Wfx.nAvgBytesPerSec);
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign       : %d", Wfx.nBlockAlign);
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample    : %d", Wfx.wBitsPerSample);
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize            : %d", Wfx.cbSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag        : 0x%X (%u)", Wfx.Format.wFormatTag,
+                     Wfx.Format.wFormatTag);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels         : %d", Wfx.Format.nChannels);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec    : %d", Wfx.Format.nSamplesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec   : %d", Wfx.Format.nAvgBytesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign       : %d", Wfx.Format.nBlockAlign);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample    : %d", Wfx.Format.wBitsPerSample);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize            : %d", Wfx.Format.cbSize);
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Additional settings:");
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_recAudioFrameSize: %d", _recAudioFrameSize);
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_recBlockSize     : %d", _recBlockSize);
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_recChannels      : %d", _recChannels);
     }
 
     // Create a capturing stream.
     hr = _ptrClientIn->Initialize(
                           AUDCLNT_SHAREMODE_SHARED,             // share Audio Engine with other applications
                           AUDCLNT_STREAMFLAGS_EVENTCALLBACK |   // processing of the audio buffer by the client will be event driven
                           AUDCLNT_STREAMFLAGS_NOPERSIST,        // volume and mute settings for an audio session will not persist across system restarts
                           0,                                    // required for event-driven shared mode
                           0,                                    // periodicity
-                          &Wfx,                                 // selected wave format
+                          (WAVEFORMATEX*)&Wfx,                  // selected wave format
                           NULL);                                // session GUID
 
 
     if (hr != S_OK)
     {
         WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "IAudioClient::Initialize() failed:");
         if (pWfxClosestMatch != NULL)
         {
             WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "closest mix format: #channels=%d, samples/sec=%d, bits/sample=%d",
                 pWfxClosestMatch->nChannels, pWfxClosestMatch->nSamplesPerSec, pWfxClosestMatch->wBitsPerSample);
 
(...skipping 2447 matching lines...)
   int key_down = 0;
   for (int key = VK_SPACE; key < VK_NUMLOCK; key++) {
     short res = GetAsyncKeyState(key);
     key_down |= res & 0x1;  // Get the LSB
   }
   return (key_down > 0);
 }
 }  // namespace webrtc
 
 #endif  // WEBRTC_WINDOWS_CORE_AUDIO_BUILD
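For reference, below is a minimal, self-contained sketch (not part of the patch) of the technique this CL relies on: describing a multi-channel 16-bit PCM capture format with WAVEFORMATEXTENSIBLE and probing it with IAudioClient::IsFormatSupported in shared mode. The helper names BuildCaptureFormat and SupportsFourChannelCapture, and the 4-channel/48 kHz parameters, are illustrative assumptions rather than code from this CL.

// Sketch only: build a WAVEFORMATEXTENSIBLE for 16-bit PCM capture with an
// arbitrary channel count, then ask the audio engine whether it is supported.
#include <windows.h>
#include <objbase.h>
#include <audioclient.h>
#include <mmreg.h>
#include <ks.h>
#include <ksmedia.h>

static WAVEFORMATEXTENSIBLE BuildCaptureFormat(WORD channels, DWORD samples_per_sec) {
  WAVEFORMATEXTENSIBLE wfx = {};
  wfx.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
  wfx.Format.nChannels = channels;
  wfx.Format.nSamplesPerSec = samples_per_sec;
  wfx.Format.wBitsPerSample = 16;
  wfx.Format.nBlockAlign = channels * wfx.Format.wBitsPerSample / 8;
  wfx.Format.nAvgBytesPerSec = samples_per_sec * wfx.Format.nBlockAlign;
  wfx.Format.cbSize = 22;  // bytes of WAVEFORMATEXTENSIBLE that follow WAVEFORMATEX
  wfx.Samples.wValidBitsPerSample = wfx.Format.wBitsPerSample;
  wfx.dwChannelMask = 0;   // no explicit speaker positions, as in the patch
  wfx.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
  return wfx;
}

// Assumes `client` is an already activated IAudioClient (hypothetical caller).
static bool SupportsFourChannelCapture(IAudioClient* client) {
  WAVEFORMATEXTENSIBLE wfx = BuildCaptureFormat(4, 48000);
  WAVEFORMATEX* closest = NULL;
  HRESULT hr = client->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED,
                                         reinterpret_cast<WAVEFORMATEX*>(&wfx),
                                         &closest);
  if (closest != NULL)
    CoTaskMemFree(closest);  // S_FALSE returns a suggested closest match here
  return hr == S_OK;
}

As in the patched loop, S_OK means the endpoint accepts the exact format, while S_FALSE means only a closest match is available, so the caller moves on to the next channel count or sample rate in its priority list.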
