1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "webrtc/modules/audio_device/audio_device_config.h"
12 #include "webrtc/modules/audio_device/audio_device_utility.h"
13 #include "webrtc/modules/audio_device/win/audio_device_wave_win.h"
14
15 #include "webrtc/system_wrappers/interface/event_wrapper.h"
16 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
17 #include "webrtc/system_wrappers/interface/trace.h"
18
19 #include <windows.h>
20 #include <objbase.h> // CoTaskMemAlloc, CoTaskMemFree
21 #include <strsafe.h> // StringCchCopy(), StringCchCat(), StringCchPrintf()
22 #include <assert.h>
23
24 // Avoids the need for the Windows 7 SDK.
25 #ifndef WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE
26 #define WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE 0x0010
27 #endif
28
29 // Supported in Windows Vista and Windows 7.
30 // http://msdn.microsoft.com/en-us/library/dd370819(v=VS.85).aspx
31 // Taken from Mmddk.h.
32 #define DRV_RESERVED 0x0800
33 #define DRV_QUERYFUNCTIONINSTANCEID (DRV_RESERVED + 17)
34 #define DRV_QUERYFUNCTIONINSTANCEIDSIZE (DRV_RESERVED + 18)
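// (These resolve to 0x0811 and 0x0812, respectively.)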
35
36 #define POW2(A) (2 << ((A) - 1))
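// POW2(A) evaluates to 2^A for A >= 1, e.g. POW2(4) == (2 << 3) == 16.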
37
38 namespace webrtc {
39
40 // ============================================================================
41 // Construction & Destruction
42 // ============================================================================
43
44 // ----------------------------------------------------------------------------
45 // AudioDeviceWindowsWave - ctor
46 // ----------------------------------------------------------------------------
47
48 AudioDeviceWindowsWave::AudioDeviceWindowsWave(const int32_t id) :
49 _ptrAudioBuffer(NULL),
50 _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
51 _timeEvent(*EventWrapper::Create()),
52 _recStartEvent(*EventWrapper::Create()),
53 _playStartEvent(*EventWrapper::Create()),
54 _hGetCaptureVolumeThread(NULL),
55 _hShutdownGetVolumeEvent(NULL),
56 _hSetCaptureVolumeThread(NULL),
57 _hShutdownSetVolumeEvent(NULL),
58 _hSetCaptureVolumeEvent(NULL),
59 _ptrThread(NULL),
60 _threadID(0),
61 _critSectCb(*CriticalSectionWrapper::CreateCriticalSection()),
62 _id(id),
63 _mixerManager(id),
64 _usingInputDeviceIndex(false),
65 _usingOutputDeviceIndex(false),
66 _inputDevice(AudioDeviceModule::kDefaultDevice),
67 _outputDevice(AudioDeviceModule::kDefaultDevice),
68 _inputDeviceIndex(0),
69 _outputDeviceIndex(0),
70 _inputDeviceIsSpecified(false),
71 _outputDeviceIsSpecified(false),
72 _initialized(false),
73 _recIsInitialized(false),
74 _playIsInitialized(false),
75 _recording(false),
76 _playing(false),
77 _startRec(false),
78 _stopRec(false),
79 _startPlay(false),
80 _stopPlay(false),
81 _AGC(false),
82 _hWaveIn(NULL),
83 _hWaveOut(NULL),
84 _recChannels(N_REC_CHANNELS),
85 _playChannels(N_PLAY_CHANNELS),
86 _recBufCount(0),
87 _recPutBackDelay(0),
88 _recDelayCount(0),
89 _playBufCount(0),
90 _prevPlayTime(0),
91 _prevRecTime(0),
92 _prevTimerCheckTime(0),
93 _timesdwBytes(0),
94 _timerFaults(0),
95 _timerRestartAttempts(0),
96 _no_of_msecleft_warnings(0),
97 _MAX_minBuffer(65),
98 _useHeader(0),
99 _dTcheckPlayBufDelay(10),
100 _playBufDelay(80),
101 _playBufDelayFixed(80),
102 _minPlayBufDelay(20),
103 _avgCPULoad(0),
104 _sndCardPlayDelay(0),
105 _sndCardRecDelay(0),
106 _plSampOld(0),
107 _rcSampOld(0),
108 _playBufType(AudioDeviceModule::kAdaptiveBufferSize),
109 _recordedBytes(0),
110 _playWarning(0),
111 _playError(0),
112 _recWarning(0),
113 _recError(0),
114 _newMicLevel(0),
115 _minMicVolume(0),
116 _maxMicVolume(0)
117 {
118 WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
119
120 // Initialize the performance counter frequency; set it to 0 if the query fails.
121 if (!QueryPerformanceFrequency(&_perfFreq))
122 {
123 _perfFreq.QuadPart = 0;
124 }
125
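// Auto-reset events (bManualReset = FALSE), created unnamed and initially non-signaled.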
126 _hShutdownGetVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
127 _hShutdownSetVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
128 _hSetCaptureVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
129 }
130
131 // ----------------------------------------------------------------------------
132 // AudioDeviceWindowsWave - dtor
133 // ----------------------------------------------------------------------------
134
135 AudioDeviceWindowsWave::~AudioDeviceWindowsWave()
136 {
137 WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
138
139 Terminate();
140
141 delete &_recStartEvent;
142 delete &_playStartEvent;
143 delete &_timeEvent;
144 delete &_critSect;
145 delete &_critSectCb;
146
147 if (NULL != _hShutdownGetVolumeEvent)
148 {
149 CloseHandle(_hShutdownGetVolumeEvent);
150 _hShutdownGetVolumeEvent = NULL;
151 }
152
153 if (NULL != _hShutdownSetVolumeEvent)
154 {
155 CloseHandle(_hShutdownSetVolumeEvent);
156 _hShutdownSetVolumeEvent = NULL;
157 }
158
159 if (NULL != _hSetCaptureVolumeEvent)
160 {
161 CloseHandle(_hSetCaptureVolumeEvent);
162 _hSetCaptureVolumeEvent = NULL;
163 }
164 }
165
166 // ============================================================================
167 // API
168 // ============================================================================
169
170 // ----------------------------------------------------------------------------
171 // AttachAudioBuffer
172 // ----------------------------------------------------------------------------
173
174 void AudioDeviceWindowsWave::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
175 {
176
177 CriticalSectionScoped lock(&_critSect);
178
179 _ptrAudioBuffer = audioBuffer;
180
181 // inform the AudioBuffer about default settings for this implementation
182 _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
183 _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
184 _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
185 _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
186 }
187
188 // ----------------------------------------------------------------------------
189 // ActiveAudioLayer
190 // ----------------------------------------------------------------------------
191
192 int32_t AudioDeviceWindowsWave::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const
193 {
194 audioLayer = AudioDeviceModule::kWindowsWaveAudio;
195 return 0;
196 }
197
198 // ----------------------------------------------------------------------------
199 // Init
200 // ----------------------------------------------------------------------------
201
202 int32_t AudioDeviceWindowsWave::Init()
203 {
204
205 CriticalSectionScoped lock(&_critSect);
206
207 if (_initialized)
208 {
209 return 0;
210 }
211
212 const uint32_t nowTime(AudioDeviceUtility::GetTimeInMS());
213
214 _recordedBytes = 0;
215 _prevRecByteCheckTime = nowTime;
216 _prevRecTime = nowTime;
217 _prevPlayTime = nowTime;
218 _prevTimerCheckTime = nowTime;
219
220 _playWarning = 0;
221 _playError = 0;
222 _recWarning = 0;
223 _recError = 0;
224
225 _mixerManager.EnumerateAll();
226
227 if (_ptrThread)
228 {
229 // thread is already created and active
230 return 0;
231 }
232
233 const char* threadName = "webrtc_audio_module_thread";
234 _ptrThread = ThreadWrapper::CreateThread(ThreadFunc,
235 this,
236 kRealtimePriority,
237 threadName);
238 if (_ptrThread == NULL)
239 {
240 WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
241 "failed to create the audio thread");
242 return -1;
243 }
244
245 unsigned int threadID(0);
246 if (!_ptrThread->Start(threadID))
247 {
248 WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
249 "failed to start the audio thread");
250 delete _ptrThread;
251 _ptrThread = NULL;
252 return -1;
253 }
254 _threadID = threadID;
255
256 const bool periodic(true);
257 if (!_timeEvent.StartTimer(periodic, TIMER_PERIOD_MS))
258 {
259 WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
260 "failed to start the timer event");
261 if (_ptrThread->Stop())
262 {
263 delete _ptrThread;
264 _ptrThread = NULL;
265 }
266 else
267 {
268 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
269 "unable to stop the activated thread");
270 }
271 return -1;
272 }
273 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
274 "periodic timer (dT=%d) is now active", TIMER_PERIOD_MS);
275
276 _hGetCaptureVolumeThread = CreateThread(NULL,
277 0,
278 GetCaptureVolumeThread,
279 this,
280 0,
281 NULL);
282 if (_hGetCaptureVolumeThread == NULL)
283 {
284 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
285 " failed to create the volume getter thread");
286 return -1;
287 }
288
289 SetThreadPriority(_hGetCaptureVolumeThread, THREAD_PRIORITY_NORMAL);
290
291 _hSetCaptureVolumeThread = CreateThread(NULL,
292 0,
293 SetCaptureVolumeThread,
294 this,
295 0,
296 NULL);
297 if (_hSetCaptureVolumeThread == NULL)
298 {
299 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
300 " failed to create the volume setter thread");
301 return -1;
302 }
303
304 SetThreadPriority(_hSetCaptureVolumeThread, THREAD_PRIORITY_NORMAL);
305
306 _initialized = true;
307
308 return 0;
309 }
310
311 // ----------------------------------------------------------------------------
312 // Terminate
313 // ----------------------------------------------------------------------------
314
315 int32_t AudioDeviceWindowsWave::Terminate()
316 {
317
318 if (!_initialized)
319 {
320 return 0;
321 }
322
323 _critSect.Enter();
324
325 _mixerManager.Close();
326
327 if (_ptrThread)
328 {
329 ThreadWrapper* tmpThread = _ptrThread;
330 _ptrThread = NULL;
331 _critSect.Leave();
332
333 tmpThread->SetNotAlive();
334 _timeEvent.Set();
335
336 if (tmpThread->Stop())
337 {
338 delete tmpThread;
339 }
340 else
341 {
342 // Note: the critical section was already released above; no extra Leave() is needed here.
343 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
344 "failed to close down the audio thread");
345 return -1;
346 }
347 }
348 else
349 {
350 _critSect.Leave();
351 }
352
353 _critSect.Enter();
354 SetEvent(_hShutdownGetVolumeEvent);
355 _critSect.Leave();
356 int32_t ret = WaitForSingleObject(_hGetCaptureVolumeThread, 2000);
357 if (ret != WAIT_OBJECT_0)
358 {
359 // the thread did not stop as it should
360 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
361 " failed to close down volume getter thread");
362 CloseHandle(_hGetCaptureVolumeThread);
363 _hGetCaptureVolumeThread = NULL;
364 return -1;
365 }
366 _critSect.Enter();
367 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
368 " volume getter thread is now closed");
369
370 SetEvent(_hShutdownSetVolumeEvent);
371 _critSect.Leave();
372 ret = WaitForSingleObject(_hSetCaptureVolumeThread, 2000);
373 if (ret != WAIT_OBJECT_0)
374 {
375 // the thread did not stop as it should
376 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
377 " failed to close down volume setter thread");
378 CloseHandle(_hSetCaptureVolumeThread);
379 _hSetCaptureVolumeThread = NULL;
380 return -1;
381 }
382 _critSect.Enter();
383 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
384 " volume setter thread is now closed");
385
386 CloseHandle(_hGetCaptureVolumeThread);
387 _hGetCaptureVolumeThread = NULL;
388
389 CloseHandle(_hSetCaptureVolumeThread);
390 _hSetCaptureVolumeThread = NULL;
391
392 _critSect.Leave();
393
394 _timeEvent.StopTimer();
395
396 _initialized = false;
397 _outputDeviceIsSpecified = false;
398 _inputDeviceIsSpecified = false;
399
400 return 0;
401 }
402
403
404 DWORD WINAPI AudioDeviceWindowsWave::GetCaptureVolumeThread(LPVOID context)
405 {
406 return(((AudioDeviceWindowsWave*)context)->DoGetCaptureVolumeThread());
407 }
408
409 DWORD WINAPI AudioDeviceWindowsWave::SetCaptureVolumeThread(LPVOID context)
410 {
411 return(((AudioDeviceWindowsWave*)context)->DoSetCaptureVolumeThread());
412 }
413
414 DWORD AudioDeviceWindowsWave::DoGetCaptureVolumeThread()
415 {
416 HANDLE waitObject = _hShutdownGetVolumeEvent;
417
418 while (1)
419 {
420 DWORD waitResult = WaitForSingleObject(waitObject,
421 GET_MIC_VOLUME_INTERVAL_MS);
422 switch (waitResult)
423 {
424 case WAIT_OBJECT_0: // _hShutdownGetVolumeEvent
425 return 0;
426 case WAIT_TIMEOUT: // timeout notification
427 break;
428 default: // unexpected error
429 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
430 " unknown wait termination on get volume thread");
431 return -1;
432 }
433
434 if (AGC())
435 {
436 uint32_t currentMicLevel = 0;
437 if (MicrophoneVolume(currentMicLevel) == 0)
438 {
439 // This doesn't set the system volume, just stores it.
440 _critSect.Enter();
441 if (_ptrAudioBuffer)
442 {
443 _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
444 }
445 _critSect.Leave();
446 }
447 }
448 }
449 }
450
451 DWORD AudioDeviceWindowsWave::DoSetCaptureVolumeThread()
452 {
453 HANDLE waitArray[2] = {_hShutdownSetVolumeEvent, _hSetCaptureVolumeEvent};
454
455 while (1)
456 {
457 DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, INFINITE);
458 switch (waitResult)
459 {
460 case WAIT_OBJECT_0: // _hShutdownSetVolumeEvent
461 return 0;
462 case WAIT_OBJECT_0 + 1: // _hSetCaptureVolumeEvent
463 break;
464 default: // unexpected error
465 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
466 " unknown wait termination on set volume thread");
467 return -1;
468 }
469
470 _critSect.Enter();
471 uint32_t newMicLevel = _newMicLevel;
472 _critSect.Leave();
473
474 if (SetMicrophoneVolume(newMicLevel) == -1)
475 {
476 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
477 " the required modification of the microphone volume failed");
478 }
479 }
480 return 0;
481 }
482
483 // ----------------------------------------------------------------------------
484 // Initialized
485 // ----------------------------------------------------------------------------
486
487 bool AudioDeviceWindowsWave::Initialized() const
488 {
489 return (_initialized);
490 }
491
492 // ----------------------------------------------------------------------------
493 // InitSpeaker
494 // ----------------------------------------------------------------------------
495
496 int32_t AudioDeviceWindowsWave::InitSpeaker()
497 {
498
499 CriticalSectionScoped lock(&_critSect);
500
501 if (_playing)
502 {
503 return -1;
504 }
505
506 if (_mixerManager.EnumerateSpeakers() == -1)
507 {
508 // failed to locate any valid/controllable speaker
509 return -1;
510 }
511
512 if (IsUsingOutputDeviceIndex())
513 {
514 if (_mixerManager.OpenSpeaker(OutputDeviceIndex()) == -1)
515 {
516 return -1;
517 }
518 }
519 else
520 {
521 if (_mixerManager.OpenSpeaker(OutputDevice()) == -1)
522 {
523 return -1;
524 }
525 }
526
527 return 0;
528 }
529
530 // ----------------------------------------------------------------------------
531 // InitMicrophone
532 // ----------------------------------------------------------------------------
533
534 int32_t AudioDeviceWindowsWave::InitMicrophone()
535 {
536
537 CriticalSectionScoped lock(&_critSect);
538
539 if (_recording)
540 {
541 return -1;
542 }
543
544 if (_mixerManager.EnumerateMicrophones() == -1)
545 {
546 // failed to locate any valid/controllable microphone
547 return -1;
548 }
549
550 if (IsUsingInputDeviceIndex())
551 {
552 if (_mixerManager.OpenMicrophone(InputDeviceIndex()) == -1)
553 {
554 return -1;
555 }
556 }
557 else
558 {
559 if (_mixerManager.OpenMicrophone(InputDevice()) == -1)
560 {
561 return -1;
562 }
563 }
564
565 uint32_t maxVol = 0;
566 if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1)
567 {
568 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
569 " unable to retrieve max microphone volume");
570 }
571 _maxMicVolume = maxVol;
572
573 uint32_t minVol = 0;
574 if (_mixerManager.MinMicrophoneVolume(minVol) == -1)
575 {
576 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
577 " unable to retrieve min microphone volume");
578 }
579 _minMicVolume = minVol;
580
581 return 0;
582 }
583
584 // ----------------------------------------------------------------------------
585 // SpeakerIsInitialized
586 // ----------------------------------------------------------------------------
587
588 bool AudioDeviceWindowsWave::SpeakerIsInitialized() const
589 {
590 return (_mixerManager.SpeakerIsInitialized());
591 }
592
593 // ----------------------------------------------------------------------------
594 // MicrophoneIsInitialized
595 // ----------------------------------------------------------------------------
596
597 bool AudioDeviceWindowsWave::MicrophoneIsInitialized() const
598 {
599 return (_mixerManager.MicrophoneIsInitialized());
600 }
601
602 // ----------------------------------------------------------------------------
603 // SpeakerVolumeIsAvailable
604 // ----------------------------------------------------------------------------
605
606 int32_t AudioDeviceWindowsWave::SpeakerVolumeIsAvailable(bool& available)
607 {
608
609 bool isAvailable(false);
610
611 // Enumerate all available speakers and make an attempt to open up the
612 // output mixer corresponding to the currently selected output device.
613 //
614 if (InitSpeaker() == -1)
615 {
616 // failed to find a valid speaker
617 available = false;
618 return 0;
619 }
620
621 // Check if the selected speaker has a volume control
622 //
623 _mixerManager.SpeakerVolumeIsAvailable(isAvailable);
624 available = isAvailable;
625
626 // Close the initialized output mixer
627 //
628 _mixerManager.CloseSpeaker();
629
630 return 0;
631 }
632
633 // ----------------------------------------------------------------------------
634 // SetSpeakerVolume
635 // ----------------------------------------------------------------------------
636
637 int32_t AudioDeviceWindowsWave::SetSpeakerVolume(uint32_t volume)
638 {
639
640 return (_mixerManager.SetSpeakerVolume(volume));
641 }
642
643 // ----------------------------------------------------------------------------
644 // SpeakerVolume
645 // ----------------------------------------------------------------------------
646
647 int32_t AudioDeviceWindowsWave::SpeakerVolume(uint32_t& volume) const
648 {
649
650 uint32_t level(0);
651
652 if (_mixerManager.SpeakerVolume(level) == -1)
653 {
654 return -1;
655 }
656
657 volume = level;
658 return 0;
659 }
660
661 // ----------------------------------------------------------------------------
662 // SetWaveOutVolume
663 //
664 // The low-order word contains the left-channel volume setting, and the
665 // high-order word contains the right-channel setting.
666 // A value of 0xFFFF represents full volume, and a value of 0x0000 is silence.
667 //
668 // If a device does not support both left and right volume control,
669 // the low-order word of dwVolume specifies the volume level,
670 // and the high-order word is ignored.
671 //
672 // Most devices do not support the full 16 bits of volume-level control
673 // and will not use the least-significant bits of the requested volume setting.
674 // For example, if a device supports 4 bits of volume control, the values
675 // 0x4000, 0x4FFF, and 0x43BE will all be truncated to 0x4000.
676 // ----------------------------------------------------------------------------
677
678 int32_t AudioDeviceWindowsWave::SetWaveOutVolume(uint16_t volumeLeft, uint16_t volumeRight)
679 {
680
681 MMRESULT res(0);
682 WAVEOUTCAPS caps;
683
684 CriticalSectionScoped lock(&_critSect);
685
686 if (_hWaveOut == NULL)
687 {
688 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no open playout device exists => using default");
689 }
690
691 // To determine whether the device supports volume control on both
692 // the left and right channels, use the WAVECAPS_LRVOLUME flag.
693 //
694 res = waveOutGetDevCaps((UINT_PTR)_hWaveOut, &caps, sizeof(WAVEOUTCAPS));
695 if (MMSYSERR_NOERROR != res)
696 {
697 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
698 TraceWaveOutError(res);
699 }
700 if (!(caps.dwSupport & WAVECAPS_VOLUME))
701 {
702 // this device does not support volume control using the waveOutSetVolume API
703 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device does not support volume control using the Wave API");
704 return -1;
705 }
706 if (!(caps.dwSupport & WAVECAPS_LRVOLUME))
707 {
708 // high-order word (right channel) is ignored
709 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "device does not support volume control on both channels");
710 }
711
712 DWORD dwVolume(0x00000000);
713 dwVolume = (DWORD)(((volumeRight & 0xFFFF) << 16) | (volumeLeft & 0xFFFF));
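// Example: volumeLeft = 0xFFFF, volumeRight = 0x8000 => dwVolume = 0x8000FFFF.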
714
715 res = waveOutSetVolume(_hWaveOut, dwVolume);
716 if (MMSYSERR_NOERROR != res)
717 {
718 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutSetVolume() failed (err=%d)", res);
719 TraceWaveOutError(res);
720 return -1;
721 }
722
723 return 0;
724 }
725
726 // ----------------------------------------------------------------------------
727 // WaveOutVolume
728 //
729 // The low-order word of this location contains the left-channel volume setting,
730 // and the high-order word contains the right-channel setting.
731 // A value of 0xFFFF (65535) represents full volume, and a value of 0x0000
732 // is silence.
733 //
734 // If a device does not support both left and right volume control,
735 // the low-order word of the specified location contains the mono volume level.
736 //
737 // The full 16-bit setting(s) set with the waveOutSetVolume function is returned,
738 // regardless of whether the device supports the full 16 bits of volume-level
739 // control.
740 // ----------------------------------------------------------------------------
741
742 int32_t AudioDeviceWindowsWave::WaveOutVolume(uint16_t& volumeLeft, uint16_t& volumeRight) const
743 {
744
745 MMRESULT res(0);
746 WAVEOUTCAPS caps;
747
748 CriticalSectionScoped lock(&_critSect);
749
750 if (_hWaveOut == NULL)
751 {
752 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no open playout device exists => using default");
753 }
754
755 // To determine whether the device supports volume control on both
756 // the left and right channels, use the WAVECAPS_LRVOLUME flag.
757 //
758 res = waveOutGetDevCaps((UINT_PTR)_hWaveOut, &caps, sizeof(WAVEOUTCAPS));
759 if (MMSYSERR_NOERROR != res)
760 {
761 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
762 TraceWaveOutError(res);
763 }
764 if (!(caps.dwSupport & WAVECAPS_VOLUME))
765 {
766 // this device does not support volume control using the waveOutSetVolume API
767 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device does not support volume control using the Wave API");
768 return -1;
769 }
770 if (!(caps.dwSupport & WAVECAPS_LRVOLUME))
771 {
772 // high-order word (right channel) is ignored
773 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "device does not support volume control on both channels");
774 }
775
776 DWORD dwVolume(0x00000000);
777
778 res = waveOutGetVolume(_hWaveOut, &dwVolume);
779 if (MMSYSERR_NOERROR != res)
780 {
781 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutGetVolume() failed (err=%d)", res);
782 TraceWaveOutError(res);
783 return -1;
784 }
785
786 WORD wVolumeLeft = LOWORD(dwVolume);
787 WORD wVolumeRight = HIWORD(dwVolume);
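// Example: dwVolume = 0x8000FFFF => wVolumeLeft = 0xFFFF, wVolumeRight = 0x8000.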
788
789 volumeLeft = static_cast<uint16_t> (wVolumeLeft);
790 volumeRight = static_cast<uint16_t> (wVolumeRight);
791
792 return 0;
793 }
794
795 // ----------------------------------------------------------------------------
796 // MaxSpeakerVolume
797 // ----------------------------------------------------------------------------
798
799 int32_t AudioDeviceWindowsWave::MaxSpeakerVolume(uint32_t& maxVolume) const
800 {
801
802 uint32_t maxVol(0);
803
804 if (_mixerManager.MaxSpeakerVolume(maxVol) == -1)
805 {
806 return -1;
807 }
808
809 maxVolume = maxVol;
810 return 0;
811 }
812
813 // ----------------------------------------------------------------------------
814 // MinSpeakerVolume
815 // ----------------------------------------------------------------------------
816
817 int32_t AudioDeviceWindowsWave::MinSpeakerVolume(uint32_t& minVolume) const
818 {
819
820 uint32_t minVol(0);
821
822 if (_mixerManager.MinSpeakerVolume(minVol) == -1)
823 {
824 return -1;
825 }
826
827 minVolume = minVol;
828 return 0;
829 }
830
831 // ----------------------------------------------------------------------------
832 // SpeakerVolumeStepSize
833 // ----------------------------------------------------------------------------
834
835 int32_t AudioDeviceWindowsWave::SpeakerVolumeStepSize(uint16_t& stepSize) const
836 {
837
838 uint16_t delta(0);
839
840 if (_mixerManager.SpeakerVolumeStepSize(delta) == -1)
841 {
842 return -1;
843 }
844
845 stepSize = delta;
846 return 0;
847 }
848
849 // ----------------------------------------------------------------------------
850 // SpeakerMuteIsAvailable
851 // ----------------------------------------------------------------------------
852
853 int32_t AudioDeviceWindowsWave::SpeakerMuteIsAvailable(bool& available)
854 {
855
856 bool isAvailable(false);
857
858 // Enumerate all available speakers and make an attempt to open up the
859 // output mixer corresponding to the currently selected output device.
860 //
861 if (InitSpeaker() == -1)
862 {
863 // If we end up here it means that the selected speaker has no volume
864 // control, hence it is safe to state that there is no mute control
865 // already at this stage.
866 available = false;
867 return 0;
868 }
869
870 // Check if the selected speaker has a mute control
871 //
872 _mixerManager.SpeakerMuteIsAvailable(isAvailable);
873 available = isAvailable;
874
875 // Close the initialized output mixer
876 //
877 _mixerManager.CloseSpeaker();
878
879 return 0;
880 }
881
882 // ----------------------------------------------------------------------------
883 // SetSpeakerMute
884 // ----------------------------------------------------------------------------
885
886 int32_t AudioDeviceWindowsWave::SetSpeakerMute(bool enable)
887 {
888 return (_mixerManager.SetSpeakerMute(enable));
889 }
890
891 // ----------------------------------------------------------------------------
892 // SpeakerMute
893 // ----------------------------------------------------------------------------
894
895 int32_t AudioDeviceWindowsWave::SpeakerMute(bool& enabled) const
896 {
897
898 bool muted(false);
899
900 if (_mixerManager.SpeakerMute(muted) == -1)
901 {
902 return -1;
903 }
904
905 enabled = muted;
906 return 0;
907 }
908
909 // ----------------------------------------------------------------------------
910 // MicrophoneMuteIsAvailable
911 // ----------------------------------------------------------------------------
912
913 int32_t AudioDeviceWindowsWave::MicrophoneMuteIsAvailable(bool& available)
914 {
915
916 bool isAvailable(false);
917
918 // Enumerate all available microphones and make an attempt to open up the
919 // input mixer corresponding to the currently selected input device.
920 //
921 if (InitMicrophone() == -1)
922 {
923 // If we end up here it means that the selected microphone has no volume
924 // control, hence it is safe to state that there is no mute control
925 // already at this stage.
926 available = false;
927 return 0;
928 }
929
930 // Check if the selected microphone has a mute control
931 //
932 _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
933 available = isAvailable;
934
935 // Close the initialized input mixer
936 //
937 _mixerManager.CloseMicrophone();
938
939 return 0;
940 }
941
942 // ----------------------------------------------------------------------------
943 // SetMicrophoneMute
944 // ----------------------------------------------------------------------------
945
946 int32_t AudioDeviceWindowsWave::SetMicrophoneMute(bool enable)
947 {
948 return (_mixerManager.SetMicrophoneMute(enable));
949 }
950
951 // ----------------------------------------------------------------------------
952 // MicrophoneMute
953 // ----------------------------------------------------------------------------
954
955 int32_t AudioDeviceWindowsWave::MicrophoneMute(bool& enabled) const
956 {
957
958 bool muted(false);
959
960 if (_mixerManager.MicrophoneMute(muted) == -1)
961 {
962 return -1;
963 }
964
965 enabled = muted;
966 return 0;
967 }
968
969 // ----------------------------------------------------------------------------
970 // MicrophoneBoostIsAvailable
971 // ----------------------------------------------------------------------------
972
973 int32_t AudioDeviceWindowsWave::MicrophoneBoostIsAvailable(bool& available)
974 {
975
976 bool isAvailable(false);
977
978 // Enumerate all available microphones and make an attempt to open up the
979 // input mixer corresponding to the currently selected input device.
980 //
981 if (InitMicrophone() == -1)
982 {
983 // If we end up here it means that the selected microphone has no volume
984 // control, hence it is safe to state that there is no boost control
985 // already at this stage.
986 available = false;
987 return 0;
988 }
989
990 // Check if the selected microphone has a boost control
991 //
992 _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
993 available = isAvailable;
994
995 // Close the initialized input mixer
996 //
997 _mixerManager.CloseMicrophone();
998
999 return 0;
1000 }
1001
1002 // ----------------------------------------------------------------------------
1003 // SetMicrophoneBoost
1004 // ----------------------------------------------------------------------------
1005
1006 int32_t AudioDeviceWindowsWave::SetMicrophoneBoost(bool enable)
1007 {
1008
1009 return (_mixerManager.SetMicrophoneBoost(enable));
1010 }
1011
1012 // ----------------------------------------------------------------------------
1013 // MicrophoneBoost
1014 // ----------------------------------------------------------------------------
1015
1016 int32_t AudioDeviceWindowsWave::MicrophoneBoost(bool& enabled) const
1017 {
1018
1019 bool onOff(false);
1020
1021 if (_mixerManager.MicrophoneBoost(onOff) == -1)
1022 {
1023 return -1;
1024 }
1025
1026 enabled = onOff;
1027 return 0;
1028 }
1029
1030 // ----------------------------------------------------------------------------
1031 // StereoRecordingIsAvailable
1032 // ----------------------------------------------------------------------------
1033
1034 int32_t AudioDeviceWindowsWave::StereoRecordingIsAvailable(bool& available)
1035 {
1036 available = true;
1037 return 0;
1038 }
1039
1040 // ----------------------------------------------------------------------------
1041 // SetStereoRecording
1042 // ----------------------------------------------------------------------------
1043
1044 int32_t AudioDeviceWindowsWave::SetStereoRecording(bool enable)
1045 {
1046
1047 if (enable)
1048 _recChannels = 2;
1049 else
1050 _recChannels = 1;
1051
1052 return 0;
1053 }
1054
1055 // ----------------------------------------------------------------------------
1056 // StereoRecording
1057 // ----------------------------------------------------------------------------
1058
1059 int32_t AudioDeviceWindowsWave::StereoRecording(bool& enabled) const
1060 {
1061
1062 if (_recChannels == 2)
1063 enabled = true;
1064 else
1065 enabled = false;
1066
1067 return 0;
1068 }
1069
1070 // ----------------------------------------------------------------------------
1071 // StereoPlayoutIsAvailable
1072 // ----------------------------------------------------------------------------
1073
1074 int32_t AudioDeviceWindowsWave::StereoPlayoutIsAvailable(bool& available)
1075 {
1076 available = true;
1077 return 0;
1078 }
1079
1080 // ----------------------------------------------------------------------------
1081 // SetStereoPlayout
1082 //
1083 // Specifies the number of output channels.
1084 //
1085 // NOTE - the setting will only have an effect after InitPlayout has
1086 // been called.
1087 //
1088 // 16-bit mono:
1089 //
1090 // Each sample is 2 bytes. Sample 1 is followed by samples 2, 3, 4, and so on.
1091 // For each sample, the first byte is the low-order byte of channel 0 and the
1092 // second byte is the high-order byte of channel 0.
1093 //
1094 // 16-bit stereo:
1095 //
1096 // Each sample is 4 bytes. Sample 1 is followed by samples 2, 3, 4, and so on.
1097 // For each sample, the first byte is the low-order byte of channel 0 (left channel);
1098 // the second byte is the high-order byte of channel 0; the third byte is the
1099 // low-order byte of channel 1 (right channel); and the fourth byte is the
1100 // high-order byte of channel 1.
1101 // ----------------------------------------------------------------------------
1102
1103 int32_t AudioDeviceWindowsWave::SetStereoPlayout(bool enable)
1104 {
1105
1106 if (enable)
1107 _playChannels = 2;
1108 else
1109 _playChannels = 1;
1110
1111 return 0;
1112 }
1113
1114 // ----------------------------------------------------------------------------
1115 // StereoPlayout
1116 // ----------------------------------------------------------------------------
1117
1118 int32_t AudioDeviceWindowsWave::StereoPlayout(bool& enabled) const
1119 {
1120
1121 if (_playChannels == 2)
1122 enabled = true;
1123 else
1124 enabled = false;
1125
1126 return 0;
1127 }
1128
1129 // ----------------------------------------------------------------------------
1130 // SetAGC
1131 // ----------------------------------------------------------------------------
1132
1133 int32_t AudioDeviceWindowsWave::SetAGC(bool enable)
1134 {
1135
1136 _AGC = enable;
1137
1138 return 0;
1139 }
1140
1141 // ----------------------------------------------------------------------------
1142 // AGC
1143 // ----------------------------------------------------------------------------
1144
1145 bool AudioDeviceWindowsWave::AGC() const
1146 {
1147 return _AGC;
1148 }
1149
1150 // ----------------------------------------------------------------------------
1151 // MicrophoneVolumeIsAvailable
1152 // ----------------------------------------------------------------------------
1153
1154 int32_t AudioDeviceWindowsWave::MicrophoneVolumeIsAvailable(bool& available)
1155 {
1156
1157 bool isAvailable(false);
1158
1159 // Enumerate all available microphones and make an attempt to open up the
1160 // input mixer corresponding to the currently selected input device.
1161 //
1162 if (InitMicrophone() == -1)
1163 {
1164 // Failed to find valid microphone
1165 available = false;
1166 return 0;
1167 }
1168
1169 // Check if the selected microphone has a volume control
1170 //
1171 _mixerManager.MicrophoneVolumeIsAvailable(isAvailable);
1172 available = isAvailable;
1173
1174 // Close the initialized input mixer
1175 //
1176 _mixerManager.CloseMicrophone();
1177
1178 return 0;
1179 }
1180
1181 // ----------------------------------------------------------------------------
1182 // SetMicrophoneVolume
1183 // ----------------------------------------------------------------------------
1184
1185 int32_t AudioDeviceWindowsWave::SetMicrophoneVolume(uint32_t volume)
1186 {
1187 return (_mixerManager.SetMicrophoneVolume(volume));
1188 }
1189
1190 // ----------------------------------------------------------------------------
1191 // MicrophoneVolume
1192 // ----------------------------------------------------------------------------
1193
1194 int32_t AudioDeviceWindowsWave::MicrophoneVolume(uint32_t& volume) const
1195 {
1196 uint32_t level(0);
1197
1198 if (_mixerManager.MicrophoneVolume(level) == -1)
1199 {
1200 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to retrieve current microphone level");
1201 return -1;
1202 }
1203
1204 volume = level;
1205 return 0;
1206 }
1207
1208 // ----------------------------------------------------------------------------
1209 // MaxMicrophoneVolume
1210 // ----------------------------------------------------------------------------
1211
1212 int32_t AudioDeviceWindowsWave::MaxMicrophoneVolume(uint32_t& maxVolume) const
1213 {
1214 // _maxMicVolume can be zero in AudioMixerManager::MaxMicrophoneVolume():
1215 // (1) API GetLineControl() returns failure at querying the max Mic level.
1216 // (2) API GetLineControl() returns maxVolume as zero in rare cases.
1217 // Both cases show we don't have access to the mixer controls.
1218 // We return -1 here to indicate that.
1219 if (_maxMicVolume == 0)
1220 {
1221 return -1;
1222 }
1223
1224 maxVolume = _maxMicVolume;
1225 return 0;
1226 }
1227
1228 // ----------------------------------------------------------------------------
1229 // MinMicrophoneVolume
1230 // ----------------------------------------------------------------------------
1231
1232 int32_t AudioDeviceWindowsWave::MinMicrophoneVolume(uint32_t& minVolume) const
1233 {
1234 minVolume = _minMicVolume;
1235 return 0;
1236 }
1237
1238 // ----------------------------------------------------------------------------
1239 // MicrophoneVolumeStepSize
1240 // ----------------------------------------------------------------------------
1241
1242 int32_t AudioDeviceWindowsWave::MicrophoneVolumeStepSize(uint16_t& stepSize) const
1243 {
1244
1245 uint16_t delta(0);
1246
1247 if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1)
1248 {
1249 return -1;
1250 }
1251
1252 stepSize = delta;
1253 return 0;
1254 }
1255
1256 // ----------------------------------------------------------------------------
1257 // PlayoutDevices
1258 // ----------------------------------------------------------------------------
1259
1260 int16_t AudioDeviceWindowsWave::PlayoutDevices()
1261 {
1262
1263 return (waveOutGetNumDevs());
1264 }
1265
1266 // ----------------------------------------------------------------------------
1267 // SetPlayoutDevice I (II)
1268 // ----------------------------------------------------------------------------
1269
1270 int32_t AudioDeviceWindowsWave::SetPlayoutDevice(uint16_t index)
1271 {
1272
1273 if (_playIsInitialized)
1274 {
1275 return -1;
1276 }
1277
1278 UINT nDevices = waveOutGetNumDevs();
1279 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "number of available waveform-audio output devices is %u", nDevices);
1280
1281 if (index < 0 || index > (nDevices-1))
1282 {
1283 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
1284 return -1;
1285 }
1286
1287 _usingOutputDeviceIndex = true;
1288 _outputDeviceIndex = index;
1289 _outputDeviceIsSpecified = true;
1290
1291 return 0;
1292 }
1293
1294 // ----------------------------------------------------------------------------
1295 // SetPlayoutDevice II (II)
1296 // ----------------------------------------------------------------------------
1297
1298 int32_t AudioDeviceWindowsWave::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device)
1299 {
1300 if (_playIsInitialized)
1301 {
1302 return -1;
1303 }
1304
1305 if (device == AudioDeviceModule::kDefaultDevice)
1306 {
1307 }
1308 else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
1309 {
1310 }
1311
1312 _usingOutputDeviceIndex = false;
1313 _outputDevice = device;
1314 _outputDeviceIsSpecified = true;
1315
1316 return 0;
1317 }
1318
1319 // ----------------------------------------------------------------------------
1320 // PlayoutDeviceName
1321 // ----------------------------------------------------------------------------
1322
1323 int32_t AudioDeviceWindowsWave::PlayoutDeviceName(
1324 uint16_t index,
1325 char name[kAdmMaxDeviceNameSize],
1326 char guid[kAdmMaxGuidSize])
1327 {
1328
1329 uint16_t nDevices(PlayoutDevices());
1330
1331 // Special fix for the case when the user asks for the name of the default device.
1332 //
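// ((uint16_t)(-1) equals 0xFFFF, the index used here to mean the default device.)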
1333 if (index == (uint16_t)(-1))
1334 {
1335 index = 0;
1336 }
1337
1338 if ((index > (nDevices-1)) || (name == NULL))
1339 {
1340 return -1;
1341 }
1342
1343 memset(name, 0, kAdmMaxDeviceNameSize);
1344
1345 if (guid != NULL)
1346 {
1347 memset(guid, 0, kAdmMaxGuidSize);
1348 }
1349
1350 WAVEOUTCAPSW caps; // the szPname member (product name, NULL-terminated) is a WCHAR string
1351 MMRESULT res;
1352
1353 res = waveOutGetDevCapsW(index, &caps, sizeof(WAVEOUTCAPSW));
1354 if (res != MMSYSERR_NOERROR)
1355 {
1356 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCapsW() failed (err=%d)", res);
1357 return -1;
1358 }
1359 if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
1360 {
1361 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 1", GetLastError());
1362 }
1363
1364 if (guid == NULL)
1365 {
1366 return 0;
1367 }
1368
1369 // It is possible to get the unique endpoint ID string using the Wave API.
1370 // However, it is only supported on Windows Vista and Windows 7.
1371
1372 size_t cbEndpointId(0);
1373
1374 // Get the size (including the terminating null) of the endpoint ID string of the waveOut device.
1375 // Windows Vista supports the DRV_QUERYFUNCTIONINSTANCEIDSIZE and DRV_QUERYFUNCTIONINSTANCEID messages.
1376 res = waveOutMessage((HWAVEOUT)IntToPtr(index),
1377 DRV_QUERYFUNCTIONINSTANCEIDSIZE,
1378 (DWORD_PTR)&cbEndpointId, NULL);
1379 if (res != MMSYSERR_NOERROR)
1380 {
1381 // DRV_QUERYFUNCTIONINSTANCEIDSIZE is not supported <=> earlier version of Windows than Vista
1382 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveOutMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) failed (err=%d)", res);
1383 TraceWaveOutError(res);
1384 // Best we can do is to copy the friendly name and use it as guid
1385 if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
1386 {
1387 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 2", GetLastError());
1388 }
1389 return 0;
1390 }
1391
1392 // waveOutMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) worked => we are on a Vista or Windows 7 device
1393
1394 WCHAR *pstrEndpointId = NULL;
1395 pstrEndpointId = (WCHAR*)CoTaskMemAlloc(cbEndpointId);
1396
1397 // Get the endpoint ID string for this waveOut device.
1398 res = waveOutMessage((HWAVEOUT)IntToPtr(index),
1399 DRV_QUERYFUNCTIONINSTANCEID,
1400 (DWORD_PTR)pstrEndpointId,
1401 cbEndpointId);
1402 if (res != MMSYSERR_NOERROR)
1403 {
1404 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveOutMessage(DRV_QUERYFUNCTIONINSTANCEID) failed (err=%d)", res);
1405 TraceWaveOutError(res);
1406 // Best we can do is to copy the friendly name and use it as guid
1407 if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
1408 {
1409 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 3", GetLastError());
1410 }
1411 CoTaskMemFree(pstrEndpointId);
1412 return 0;
1413 }
1414
1415 if (WideCharToMultiByte(CP_UTF8, 0, pstrEndpointId, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
1416 {
1417 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 4", GetLastError());
1418 }
1419 CoTaskMemFree(pstrEndpointId);
1420
1421 return 0;
1422 }
1423
1424 // ----------------------------------------------------------------------------
1425 // RecordingDeviceName
1426 // ----------------------------------------------------------------------------
1427
1428 int32_t AudioDeviceWindowsWave::RecordingDeviceName(
1429 uint16_t index,
1430 char name[kAdmMaxDeviceNameSize],
1431 char guid[kAdmMaxGuidSize])
1432 {
1433
1434 uint16_t nDevices(RecordingDevices());
1435
1436 // Special fix for the case when the user asks for the name of the default device.
1437 //
1438 if (index == (uint16_t)(-1))
1439 {
1440 index = 0;
1441 }
1442
1443 if ((index > (nDevices-1)) || (name == NULL))
1444 {
1445 return -1;
1446 }
1447
1448 memset(name, 0, kAdmMaxDeviceNameSize);
1449
1450 if (guid != NULL)
1451 {
1452 memset(guid, 0, kAdmMaxGuidSize);
1453 }
1454
1455 WAVEINCAPSW caps; // the szPname member (product name, NULL-terminated) is a WCHAR string
1456 MMRESULT res;
1457
1458 res = waveInGetDevCapsW(index, &caps, sizeof(WAVEINCAPSW));
1459 if (res != MMSYSERR_NOERROR)
1460 {
1461 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetDevCapsW() failed (err=%d)", res);
1462 return -1;
1463 }
1464 if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
1465 {
1466 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 1", GetLastError());
1467 }
1468
1469 if (guid == NULL)
1470 {
1471 return 0;
1472 }
1473
1474 // It is possible to get the unique endpoint ID string using the Wave API.
1475 // However, it is only supported on Windows Vista and Windows 7.
1476
1477 size_t cbEndpointId(0);
1478
1479 // Get the size (including the terminating null) of the endpoint ID string of the waveIn device.
1480 // Windows Vista supports the DRV_QUERYFUNCTIONINSTANCEIDSIZE and DRV_QUERYFUNCTIONINSTANCEID messages.
1481 res = waveInMessage((HWAVEIN)IntToPtr(index),
1482 DRV_QUERYFUNCTIONINSTANCEIDSIZE,
1483 (DWORD_PTR)&cbEndpointId, NULL);
1484 if (res != MMSYSERR_NOERROR)
1485 {
1486 // DRV_QUERYFUNCTIONINSTANCEIDSIZE is not supported <=> earlier version of Windows than Vista
1487 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveInMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) failed (err=%d)", res);
1488 TraceWaveInError(res);
1489 // Best we can do is to copy the friendly name and use it as guid
1490 if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
1491 {
1492 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 2", GetLastError());
1493 }
1494 return 0;
1495 }
1496
1497 // waveInMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) worked => we are on a Vista or Windows 7 device
1498
1499 WCHAR *pstrEndpointId = NULL;
1500 pstrEndpointId = (WCHAR*)CoTaskMemAlloc(cbEndpointId);
1501
1502 // Get the endpoint ID string for this waveIn device.
1503 res = waveInMessage((HWAVEIN)IntToPtr(index),
1504 DRV_QUERYFUNCTIONINSTANCEID,
1505 (DWORD_PTR)pstrEndpointId,
1506 cbEndpointId);
1507 if (res != MMSYSERR_NOERROR)
1508 {
1509 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveInMessage(DRV_QUERYFUNCTIONINSTANCEID) failed (err=%d)", res);
1510 TraceWaveInError(res);
1511 // Best we can do is to copy the friendly name and use it as guid
1512 if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
1513 {
1514 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 3", GetLastError());
1515 }
1516 CoTaskMemFree(pstrEndpointId);
1517 return 0;
1518 }
1519
1520 if (WideCharToMultiByte(CP_UTF8, 0, pstrEndpointId, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
1521 {
1522 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 4", GetLastError());
1523 }
1524 CoTaskMemFree(pstrEndpointId);
1525
1526 return 0;
1527 }
1528
1529 // ----------------------------------------------------------------------------
1530 // RecordingDevices
1531 // ----------------------------------------------------------------------------
1532
1533 int16_t AudioDeviceWindowsWave::RecordingDevices()
1534 {
1535
1536 return (waveInGetNumDevs());
1537 }
1538
1539 // ----------------------------------------------------------------------------
1540 // SetRecordingDevice I (II)
1541 // ----------------------------------------------------------------------------
1542
1543 int32_t AudioDeviceWindowsWave::SetRecordingDevice(uint16_t index)
1544 {
1545
1546 if (_recIsInitialized)
1547 {
1548 return -1;
1549 }
1550
1551 UINT nDevices = waveInGetNumDevs();
1552 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "number of available waveform-audio input devices is %u", nDevices);
1553
1554 if (index < 0 || index > (nDevices-1))
1555 {
1556 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
1557 return -1;
1558 }
1559
1560 _usingInputDeviceIndex = true;
1561 _inputDeviceIndex = index;
1562 _inputDeviceIsSpecified = true;
1563
1564 return 0;
1565 }
1566
1567 // ----------------------------------------------------------------------------
1568 // SetRecordingDevice II (II)
1569 // ----------------------------------------------------------------------------
1570
1571 int32_t AudioDeviceWindowsWave::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device)
1572 {
1573 if (device == AudioDeviceModule::kDefaultDevice)
1574 {
1575 }
1576 else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
1577 {
1578 }
1579
1580 if (_recIsInitialized)
1581 {
1582 return -1;
1583 }
1584
1585 _usingInputDeviceIndex = false;
1586 _inputDevice = device;
1587 _inputDeviceIsSpecified = true;
1588
1589 return 0;
1590 }
1591
1592 // ----------------------------------------------------------------------------
1593 // PlayoutIsAvailable
1594 // ----------------------------------------------------------------------------
1595
1596 int32_t AudioDeviceWindowsWave::PlayoutIsAvailable(bool& available)
1597 {
1598
1599 available = false;
1600
1601 // Try to initialize the playout side
1602 int32_t res = InitPlayout();
1603
1604 // Cancel effect of initialization
1605 StopPlayout();
1606
1607 if (res != -1)
1608 {
1609 available = true;
1610 }
1611
1612 return 0;
1613 }
1614
1615 // ----------------------------------------------------------------------------
1616 // RecordingIsAvailable
1617 // ----------------------------------------------------------------------------
1618
1619 int32_t AudioDeviceWindowsWave::RecordingIsAvailable(bool& available)
1620 {
1621
1622 available = false;
1623
1624 // Try to initialize the recording side
1625 int32_t res = InitRecording();
1626
1627 // Cancel effect of initialization
1628 StopRecording();
1629
1630 if (res != -1)
1631 {
1632 available = true;
1633 }
1634
1635 return 0;
1636 }
1637
1638 // ----------------------------------------------------------------------------
1639 // InitPlayout
1640 // ----------------------------------------------------------------------------
1641
1642 int32_t AudioDeviceWindowsWave::InitPlayout()
1643 {
1644
1645 CriticalSectionScoped lock(&_critSect);
1646
1647 if (_playing)
1648 {
1649 return -1;
1650 }
1651
1652 if (!_outputDeviceIsSpecified)
1653 {
1654 return -1;
1655 }
1656
1657 if (_playIsInitialized)
1658 {
1659 return 0;
1660 }
1661
1662 // Initialize the speaker (devices might have been added or removed)
1663 if (InitSpeaker() == -1)
1664 {
1665 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitSpeaker() failed");
1666 }
1667
1668 // Enumerate all available output devices
1669 EnumeratePlayoutDevices();
1670
1671 // Start by closing any existing wave-output devices
1672 //
1673 MMRESULT res(MMSYSERR_ERROR);
1674
1675 if (_hWaveOut != NULL)
1676 {
1677 res = waveOutClose(_hWaveOut);
1678 if (MMSYSERR_NOERROR != res)
1679 {
1680 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutClose() failed (err=%d)", res);
1681 TraceWaveOutError(res);
1682 }
1683 }
1684
1685 // Set the output wave format
1686 //
1687 WAVEFORMATEX waveFormat;
1688
1689 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
1690 waveFormat.nChannels = _playChannels; // mono <=> 1, stereo <=> 2
1691 waveFormat.nSamplesPerSec = N_PLAY_SAMPLES_PER_SEC;
1692 waveFormat.wBitsPerSample = 16;
1693 waveFormat.nBlockAlign = waveFormat.nChannels * (waveFormat.wBitsPerSample/8);
1694 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
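// E.g. for 16-bit stereo: nBlockAlign = 2 * 2 = 4 bytes per frame, nAvgBytesPerSec = 4 * nSamplesPerSec.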
1695 waveFormat.cbSize = 0;
1696
1697 // Open the given waveform-audio output device for playout
1698 //
1699 HWAVEOUT hWaveOut(NULL);
1700
1701 if (IsUsingOutputDeviceIndex())
1702 {
1703 // verify settings first
1704 res = waveOutOpen(NULL, _outputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
1705 if (MMSYSERR_NOERROR == res)
1706 {
1707 // open the given waveform-audio output device for playout
1708 res = waveOutOpen(&hWaveOut, _outputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL);
1709 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening output device corresponding to device ID %u", _outputDeviceIndex);
1710 }
1711 }
1712 else
1713 {
1714 if (_outputDevice == AudioDeviceModule::kDefaultCommunicationDevice)
1715 {
1716 // check if it is possible to open the default communication device (supported on Windows 7)
1717 res = waveOutOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE | WAVE_FORMAT_QUERY);
1718 if (MMSYSERR_NOERROR == res)
1719 {
1720 // if so, open the default communication device for real
1721 res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE);
1722 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
1723 }
1724 else
1725 {
1726 // use default device since default communication device was not available
1727 res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
1728 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to open default communication device => using default instead");
1729 }
1730 }
1731 else if (_outputDevice == AudioDeviceModule::kDefaultDevice)
1732 {
1733 // open default device since it has been requested
1734 res = waveOutOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
1735 if (MMSYSERR_NOERROR == res)
1736 {
1737 res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
1738 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default output device");
1739 }
1740 }
1741 }
1742
1743 if (MMSYSERR_NOERROR != res)
1744 {
1745 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutOpen() failed (err=%d)", res);
1746 TraceWaveOutError(res);
1747 return -1;
1748 }
1749
1750 // Log information about the acquired output device
1751 //
1752 WAVEOUTCAPS caps;
1753
1754 res = waveOutGetDevCaps((UINT_PTR)hWaveOut, &caps, sizeof(WAVEOUTCAPS));
1755 if (res != MMSYSERR_NOERROR)
1756 {
1757 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
1758 TraceWaveOutError(res);
1759 }
1760
1761 UINT deviceID(0);
1762 res = waveOutGetID(hWaveOut, &deviceID);
1763 if (res != MMSYSERR_NOERROR)
1764 {
1765 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetID() failed (err=%d)", res);
1766 TraceWaveOutError(res);
1767 }
1768 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "utilized device ID : %u", deviceID);
1769 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name : %s", caps.szPname);
1770
1771 // Store valid handle for the open waveform-audio output device
1772 _hWaveOut = hWaveOut;
1773
1774 // Store the input wave header as well
1775 _waveFormatOut = waveFormat;
1776
1777 // Prepare wave-out headers
1778 //
1779 const uint8_t bytesPerSample = 2*_playChannels;
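// 2 bytes per 16-bit sample times the number of channels, i.e. bytes per sample frame (4 in stereo).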
1780
1781 for (int n = 0; n < N_BUFFERS_OUT; n++)
1782 {
1783 // set up the output wave header
1784 _waveHeaderOut[n].lpData = reinterpret_cast<LPSTR>(&_playBuffer[n]);
1785 _waveHeaderOut[n].dwBufferLength = bytesPerSample*PLAY_BUF_SIZE_IN_SAMPLES;
1786 _waveHeaderOut[n].dwFlags = 0;
1787 _waveHeaderOut[n].dwLoops = 0;
1788
1789 memset(_playBuffer[n], 0, bytesPerSample*PLAY_BUF_SIZE_IN_SAMPLES);
1790
1791 // The waveOutPrepareHeader function prepares a waveform-audio data block for playback.
1792 // The lpData, dwBufferLength, and dwFlags members of the WAVEHDR structure must be set
1793 // before calling this function.
1794 //
1795 res = waveOutPrepareHeader(_hWaveOut, &_waveHeaderOut[n], sizeof(WAVEHDR));
1796 if (MMSYSERR_NOERROR != res)
1797 {
1798 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutPrepareHeader(%d) failed (err=%d)", n, res);
1799 TraceWaveOutError(res);
1800 }
1801
1802 // perform extra check to ensure that the header is prepared
1803 if (_waveHeaderOut[n].dwFlags != WHDR_PREPARED)
1804 {
1805 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutPrepareHeader(%d) failed (dwFlags != WHDR_PREPARED)", n);
1806 }
1807 }
1808
1809 // Mark playout side as initialized
1810 _playIsInitialized = true;
1811
1812 _dTcheckPlayBufDelay = 10; // check playback buffer delay every 10 ms
1813 _playBufCount = 0; // index of active output wave header (<=> output buffer index)
1814     _playBufDelay = 80;                         // buffer delay/size is initialized to 80 ms and slowly decreased until er (remaining playout time in ms) < 25
1815 _minPlayBufDelay = 25; // minimum playout buffer delay
1816 _MAX_minBuffer = 65; // adaptive minimum playout buffer delay cannot be larger than this value
1817     _intro = 1;             // Used to make sure that adaptation starts after (2000-1700)/100 seconds
1818     _waitCounter = 1700;    // Counter for start of adaptation of the playback buffer
1819     _erZeroCounter = 0;     // Log how many times er = 0 in consecutive calls to RecTimeProc
1820 _useHeader = 0; // Counts number of "useHeader" detections. Stops at 2.
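    // Summary note (added, not part of the original comments): the playout delay is adapted
    // at runtime by PlayProc()/GetPlayoutBufferDelay(). _playBufDelay is adjusted relative to
    // _minPlayBufDelay depending on how much audio remains in the sound-card buffer,
    // _waitCounter/_intro postpone the start of that adaptation, and _useHeader switches to a
    // header-based delay estimate if waveOutGetPosition() proves unreliable.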
1821
1822 _writtenSamples = 0;
1823 _writtenSamplesOld = 0;
1824 _playedSamplesOld = 0;
1825 _sndCardPlayDelay = 0;
1826 _sndCardRecDelay = 0;
1827
1828 WEBRTC_TRACE(kTraceInfo, kTraceUtility, _id,"initial playout status: _playBufDelay=%d, _minPlayBufDelay=%d",
1829 _playBufDelay, _minPlayBufDelay);
1830
1831 return 0;
1832 }
1833
1834 // ----------------------------------------------------------------------------
1835 // InitRecording
1836 // ----------------------------------------------------------------------------
1837
1838 int32_t AudioDeviceWindowsWave::InitRecording()
1839 {
1840
1841 CriticalSectionScoped lock(&_critSect);
1842
1843 if (_recording)
1844 {
1845 return -1;
1846 }
1847
1848 if (!_inputDeviceIsSpecified)
1849 {
1850 return -1;
1851 }
1852
1853 if (_recIsInitialized)
1854 {
1855 return 0;
1856 }
1857
1858 _avgCPULoad = 0;
1859 _playAcc = 0;
1860
1861 // Initialize the microphone (devices might have been added or removed)
1862 if (InitMicrophone() == -1)
1863 {
1864 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitMicrophone() failed");
1865 }
1866
1867     // Enumerate all available input devices
1868 EnumerateRecordingDevices();
1869
1870 // Start by closing any existing wave-input devices
1871 //
1872 MMRESULT res(MMSYSERR_ERROR);
1873
1874 if (_hWaveIn != NULL)
1875 {
1876 res = waveInClose(_hWaveIn);
1877 if (MMSYSERR_NOERROR != res)
1878 {
1879 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInClose() failed (err=%d)", res);
1880 TraceWaveInError(res);
1881 }
1882 }
1883
1884 // Set the input wave format
1885 //
1886 WAVEFORMATEX waveFormat;
1887
1888 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
1889 waveFormat.nChannels = _recChannels; // mono <=> 1, stereo <=> 2
1890 waveFormat.nSamplesPerSec = N_REC_SAMPLES_PER_SEC;
1891 waveFormat.wBitsPerSample = 16;
1892 waveFormat.nBlockAlign = waveFormat.nChannels * (waveFormat.wBitsPerSample/8);
1893 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
1894 waveFormat.cbSize = 0;
1895
1896 // Open the given waveform-audio input device for recording
1897 //
1898 HWAVEIN hWaveIn(NULL);
1899
1900 if (IsUsingInputDeviceIndex())
1901 {
1902 // verify settings first
1903 res = waveInOpen(NULL, _inputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
1904 if (MMSYSERR_NOERROR == res)
1905 {
1906 // open the given waveform-audio input device for recording
1907 res = waveInOpen(&hWaveIn, _inputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL);
1908 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening input device corresponding to device ID %u", _inputDeviceIndex);
1909 }
1910 }
1911 else
1912 {
1913 if (_inputDevice == AudioDeviceModule::kDefaultCommunicationDevice)
1914 {
1915 // check if it is possible to open the default communication device (supported on Windows 7)
1916 res = waveInOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE | WAVE_FORMAT_QUERY);
1917 if (MMSYSERR_NOERROR == res)
1918 {
1919 // if so, open the default communication device for real
1920 res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE);
1921 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
1922 }
1923 else
1924 {
1925 // use default device since default communication device was not avaliable
1926 res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
1927 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to open default communication device => using default instead");
1928 }
1929 }
1930 else if (_inputDevice == AudioDeviceModule::kDefaultDevice)
1931 {
1932 // open default device since it has been requested
1933 res = waveInOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
1934 if (MMSYSERR_NOERROR == res)
1935 {
1936 res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
1937 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default input device");
1938 }
1939 }
1940 }
1941
1942 if (MMSYSERR_NOERROR != res)
1943 {
1944 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveInOpen() failed (err=%d)", res);
1945 TraceWaveInError(res);
1946 return -1;
1947 }
1948
1949     // Log information about the acquired input device
1950 //
1951 WAVEINCAPS caps;
1952
1953 res = waveInGetDevCaps((UINT_PTR)hWaveIn, &caps, sizeof(WAVEINCAPS));
1954 if (res != MMSYSERR_NOERROR)
1955 {
1956 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetDevCaps() failed (err=%d)", res);
1957 TraceWaveInError(res);
1958 }
1959
1960 UINT deviceID(0);
1961 res = waveInGetID(hWaveIn, &deviceID);
1962 if (res != MMSYSERR_NOERROR)
1963 {
1964 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetID() failed (err=%d)", res);
1965 TraceWaveInError(res);
1966 }
1967 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "utilized device ID : %u", deviceID);
1968 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name : %s", caps.szPname);
1969
1970 // Store valid handle for the open waveform-audio input device
1971 _hWaveIn = hWaveIn;
1972
1973 // Store the input wave header as well
1974 _waveFormatIn = waveFormat;
1975
1976 // Mark recording side as initialized
1977 _recIsInitialized = true;
1978
1979 _recBufCount = 0; // index of active input wave header (<=> input buffer index)
1980 _recDelayCount = 0; // ensures that input buffers are returned with certain delay
1981
1982 return 0;
1983 }
1984
1985 // ----------------------------------------------------------------------------
1986 // StartRecording
1987 // ----------------------------------------------------------------------------
1988
1989 int32_t AudioDeviceWindowsWave::StartRecording()
1990 {
1991
1992 if (!_recIsInitialized)
1993 {
1994 return -1;
1995 }
1996
1997 if (_recording)
1998 {
1999 return 0;
2000 }
2001
2002 // set state to ensure that the recording starts from the audio thread
2003 _startRec = true;
2004
2005     // the audio thread will signal when recording has started
2006 if (kEventTimeout == _recStartEvent.Wait(10000))
2007 {
2008 _startRec = false;
2009 StopRecording();
2010 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate recording");
2011 return -1;
2012 }
2013
2014 if (_recording)
2015 {
2016 // the recording state is set by the audio thread after recording has started
2017 }
2018 else
2019 {
2020 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate recording");
2021 return -1;
2022 }
2023
2024 return 0;
2025 }
2026
2027 // ----------------------------------------------------------------------------
2028 // StopRecording
2029 // ----------------------------------------------------------------------------
2030
2031 int32_t AudioDeviceWindowsWave::StopRecording()
2032 {
2033
2034 CriticalSectionScoped lock(&_critSect);
2035
2036 if (!_recIsInitialized)
2037 {
2038 return 0;
2039 }
2040
2041 if (_hWaveIn == NULL)
2042 {
2043 return -1;
2044 }
2045
2046 bool wasRecording = _recording;
2047 _recIsInitialized = false;
2048 _recording = false;
2049
2050 MMRESULT res;
2051
2052     // Stop waveform-audio input. If there are any buffers in the queue, the
2053 // current buffer will be marked as done (the dwBytesRecorded member in
2054 // the header will contain the length of data), but any empty buffers in
2055 // the queue will remain there.
2056 //
2057 res = waveInStop(_hWaveIn);
2058 if (MMSYSERR_NOERROR != res)
2059 {
2060 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInStop() failed (err=%d)", res);
2061 TraceWaveInError(res);
2062 }
2063
2064     // Stops input on the given waveform-audio input device and resets the current
2065 // position to zero. All pending buffers are marked as done and returned to
2066 // the application.
2067 //
2068 res = waveInReset(_hWaveIn);
2069 if (MMSYSERR_NOERROR != res)
2070 {
2071 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInReset() failed (err=%d)", res);
2072 TraceWaveInError(res);
2073 }
2074
2075 // Clean up the preparation performed by the waveInPrepareHeader function.
2076 // Only unprepare header if recording was ever started (and headers are prepared).
2077 //
2078 if (wasRecording)
2079 {
2080 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveInUnprepareHeader() will be performed");
2081 for (int n = 0; n < N_BUFFERS_IN; n++)
2082 {
2083 res = waveInUnprepareHeader(_hWaveIn, &_waveHeaderIn[n], sizeof(WAVEHDR));
2084 if (MMSYSERR_NOERROR != res)
2085 {
2086 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInUnprepareHeader() failed (err=%d)", res);
2087 TraceWaveInError(res);
2088 }
2089 }
2090 }
2091
2092 // Close the given waveform-audio input device.
2093 //
2094 res = waveInClose(_hWaveIn);
2095 if (MMSYSERR_NOERROR != res)
2096 {
2097 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInClose() failed (err=%d)", res);
2098 TraceWaveInError(res);
2099 }
2100
2101 // Set the wave input handle to NULL
2102 //
2103 _hWaveIn = NULL;
2104 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_hWaveIn is now set to NULL");
2105
2106 return 0;
2107 }
2108
2109 // ----------------------------------------------------------------------------
2110 // RecordingIsInitialized
2111 // ----------------------------------------------------------------------------
2112
2113 bool AudioDeviceWindowsWave::RecordingIsInitialized() const
2114 {
2115 return (_recIsInitialized);
2116 }
2117
2118 // ----------------------------------------------------------------------------
2119 // Recording
2120 // ----------------------------------------------------------------------------
2121
2122 bool AudioDeviceWindowsWave::Recording() const
2123 {
2124 return (_recording);
2125 }
2126
2127 // ----------------------------------------------------------------------------
2128 // PlayoutIsInitialized
2129 // ----------------------------------------------------------------------------
2130
2131 bool AudioDeviceWindowsWave::PlayoutIsInitialized() const
2132 {
2133 return (_playIsInitialized);
2134 }
2135
2136 // ----------------------------------------------------------------------------
2137 // StartPlayout
2138 // ----------------------------------------------------------------------------
2139
2140 int32_t AudioDeviceWindowsWave::StartPlayout()
2141 {
2142
2143 if (!_playIsInitialized)
2144 {
2145 return -1;
2146 }
2147
2148 if (_playing)
2149 {
2150 return 0;
2151 }
2152
2153 // set state to ensure that playout starts from the audio thread
2154 _startPlay = true;
2155
2156     // the audio thread will signal when playout has started
2157 if (kEventTimeout == _playStartEvent.Wait(10000))
2158 {
2159 _startPlay = false;
2160 StopPlayout();
2161 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate playout");
2162 return -1;
2163 }
2164
2165 if (_playing)
2166 {
2167 // the playing state is set by the audio thread after playout has started
2168 }
2169 else
2170 {
2171 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate playing");
2172 return -1;
2173 }
2174
2175 return 0;
2176 }
2177
2178 // ----------------------------------------------------------------------------
2179 // StopPlayout
2180 // ----------------------------------------------------------------------------
2181
2182 int32_t AudioDeviceWindowsWave::StopPlayout()
2183 {
2184
2185 CriticalSectionScoped lock(&_critSect);
2186
2187 if (!_playIsInitialized)
2188 {
2189 return 0;
2190 }
2191
2192 if (_hWaveOut == NULL)
2193 {
2194 return -1;
2195 }
2196
2197 _playIsInitialized = false;
2198 _playing = false;
2199 _sndCardPlayDelay = 0;
2200 _sndCardRecDelay = 0;
2201
2202 MMRESULT res;
2203
2204 // The waveOutReset function stops playback on the given waveform-audio
2205 // output device and resets the current position to zero. All pending
2206 // playback buffers are marked as done (WHDR_DONE) and returned to the application.
2207 // After this function returns, the application can send new playback buffers
2208 // to the device by calling waveOutWrite, or close the device by calling waveOutClose.
2209 //
2210 res = waveOutReset(_hWaveOut);
2211 if (MMSYSERR_NOERROR != res)
2212 {
2213 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutReset() failed (err=%d)", res);
2214 TraceWaveOutError(res);
2215 }
2216
2217 // The waveOutUnprepareHeader function cleans up the preparation performed
2218 // by the waveOutPrepareHeader function. This function must be called after
2219 // the device driver is finished with a data block.
2220 // You must call this function before freeing the buffer.
2221 //
2222 for (int n = 0; n < N_BUFFERS_OUT; n++)
2223 {
2224 res = waveOutUnprepareHeader(_hWaveOut, &_waveHeaderOut[n], sizeof(WAVEHDR));
2225 if (MMSYSERR_NOERROR != res)
2226 {
2227 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutUnprepareHeader() failed (err=%d)", res);
2228 TraceWaveOutError(res);
2229 }
2230 }
2231
2232 // The waveOutClose function closes the given waveform-audio output device.
2233 // The close operation fails if the device is still playing a waveform-audio
2234 // buffer that was previously sent by calling waveOutWrite. Before calling
2235 // waveOutClose, the application must wait for all buffers to finish playing
2236 // or call the waveOutReset function to terminate playback.
2237 //
2238 res = waveOutClose(_hWaveOut);
2239 if (MMSYSERR_NOERROR != res)
2240 {
2241 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutClose() failed (err=%d)", res);
2242 TraceWaveOutError(res);
2243 }
2244
2245 _hWaveOut = NULL;
2246 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_hWaveOut is now set to NULL");
2247
2248 return 0;
2249 }
2250
2251 // ----------------------------------------------------------------------------
2252 // PlayoutDelay
2253 // ----------------------------------------------------------------------------
2254
2255 int32_t AudioDeviceWindowsWave::PlayoutDelay(uint16_t& delayMS) const
2256 {
2257 CriticalSectionScoped lock(&_critSect);
2258 delayMS = (uint16_t)_sndCardPlayDelay;
2259 return 0;
2260 }
2261
2262 // ----------------------------------------------------------------------------
2263 // RecordingDelay
2264 // ----------------------------------------------------------------------------
2265
2266 int32_t AudioDeviceWindowsWave::RecordingDelay(uint16_t& delayMS) const
2267 {
2268 CriticalSectionScoped lock(&_critSect);
2269 delayMS = (uint16_t)_sndCardRecDelay;
2270 return 0;
2271 }
2272
2273 // ----------------------------------------------------------------------------
2274 // Playing
2275 // ----------------------------------------------------------------------------
2276
2277 bool AudioDeviceWindowsWave::Playing() const
2278 {
2279 return (_playing);
2280 }
2281 // ----------------------------------------------------------------------------
2282 // SetPlayoutBuffer
2283 // ----------------------------------------------------------------------------
2284
2285 int32_t AudioDeviceWindowsWave::SetPlayoutBuffer(const AudioDeviceModule::BufferType type, uint16_t sizeMS)
2286 {
2287 CriticalSectionScoped lock(&_critSect);
2288 _playBufType = type;
2289 if (type == AudioDeviceModule::kFixedBufferSize)
2290 {
2291 _playBufDelayFixed = sizeMS;
2292 }
2293 return 0;
2294 }
2295
2296 // ----------------------------------------------------------------------------
2297 // PlayoutBuffer
2298 // ----------------------------------------------------------------------------
2299
2300 int32_t AudioDeviceWindowsWave::PlayoutBuffer(AudioDeviceModule::BufferType& type, uint16_t& sizeMS) const
2301 {
2302 CriticalSectionScoped lock(&_critSect);
2303 type = _playBufType;
2304 if (type == AudioDeviceModule::kFixedBufferSize)
2305 {
2306 sizeMS = _playBufDelayFixed;
2307 }
2308 else
2309 {
2310 sizeMS = _playBufDelay;
2311 }
2312
2313 return 0;
2314 }
2315
2316 // ----------------------------------------------------------------------------
2317 // CPULoad
2318 // ----------------------------------------------------------------------------
2319
2320 int32_t AudioDeviceWindowsWave::CPULoad(uint16_t& load) const
2321 {
2322
2323 load = static_cast<uint16_t>(100*_avgCPULoad);
2324
2325 return 0;
2326 }
2327
2328 // ----------------------------------------------------------------------------
2329 // PlayoutWarning
2330 // ----------------------------------------------------------------------------
2331
2332 bool AudioDeviceWindowsWave::PlayoutWarning() const
2333 {
2334 return ( _playWarning > 0);
2335 }
2336
2337 // ----------------------------------------------------------------------------
2338 // PlayoutError
2339 // ----------------------------------------------------------------------------
2340
2341 bool AudioDeviceWindowsWave::PlayoutError() const
2342 {
2343 return ( _playError > 0);
2344 }
2345
2346 // ----------------------------------------------------------------------------
2347 // RecordingWarning
2348 // ----------------------------------------------------------------------------
2349
2350 bool AudioDeviceWindowsWave::RecordingWarning() const
2351 {
2352 return ( _recWarning > 0);
2353 }
2354
2355 // ----------------------------------------------------------------------------
2356 // RecordingError
2357 // ----------------------------------------------------------------------------
2358
2359 bool AudioDeviceWindowsWave::RecordingError() const
2360 {
2361 return ( _recError > 0);
2362 }
2363
2364 // ----------------------------------------------------------------------------
2365 // ClearPlayoutWarning
2366 // ----------------------------------------------------------------------------
2367
2368 void AudioDeviceWindowsWave::ClearPlayoutWarning()
2369 {
2370 _playWarning = 0;
2371 }
2372
2373 // ----------------------------------------------------------------------------
2374 // ClearPlayoutError
2375 // ----------------------------------------------------------------------------
2376
2377 void AudioDeviceWindowsWave::ClearPlayoutError()
2378 {
2379 _playError = 0;
2380 }
2381
2382 // ----------------------------------------------------------------------------
2383 // ClearRecordingWarning
2384 // ----------------------------------------------------------------------------
2385
2386 void AudioDeviceWindowsWave::ClearRecordingWarning()
2387 {
2388 _recWarning = 0;
2389 }
2390
2391 // ----------------------------------------------------------------------------
2392 // ClearRecordingError
2393 // ----------------------------------------------------------------------------
2394
2395 void AudioDeviceWindowsWave::ClearRecordingError()
2396 {
2397 _recError = 0;
2398 }
2399
2400 // ============================================================================
2401 // Private Methods
2402 // ============================================================================
2403
2404 // ----------------------------------------------------------------------------
2405 // InputSanityCheckAfterUnlockedPeriod
2406 // ----------------------------------------------------------------------------
2407
2408 int32_t AudioDeviceWindowsWave::InputSanityCheckAfterUnlockedPeriod() const
2409 {
2410 if (_hWaveIn == NULL)
2411 {
2412 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "input state has been modified during unlocked period");
2413 return -1;
2414 }
2415 return 0;
2416 }
2417
2418 // ----------------------------------------------------------------------------
2419 // OutputSanityCheckAfterUnlockedPeriod
2420 // ----------------------------------------------------------------------------
2421
2422 int32_t AudioDeviceWindowsWave::OutputSanityCheckAfterUnlockedPeriod() const
2423 {
2424 if (_hWaveOut == NULL)
2425 {
2426 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "output state has been modified during unlocked period");
2427 return -1;
2428 }
2429 return 0;
2430 }
2431
2432 // ----------------------------------------------------------------------------
2433 // EnumeratePlayoutDevices
2434 // ----------------------------------------------------------------------------
2435
2436 int32_t AudioDeviceWindowsWave::EnumeratePlayoutDevices()
2437 {
2438
2439 uint16_t nDevices(PlayoutDevices());
2440 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
2441 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#output devices: %u", nDevices);
2442
2443 WAVEOUTCAPS caps;
2444 MMRESULT res;
2445
2446 for (UINT deviceID = 0; deviceID < nDevices; deviceID++)
2447 {
2448 res = waveOutGetDevCaps(deviceID, &caps, sizeof(WAVEOUTCAPS));
2449 if (res != MMSYSERR_NOERROR)
2450 {
2451 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
2452 }
2453
2454 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
2455 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Device ID %u:", deviceID);
2456 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID : %u", caps.wMid);
2457 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID : %u",caps.wPid);
2458 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "version of driver : %u.%u", HIBYTE(caps.vDriverVersion), LOBYTE(caps.vDriverVersion));
2459 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name : %s", caps.szPname);
2460 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwFormats : 0x%x", caps.dwFormats);
2461 if (caps.dwFormats & WAVE_FORMAT_48S16)
2462 {
2463 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " 48kHz,stereo,16bit : SUPPORTED");
2464 }
2465 else
2466 {
2467 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,stereo,16bit : *NOT* SUPPORTED");
2468 }
2469 if (caps.dwFormats & WAVE_FORMAT_48M16)
2470 {
2471 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " 48kHz,mono,16bit : SUPPORTED");
2472 }
2473 else
2474 {
2475 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,mono,16bit : *NOT* SUPPORTED");
2476 }
2477 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wChannels : %u", caps.wChannels);
2478 TraceSupportFlags(caps.dwSupport);
2479 }
2480
2481 return 0;
2482 }
2483
2484 // ----------------------------------------------------------------------------
2485 // EnumerateRecordingDevices
2486 // ----------------------------------------------------------------------------
2487
2488 int32_t AudioDeviceWindowsWave::EnumerateRecordingDevices()
2489 {
2490
2491 uint16_t nDevices(RecordingDevices());
2492 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
2493 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#input devices: %u", nDevices);
2494
2495 WAVEINCAPS caps;
2496 MMRESULT res;
2497
2498 for (UINT deviceID = 0; deviceID < nDevices; deviceID++)
2499 {
2500 res = waveInGetDevCaps(deviceID, &caps, sizeof(WAVEINCAPS));
2501 if (res != MMSYSERR_NOERROR)
2502 {
2503 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetDevCaps() failed (err=%d)", res);
2504 }
2505
2506 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
2507 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Device ID %u:", deviceID);
2508 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID : %u", caps.wMid);
2509 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID : %u",caps.wPid);
2510 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "version of driver : %u.%u", HIBYTE(caps.vDriverVersion), LOBYTE(caps.vDriverVersion));
2511 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name : %s", caps.szPname);
2512 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwFormats : 0x%x", caps.dwFormats);
2513 if (caps.dwFormats & WAVE_FORMAT_48S16)
2514 {
2515 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " 48kHz,stereo,16bit : SUPPORTED");
2516 }
2517 else
2518 {
2519 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,stereo,16bit : *NOT* SUPPORTED");
2520 }
2521 if (caps.dwFormats & WAVE_FORMAT_48M16)
2522 {
2523 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " 48kHz,mono,16bit : SUPPORTED");
2524 }
2525 else
2526 {
2527 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,mono,16bit : *NOT* SUPPORTED");
2528 }
2529 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wChannels : %u", caps.wChannels);
2530 }
2531
2532 return 0;
2533 }
2534
2535 // ----------------------------------------------------------------------------
2536 // TraceSupportFlags
2537 // ----------------------------------------------------------------------------
2538
2539 void AudioDeviceWindowsWave::TraceSupportFlags(DWORD dwSupport) const
2540 {
2541 TCHAR buf[256];
2542
2543 StringCchPrintf(buf, 128, TEXT("support flags : 0x%x "), dwSupport);
2544
2545 if (dwSupport & WAVECAPS_PITCH)
2546 {
2547 // supports pitch control
2548 StringCchCat(buf, 256, TEXT("(PITCH)"));
2549 }
2550 if (dwSupport & WAVECAPS_PLAYBACKRATE)
2551 {
2552 // supports playback rate control
2553 StringCchCat(buf, 256, TEXT("(PLAYBACKRATE)"));
2554 }
2555 if (dwSupport & WAVECAPS_VOLUME)
2556 {
2557 // supports volume control
2558 StringCchCat(buf, 256, TEXT("(VOLUME)"));
2559 }
2560 if (dwSupport & WAVECAPS_LRVOLUME)
2561 {
2562 // supports separate left and right volume control
2563 StringCchCat(buf, 256, TEXT("(LRVOLUME)"));
2564 }
2565 if (dwSupport & WAVECAPS_SYNC)
2566 {
2567 // the driver is synchronous and will block while playing a buffer
2568 StringCchCat(buf, 256, TEXT("(SYNC)"));
2569 }
2570 if (dwSupport & WAVECAPS_SAMPLEACCURATE)
2571 {
2572 // returns sample-accurate position information
2573 StringCchCat(buf, 256, TEXT("(SAMPLEACCURATE)"));
2574 }
2575
2576 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%S", buf);
2577 }
2578
2579 // ----------------------------------------------------------------------------
2580 // TraceWaveInError
2581 // ----------------------------------------------------------------------------
2582
2583 void AudioDeviceWindowsWave::TraceWaveInError(MMRESULT error) const
2584 {
2585 TCHAR buf[MAXERRORLENGTH];
2586 TCHAR msg[MAXERRORLENGTH];
2587
2588 StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
2589 waveInGetErrorText(error, msg, MAXERRORLENGTH);
2590 StringCchCat(buf, MAXERRORLENGTH, msg);
2591 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%S", buf);
2592 }
2593
2594 // ----------------------------------------------------------------------------
2595 // TraceWaveOutError
2596 // ----------------------------------------------------------------------------
2597
2598 void AudioDeviceWindowsWave::TraceWaveOutError(MMRESULT error) const
2599 {
2600 TCHAR buf[MAXERRORLENGTH];
2601 TCHAR msg[MAXERRORLENGTH];
2602
2603 StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
2604 waveOutGetErrorText(error, msg, MAXERRORLENGTH);
2605 StringCchCat(buf, MAXERRORLENGTH, msg);
2606 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%S", buf);
2607 }
2608
2609 // ----------------------------------------------------------------------------
2610 // PrepareStartPlayout
2611 // ----------------------------------------------------------------------------
2612
2613 int32_t AudioDeviceWindowsWave::PrepareStartPlayout()
2614 {
2615
2616 CriticalSectionScoped lock(&_critSect);
2617
2618 if (_hWaveOut == NULL)
2619 {
2620 return -1;
2621 }
2622
2623 // A total of 30ms of data is immediately placed in the SC buffer
2624 //
2625 int8_t zeroVec[4*PLAY_BUF_SIZE_IN_SAMPLES]; // max allocation
2626 memset(zeroVec, 0, 4*PLAY_BUF_SIZE_IN_SAMPLES);
2627
2628 {
2629 Write(zeroVec, PLAY_BUF_SIZE_IN_SAMPLES);
2630 Write(zeroVec, PLAY_BUF_SIZE_IN_SAMPLES);
2631 Write(zeroVec, PLAY_BUF_SIZE_IN_SAMPLES);
2632 }
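    // Note (assumption: PLAY_BUF_SIZE_IN_SAMPLES corresponds to one 10 ms frame): the three
    // Write() calls above queue roughly 30 ms of silence, priming the sound-card buffer
    // before real data starts flowing.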
2633
2634 _playAcc = 0;
2635 _playWarning = 0;
2636 _playError = 0;
2637 _dc_diff_mean = 0;
2638 _dc_y_prev = 0;
2639 _dc_penalty_counter = 20;
2640 _dc_prevtime = 0;
2641 _dc_prevplay = 0;
2642
2643 return 0;
2644 }
2645
2646 // ----------------------------------------------------------------------------
2647 // PrepareStartRecording
2648 // ----------------------------------------------------------------------------
2649
2650 int32_t AudioDeviceWindowsWave::PrepareStartRecording()
2651 {
2652
2653 CriticalSectionScoped lock(&_critSect);
2654
2655 if (_hWaveIn == NULL)
2656 {
2657 return -1;
2658 }
2659
2660 _playAcc = 0;
2661 _recordedBytes = 0;
2662 _recPutBackDelay = REC_PUT_BACK_DELAY;
2663
2664 MMRESULT res;
2665 MMTIME mmtime;
2666 mmtime.wType = TIME_SAMPLES;
2667
2668 res = waveInGetPosition(_hWaveIn, &mmtime, sizeof(mmtime));
2669 if (MMSYSERR_NOERROR != res)
2670 {
2671 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetPosition(TIME_SAMPLES) failed (err=%d)", res);
2672 TraceWaveInError(res);
2673 }
2674
2675 _read_samples = mmtime.u.sample;
2676 _read_samples_old = _read_samples;
2677 _rec_samples_old = mmtime.u.sample;
2678 _wrapCounter = 0;
2679
2680 for (int n = 0; n < N_BUFFERS_IN; n++)
2681 {
2682 const uint8_t nBytesPerSample = 2*_recChannels;
2683
2684 // set up the input wave header
2685 _waveHeaderIn[n].lpData = reinterpret_cast<LPSTR>(&_recBuffer[n]);
2686 _waveHeaderIn[n].dwBufferLength = nBytesPerSample * REC_BUF_SIZE_IN_SAMPLES;
2687 _waveHeaderIn[n].dwFlags = 0;
2688 _waveHeaderIn[n].dwBytesRecorded = 0;
2689 _waveHeaderIn[n].dwUser = 0;
2690
2691 memset(_recBuffer[n], 0, nBytesPerSample * REC_BUF_SIZE_IN_SAMPLES);
2692
2693 // prepare a buffer for waveform-audio input
2694 res = waveInPrepareHeader(_hWaveIn, &_waveHeaderIn[n], sizeof(WAVEHDR));
2695 if (MMSYSERR_NOERROR != res)
2696 {
2697 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInPrepareHeader(%d) failed (err=%d)", n, res);
2698 TraceWaveInError(res);
2699 }
2700
2701 // send an input buffer to the given waveform-audio input device
2702 res = waveInAddBuffer(_hWaveIn, &_waveHeaderIn[n], sizeof(WAVEHDR));
2703 if (MMSYSERR_NOERROR != res)
2704 {
2705 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInAddBuffer(%d) failed (err=%d)", n, res);
2706 TraceWaveInError(res);
2707 }
2708 }
2709
2710 // start input on the given waveform-audio input device
2711 res = waveInStart(_hWaveIn);
2712 if (MMSYSERR_NOERROR != res)
2713 {
2714 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInStart() failed (err=%d)", res);
2715 TraceWaveInError(res);
2716 }
2717
2718 return 0;
2719 }
2720
2721 // ----------------------------------------------------------------------------
2722 // GetPlayoutBufferDelay
2723 // ----------------------------------------------------------------------------
2724
2725 int32_t AudioDeviceWindowsWave::GetPlayoutBufferDelay(uint32_t& writtenSamples, uint32_t& playedSamples)
2726 {
2727 int i;
2728 int ms_Header;
2729 long playedDifference;
2730 int msecInPlayoutBuffer(0); // #milliseconds of audio in the playout buffer
2731
2732 const uint16_t nSamplesPerMs = (uint16_t)(N_PLAY_SAMPLES_PER_SEC/1000); // default is 48000/1000 = 48
2733
2734 MMRESULT res;
2735 MMTIME mmtime;
2736
2737 if (!_playing)
2738 {
2739 playedSamples = 0;
2740 return (0);
2741 }
2742
2743 // Retrieve the current playback position.
2744 //
2745 mmtime.wType = TIME_SAMPLES; // number of waveform-audio samples
2746 res = waveOutGetPosition(_hWaveOut, &mmtime, sizeof(mmtime));
2747 if (MMSYSERR_NOERROR != res)
2748 {
2749 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetPosition() failed (err=%d)", res);
2750 TraceWaveOutError(res);
2751 }
2752
2753 writtenSamples = _writtenSamples; // #samples written to the playout buffer
2754 playedSamples = mmtime.u.sample; // current playout position in the playout buffer
2755
2756 // derive remaining amount (in ms) of data in the playout buffer
2757 msecInPlayoutBuffer = ((writtenSamples - playedSamples)/nSamplesPerMs);
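    // Illustrative example (not from the original code): at 48 kHz nSamplesPerMs is 48, so
    // writtenSamples - playedSamples == 3840 corresponds to 3840 / 48 = 80 ms of queued audio.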
2758
2759 playedDifference = (long) (_playedSamplesOld - playedSamples);
2760
2761 if (playedDifference > 64000)
2762 {
2763         // If the sound card's number-of-played-out-samples variable wraps around before
2764         // writtenSamples wraps around, this needs to be adjusted. This can happen on
2765         // sound cards that use less than 32 bits to keep track of the number of played out
2766         // samples. To avoid being fooled by sound cards that sometimes produce false
2767         // output, we compare the old value minus the new value with a large value. This is
2768         // necessary because some sound cards produce an output like 153, 198, 175, 230 which
2769         // would trigger the wrap-around function if we didn't compare with a large value.
2770         // The value 64000 is chosen because 2^16=65536 so we allow wrap around at 16 bits.
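        // Note: POW2(i) expands to (2 << (i - 1)), i.e. 2^i, so the loop below estimates the
        // effective bit width of the sound card's sample counter and POW2(i + 1) then removes
        // exactly one wrap of that counter from _writtenSamples.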
2771
2772 i = 31;
2773 while((_playedSamplesOld <= (unsigned long)POW2(i)) && (i > 14)) {
2774 i--;
2775 }
2776
2777 if((i < 31) && (i > 14)) {
2778             // Avoid adjusting when there is 32-bit wrap-around since that is
2779             // expected and necessary.
2780             //
2781             WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "msecleft() => wrap around occurred (%d bits used by sound card)", (i+1));
2782
2783 _writtenSamples = _writtenSamples - POW2(i + 1);
2784 writtenSamples = _writtenSamples;
2785 msecInPlayoutBuffer = ((writtenSamples - playedSamples)/nSamplesPerMs);
2786 }
2787 }
2788 else if ((_writtenSamplesOld > POW2(31)) && (writtenSamples < 96000))
2789 {
2790         // Wrap around as expected after having used all 32 bits. (But we still
2791         // test whether the wrap around happened earlier, which it should not.)
2792
2793 i = 31;
2794 while (_writtenSamplesOld <= (unsigned long)POW2(i)) {
2795 i--;
2796 }
2797
2798         WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, " msecleft() (wrap around occurred after having used all 32 bits)");
2799
2800 _writtenSamplesOld = writtenSamples;
2801 _playedSamplesOld = playedSamples;
2802 msecInPlayoutBuffer = (int)((writtenSamples + POW2(i + 1) - playedSamples)/nSamplesPerMs);
2803
2804 }
2805 else if ((writtenSamples < 96000) && (playedSamples > POW2(31)))
2806 {
2807         // Wrap around has, as expected, happened for writtenSamples before
2808         // playedSamples so we have to adjust for this until playedSamples
2809         // has also wrapped around.
2810
2811         WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, " msecleft() (wrap around occurred: correction of output is done)");
2812
2813 _writtenSamplesOld = writtenSamples;
2814 _playedSamplesOld = playedSamples;
2815 msecInPlayoutBuffer = (int)((writtenSamples + POW2(32) - playedSamples)/nSamplesPerMs);
2816 }
2817
2818 _writtenSamplesOld = writtenSamples;
2819 _playedSamplesOld = playedSamples;
2820
2821
2822     // We use the following formula to track that playout works as it should:
2823     // y = playedSamples/48 - timeGetTime();
2824     // y represents the clock drift between the system clock and the sound card clock - it should be fairly stable.
2825     // When the exponential mean value of diff(y) moves away from zero something is wrong.
2826     // The exponential formula will accept 1% clock drift but not more.
2827     // The driver error means that we will play too little audio and have a large negative clock drift.
2828     // We kick in our alternative method when the clock drift reaches 20%.
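    // Illustrative note (added): with the exponential filter below,
    // _dc_diff_mean = (990*_dc_diff_mean)/1000 + 10*diff, a constant per-update difference d
    // settles at roughly 1000*d, so the -200 trigger level corresponds to losing about
    // 0.2 ms of audio per update on average.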
2829
2830 int diff,y;
2831 int unsigned time =0;
2832
2833     // If we have other problems that cause playout glitches
2834     // we don't want to switch playout method.
2835     // Check if the playout buffer is extremely low, or if we haven't been able to
2836     // execute our code for more than 40 ms.
2837
2838 time = timeGetTime();
2839
2840 if ((msecInPlayoutBuffer < 20) || (time - _dc_prevtime > 40))
2841 {
2842 _dc_penalty_counter = 100;
2843 }
2844
2845 if ((playedSamples != 0))
2846 {
2847 y = playedSamples/48 - time;
2848 if ((_dc_y_prev != 0) && (_dc_penalty_counter == 0))
2849 {
2850 diff = y - _dc_y_prev;
2851 _dc_diff_mean = (990*_dc_diff_mean)/1000 + 10*diff;
2852 }
2853 _dc_y_prev = y;
2854 }
2855
2856 if (_dc_penalty_counter)
2857 {
2858 _dc_penalty_counter--;
2859 }
2860
2861 if (_dc_diff_mean < -200)
2862 {
2863 // Always reset the filter
2864 _dc_diff_mean = 0;
2865
2866 // Problem is detected. Switch delay method and set min buffer to 80.
2867 // Reset the filter and keep monitoring the filter output.
2868 // If issue is detected a second time, increase min buffer to 100.
2869 // If that does not help, we must modify this scheme further.
2870
2871 _useHeader++;
2872 if (_useHeader == 1)
2873 {
2874 _minPlayBufDelay = 80;
2875 _playWarning = 1; // only warn first time
2876 WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1, "Modification #1: _useHeader = %d, _minPlayBufDelay = %d", _useHeader, _minPlayBufDelay);
2877 }
2878 else if (_useHeader == 2)
2879 {
2880 _minPlayBufDelay = 100; // add some more safety
2881 WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1, "Modification #2: _useHeader = %d, _minPlayBufDelay = %d", _useHeader, _minPlayBufDelay);
2882 }
2883 else
2884 {
2885 // This state should not be entered... (HA)
2886 WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1, "further actions are required!");
2887 }
2888 if (_playWarning == 1)
2889 {
2890 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending playout warning exists");
2891 }
2892 _playWarning = 1; // triggers callback from module process thread
2893 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "kPlayoutWarning message posted: switching to alternative playout delay method");
2894 }
2895 _dc_prevtime = time;
2896 _dc_prevplay = playedSamples;
2897
2898 // Try a very rough method of looking at how many buffers are still playing
2899 ms_Header = 0;
2900 for (i = 0; i < N_BUFFERS_OUT; i++) {
2901 if ((_waveHeaderOut[i].dwFlags & WHDR_INQUEUE)!=0) {
2902 ms_Header += 10;
2903 }
2904 }
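    // Note: the 10 ms added per queued header above assumes that each output buffer holds
    // 10 ms of audio, so ms_Header is only a coarse estimate of the remaining playout time.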
2905
2906 if ((ms_Header-50) > msecInPlayoutBuffer) {
2907         // Test for cases where GetPosition appears to be unreliable (currently we only log this)
2908 TCHAR infoStr[300];
2909 if (_no_of_msecleft_warnings%20==0)
2910 {
2911 StringCchPrintf(infoStr, 300, TEXT("writtenSamples=%i, playedSamples=%i, msecInPlayoutBuffer=%i, ms_Header=%i"), writtenSamples, playedSamples, msecInPlayoutBuffer, ms_Header);
2912 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "%S", infoStr);
2913 }
2914 _no_of_msecleft_warnings++;
2915 }
2916
2917 // If this is true we have had a problem with the playout
2918 if (_useHeader > 0)
2919 {
2920 return (ms_Header);
2921 }
2922
2923
2924 if (ms_Header < msecInPlayoutBuffer)
2925 {
2926 if (_no_of_msecleft_warnings % 100 == 0)
2927 {
2928 TCHAR str[300];
2929 StringCchPrintf(str, 300, TEXT("_no_of_msecleft_warnings=%i, msecInPlayoutBuffer=%i ms_Header=%i (minBuffer=%i buffersize=%i writtenSamples=%i playedSamples=%i)"),
2930 _no_of_msecleft_warnings, msecInPlayoutBuffer, ms_Header, _minPlayBufDelay, _playBufDelay, writtenSamples, playedSamples);
2931 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "%S", str);
2932 }
2933 _no_of_msecleft_warnings++;
2934         ms_Header -= 6;     // Subtract a margin since we only have 10 ms resolution and the header info usually lags GetPosition slightly
2935
2936 if (ms_Header < 0)
2937 ms_Header = 0;
2938
2939 return (ms_Header);
2940 }
2941 else
2942 {
2943 return (msecInPlayoutBuffer);
2944 }
2945 }
2946
2947 // ----------------------------------------------------------------------------
2948 // GetRecordingBufferDelay
2949 // ----------------------------------------------------------------------------
2950
2951 int32_t AudioDeviceWindowsWave::GetRecordingBufferDelay(uint32_t& readSamples, uint32_t& recSamples)
2952 {
2953 long recDifference;
2954 MMTIME mmtime;
2955 MMRESULT mmr;
2956
2957 const uint16_t nSamplesPerMs = (uint16_t)(N_REC_SAMPLES_PER_SEC/1000); // default is 48000/1000 = 48
2958
2959 // Retrieve the current input position of the given waveform-audio input device
2960 //
2961 mmtime.wType = TIME_SAMPLES;
2962 mmr = waveInGetPosition(_hWaveIn, &mmtime, sizeof(mmtime));
2963 if (MMSYSERR_NOERROR != mmr)
2964 {
2965 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetPosition() failed (err=%d)", mmr);
2966 TraceWaveInError(mmr);
2967 }
2968
2969     readSamples = _read_samples;    // updated for each full frame in RecProc()
2970 recSamples = mmtime.u.sample; // remaining time in input queue (recorded but not read yet)
2971
2972 recDifference = (long) (_rec_samples_old - recSamples);
2973
2974 if( recDifference > 64000) {
2975 WEBRTC_TRACE (kTraceDebug, kTraceUtility, -1,"WRAP 1 (recDifference =%d)", recDifference);
2976         // If the sound card's number-of-recorded-samples variable wraps around before
2977         // _read_samples wraps around, this needs to be adjusted. This can happen on
2978         // sound cards that use less than 32 bits to keep track of the number of recorded
2979         // samples. To avoid being fooled by sound cards that sometimes produce false
2980         // output, we compare the old value minus the new value with a large value. This is
2981         // necessary because some sound cards produce an output like 153, 198, 175, 230 which
2982         // would trigger the wrap-around function if we didn't compare with a large value.
2983         // The value 64000 is chosen because 2^16=65536 so we allow wrap around at 16 bits.
2984 //
2985 int i = 31;
2986 while((_rec_samples_old <= (unsigned long)POW2(i)) && (i > 14))
2987 i--;
2988
2989 if((i < 31) && (i > 14)) {
2990             // Avoid adjusting when there is 32-bit wrap-around since that is
2991             // expected and necessary.
2992 //
2993 _read_samples = _read_samples - POW2(i + 1);
2994 readSamples = _read_samples;
2995 _wrapCounter++;
2996 } else {
2997 WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1,"AEC (_rec_samples_old %d recSamples %d)",_rec_samples_old, recSamples);
2998 }
2999 }
3000
3001 if((_wrapCounter>200)){
3002 // Do nothing, handled later
3003 }
3004 else if((_rec_samples_old > POW2(31)) && (recSamples < 96000)) {
3005 WEBRTC_TRACE (kTraceDebug, kTraceUtility, -1,"WRAP 2 (_rec_samples_old %d recSamples %d)",_rec_samples_old, recSamples);
3006 // Wrap around as expected after having used all 32 bits.
3007 _read_samples_old = readSamples;
3008 _rec_samples_old = recSamples;
3009 _wrapCounter++;
3010 return (int)((recSamples + POW2(32) - readSamples)/nSamplesPerMs);
3011
3012
3013 } else if((recSamples < 96000) && (readSamples > POW2(31))) {
3014 WEBRTC_TRACE (kTraceDebug, kTraceUtility, -1,"WRAP 3 (readSamples %d recSamples %d)",readSamples, recSamples);
3015         // Wrap around has, as expected, happened for recSamples before
3016         // readSamples so we have to adjust for this until readSamples
3017         // has also wrapped around.
3018 _read_samples_old = readSamples;
3019 _rec_samples_old = recSamples;
3020 _wrapCounter++;
3021 return (int)((recSamples + POW2(32) - readSamples)/nSamplesPerMs);
3022 }
3023
3024 _read_samples_old = _read_samples;
3025 _rec_samples_old = recSamples;
3026 int res=(((int)_rec_samples_old - (int)_read_samples_old)/nSamplesPerMs);
3027
3028 if((res > 2000)||(res < 0)||(_wrapCounter>200)){
3029 // Reset everything
3030 WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1,"msec_read error (res %d wrapCounter %d)",res, _wrapCounter);
3031 MMTIME mmtime;
3032 mmtime.wType = TIME_SAMPLES;
3033
3034 mmr=waveInGetPosition(_hWaveIn, &mmtime, sizeof(mmtime));
3035 if (mmr != MMSYSERR_NOERROR) {
3036 WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1, "waveInGetPosition failed (mmr=%d)", mmr);
3037 }
3038 _read_samples=mmtime.u.sample;
3039 _read_samples_old=_read_samples;
3040 _rec_samples_old=mmtime.u.sample;
3041
3042 // Guess a decent value
3043 res = 20;
3044 }
3045
3046 _wrapCounter = 0;
3047 return res;
3048 }
3049
3050 // ============================================================================
3051 // Thread Methods
3052 // ============================================================================
3053
3054 // ----------------------------------------------------------------------------
3055 // ThreadFunc
3056 // ----------------------------------------------------------------------------
3057
3058 bool AudioDeviceWindowsWave::ThreadFunc(void* pThis)
3059 {
3060 return (static_cast<AudioDeviceWindowsWave*>(pThis)->ThreadProcess());
3061 }
3062
3063 // ----------------------------------------------------------------------------
3064 // ThreadProcess
3065 // ----------------------------------------------------------------------------
3066
3067 bool AudioDeviceWindowsWave::ThreadProcess()
3068 {
3069 uint32_t time(0);
3070 uint32_t playDiff(0);
3071 uint32_t recDiff(0);
3072
3073 LONGLONG playTime(0);
3074 LONGLONG recTime(0);
3075
3076 switch (_timeEvent.Wait(1000))
3077 {
3078 case kEventSignaled:
3079 break;
3080 case kEventError:
3081 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "EventWrapper::Wait() failed => restarting timer");
3082 _timeEvent.StopTimer();
3083 _timeEvent.StartTimer(true, TIMER_PERIOD_MS);
3084 return true;
3085 case kEventTimeout:
3086 return true;
3087 }
3088
3089 time = AudioDeviceUtility::GetTimeInMS();
3090
3091 if (_startPlay)
3092 {
3093 if (PrepareStartPlayout() == 0)
3094 {
3095 _prevTimerCheckTime = time;
3096 _prevPlayTime = time;
3097 _startPlay = false;
3098 _playing = true;
3099 _playStartEvent.Set();
3100 }
3101 }
3102
3103 if (_startRec)
3104 {
3105 if (PrepareStartRecording() == 0)
3106 {
3107 _prevTimerCheckTime = time;
3108 _prevRecTime = time;
3109 _prevRecByteCheckTime = time;
3110 _startRec = false;
3111 _recording = true;
3112 _recStartEvent.Set();
3113 }
3114 }
3115
3116 if (_playing)
3117 {
3118 playDiff = time - _prevPlayTime;
3119 }
3120
3121 if (_recording)
3122 {
3123 recDiff = time - _prevRecTime;
3124 }
3125
3126 if (_playing || _recording)
3127 {
3128 RestartTimerIfNeeded(time);
3129 }
3130
3131     if (_playing &&
3132         ((playDiff > (uint32_t)(_dTcheckPlayBufDelay - 1)) ||
3133         (playDiff < 0)))
3134 {
3135 Lock();
3136 if (_playing)
3137 {
3138 if (PlayProc(playTime) == -1)
3139 {
3140 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "PlayProc() failed");
3141 }
3142 _prevPlayTime = time;
3143 if (playTime != 0)
3144 _playAcc += playTime;
3145 }
3146 UnLock();
3147 }
3148
3149 if (_playing && (playDiff > 12))
3150 {
3151 // It has been a long time since we were able to play out, try to
3152 // compensate by calling PlayProc again.
3153 //
3154 Lock();
3155 if (_playing)
3156 {
3157 if (PlayProc(playTime))
3158 {
3159 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "PlayProc() failed");
3160 }
3161 _prevPlayTime = time;
3162 if (playTime != 0)
3163 _playAcc += playTime;
3164 }
3165 UnLock();
3166 }
3167
3168     if (_recording &&
3169         ((recDiff > REC_CHECK_TIME_PERIOD_MS) ||
3170         (recDiff < 0)))
3171 {
3172 Lock();
3173 if (_recording)
3174 {
3175 int32_t nRecordedBytes(0);
3176 uint16_t maxIter(10);
3177
3178             // Deliver all available recorded buffers and update the CPU load measurement.
3179             // We use a while loop here to compensate for the fact that the multimedia timer
3180             // can sometimes enter a "bad state" after hibernation where the resolution is
3181             // reduced from ~1 ms to ~10-15 ms.
3182 //
3183 while ((nRecordedBytes = RecProc(recTime)) > 0)
3184 {
3185 maxIter--;
3186 _recordedBytes += nRecordedBytes;
3187 if (recTime && _perfFreq.QuadPart)
3188 {
3189 // Measure the average CPU load:
3190 // This is a simplified expression where an exponential filter is used:
3191 // _avgCPULoad = 0.99 * _avgCPULoad + 0.01 * newCPU,
3192 // newCPU = (recTime+playAcc)/f is time in seconds
3193 // newCPU / 0.01 is the fraction of a 10 ms period
3194 // The two 0.01 cancels each other.
3195 // NOTE - assumes 10ms audio buffers.
3196 //
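                    // Worked example (illustrative only): if processing one 10 ms buffer takes
                    // 1 ms, then (recTime+_playAcc)/_perfFreq.QuadPart == 0.001, i.e. a 10% load
                    // contribution, and _avgCPULoad converges towards 0.1 so that CPULoad()
                    // reports 10.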
3197 _avgCPULoad = (float)(_avgCPULoad*.99 + (recTime+_playAcc)/(double)(_perfFreq.QuadPart));
3198 _playAcc = 0;
3199 }
3200 if (maxIter == 0)
3201 {
3202                     // If we get this message often, our compensation scheme is not sufficient.
3203 WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "failed to compensate for reduced MM-timer resolution");
3204 }
3205 }
3206
3207 if (nRecordedBytes == -1)
3208 {
3209 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "RecProc() failed");
3210 }
3211
3212 _prevRecTime = time;
3213
3214 // Monitor the recording process and generate error/warning callbacks if needed
3215 MonitorRecording(time);
3216 }
3217 UnLock();
3218 }
3219
3220 if (!_recording)
3221 {
3222 _prevRecByteCheckTime = time;
3223 _avgCPULoad = 0;
3224 }
3225
3226 return true;
3227 }
3228
3229 // ----------------------------------------------------------------------------
3230 // RecProc
3231 // ----------------------------------------------------------------------------
3232
3233 int32_t AudioDeviceWindowsWave::RecProc(LONGLONG& consumedTime)
3234 {
3235 MMRESULT res;
3236 uint32_t bufCount(0);
3237 uint32_t nBytesRecorded(0);
3238
3239 consumedTime = 0;
3240
3241 // count modulo N_BUFFERS_IN (0,1,2,...,(N_BUFFERS_IN-1),0,1,2,..)
3242 if (_recBufCount == N_BUFFERS_IN)
3243 {
3244 _recBufCount = 0;
3245 }
3246
3247 bufCount = _recBufCount;
3248
3249 // take mono/stereo mode into account when deriving size of a full buffer
3250 const uint16_t bytesPerSample = 2*_recChannels;
3251 const uint32_t fullBufferSizeInBytes = bytesPerSample * REC_BUF_SIZE_IN_SAMPLES;
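    // Illustrative example (assuming REC_BUF_SIZE_IN_SAMPLES is one 10 ms frame at 48 kHz,
    // i.e. 480 samples): a full stereo 16-bit buffer is 2 * 2 * 480 = 1920 bytes.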
3252
3253 // read number of recorded bytes for the given input-buffer
3254 nBytesRecorded = _waveHeaderIn[bufCount].dwBytesRecorded;
3255
3256 if (nBytesRecorded == fullBufferSizeInBytes ||
3257 (nBytesRecorded > 0))
3258 {
3259 int32_t msecOnPlaySide;
3260 int32_t msecOnRecordSide;
3261 uint32_t writtenSamples;
3262 uint32_t playedSamples;
3263 uint32_t readSamples, recSamples;
3264 bool send = true;
3265
3266 uint32_t nSamplesRecorded = (nBytesRecorded/bytesPerSample); // divide by 2 or 4 depending on mono or stereo
3267
3268 if (nBytesRecorded == fullBufferSizeInBytes)
3269 {
3270 _timesdwBytes = 0;
3271 }
3272 else
3273 {
3274 // Test if it is stuck on this buffer
3275 _timesdwBytes++;
3276 if (_timesdwBytes < 5)
3277 {
3278 // keep trying
3279 return (0);
3280 }
3281 else
3282 {
3283 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id,"nBytesRecorded=%d => don't use", nBytesRecorded);
3284 _timesdwBytes = 0;
3285 send = false;
3286 }
3287 }
3288
3289 // store the recorded buffer (no action will be taken if the #recorded samples is not a full buffer)
3290 _ptrAudioBuffer->SetRecordedBuffer(_waveHeaderIn[bufCount].lpData, nSamplesRecorded);
3291
3292 // update #samples read
3293 _read_samples += nSamplesRecorded;
3294
3295 // Check how large the playout and recording buffers are on the sound card.
3296 // This info is needed by the AEC.
3297 //
3298 msecOnPlaySide = GetPlayoutBufferDelay(writtenSamples, playedSamples);
3299 msecOnRecordSide = GetRecordingBufferDelay(readSamples, recSamples);
3300
3301 // If we use the alternative playout delay method, skip the clock drift compensation
3302 // since it will be an unreliable estimate and might degrade AEC performance.
3303 int32_t drift = (_useHeader > 0) ? 0 : GetClockDrift(playedSamples, recSamples);
3304
3305 _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, drift);
3306
3307 _ptrAudioBuffer->SetTypingStatus(KeyPressed());
3308
3309 // Store the play and rec delay values for video synchronization
3310 _sndCardPlayDelay = msecOnPlaySide;
3311 _sndCardRecDelay = msecOnRecordSide;
3312
3313 LARGE_INTEGER t1,t2;
3314
3315 if (send)
3316 {
3317 QueryPerformanceCounter(&t1);
3318
3319 // deliver recorded samples at specified sample rate, mic level etc. to the observer using callback
3320 UnLock();
3321 _ptrAudioBuffer->DeliverRecordedData();
3322 Lock();
3323
3324 QueryPerformanceCounter(&t2);
3325
3326 if (InputSanityCheckAfterUnlockedPeriod() == -1)
3327 {
3328 // assert(false);
3329 return -1;
3330 }
3331 }
3332
3333 if (_AGC)
3334 {
3335 uint32_t newMicLevel = _ptrAudioBuffer->NewMicLevel();
3336 if (newMicLevel != 0)
3337 {
3338 // The VQE will only deliver non-zero microphone levels when a change is needed.
3339 WEBRTC_TRACE(kTraceStream, kTraceUtility, _id,"AGC change of volume: => new=%u", newMicLevel);
3340
3341 // We store this outside of the audio buffer to avoid
3342 // having it overwritten by the getter thread.
3343 _newMicLevel = newMicLevel;
3344 SetEvent(_hSetCaptureVolumeEvent);
3345 }
3346 }
3347
3348 // return utilized buffer to queue after specified delay (default is 4)
3349 if (_recDelayCount > (_recPutBackDelay-1))
3350 {
3351             // step the buffer index back to compensate for the put-back delay
3352 bufCount = (bufCount + N_BUFFERS_IN - _recPutBackDelay) % N_BUFFERS_IN;
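            // Illustrative example (not from the original code): with the default
            // _recPutBackDelay of 4 and an active index of 6, the header returned below is
            // (6 + N_BUFFERS_IN - 4) % N_BUFFERS_IN, i.e. the one that was filled four
            // buffers ago.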
3353
3354 // reset counter so we can make new detection
3355 _waveHeaderIn[bufCount].dwBytesRecorded = 0;
3356
3357 // return the utilized wave-header after certain delay (given by _recPutBackDelay)
3358 res = waveInUnprepareHeader(_hWaveIn, &(_waveHeaderIn[bufCount]), sizeof(WAVEHDR));
3359 if (MMSYSERR_NOERROR != res)
3360 {
3361 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInUnprepareHeader(%d) failed (err=%d)", bufCount, res);
3362 TraceWaveInError(res);
3363 }
3364
3365 // ensure that the utilized header can be used again
3366 res = waveInPrepareHeader(_hWaveIn, &(_waveHeaderIn[bufCount]), sizeof(WAVEHDR));
3367 if (res != MMSYSERR_NOERROR)
3368 {
3369 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveInPrepareHeader(%d) failed (err=%d)", bufCount, res);
3370 TraceWaveInError(res);
3371 return -1;
3372 }
3373
3374 // add the utilized buffer to the queue again
3375 res = waveInAddBuffer(_hWaveIn, &(_waveHeaderIn[bufCount]), sizeof(WAVEHDR));
3376 if (res != MMSYSERR_NOERROR)
3377 {
3378 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveInAddBuffer(%d) failed (err=%d)", bufCount, res);
3379 TraceWaveInError(res);
3380 if (_recPutBackDelay < 50)
3381 {
3382 _recPutBackDelay++;
3383 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "_recPutBackDelay increased to %d", _recPutBackDelay);
3384 }
3385 else
3386 {
3387 if (_recError == 1)
3388 {
3389 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending recording error exists");
3390 }
3391 _recError = 1; // triggers callback from module process thread
3392 WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kRecordingError message posted: _recPutBackDelay=%u", _recPutBackDelay);
3393 }
3394 }
3395 } // if (_recDelayCount > (_recPutBackDelay-1))
3396
3397 if (_recDelayCount < (_recPutBackDelay+1))
3398 {
3399 _recDelayCount++;
3400 }
3401
3402 // increase main buffer count since one complete buffer has now been delivered
3403 _recBufCount++;
3404
3405 if (send) {
3406 // Calculate processing time
3407 consumedTime = (int)(t2.QuadPart-t1.QuadPart);
3408 // handle counter wraps; the consumed time should never exceed one second
3409 if ((consumedTime > _perfFreq.QuadPart) || (consumedTime < 0))
3410 consumedTime = 0;
3411 }
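// Note (added for clarity): consumedTime is expressed in QueryPerformanceCounter ticks;
// _perfFreq presumably holds the value returned by QueryPerformanceFrequency(), so
// anything larger than one second's worth of ticks (or negative, after a counter wrap)
// is discarded as bogus.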
3412
3413 } // if ((nBytesRecorded == fullBufferSizeInBytes))
3414
3415 return nBytesRecorded;
3416 }
3417
3418 // ----------------------------------------------------------------------------
3419 // PlayProc
3420 // ----------------------------------------------------------------------------
3421
3422 int AudioDeviceWindowsWave::PlayProc(LONGLONG& consumedTime)
3423 {
3424 int32_t remTimeMS(0);
3425 int8_t playBuffer[4*PLAY_BUF_SIZE_IN_SAMPLES];
3426 uint32_t writtenSamples(0);
3427 uint32_t playedSamples(0);
3428
3429 LARGE_INTEGER t1;
3430 LARGE_INTEGER t2;
3431
3432 consumedTime = 0;
3433 _waitCounter++;
3434
3435 // Get number of ms of sound that remains in the sound card buffer for playback.
3436 //
3437 remTimeMS = GetPlayoutBufferDelay(writtenSamples, playedSamples);
3438
3439 // The threshold can be adaptive or fixed. The adaptive scheme is updated
3440 // also for fixed mode but the updated threshold is not utilized.
3441 //
3442 const uint16_t thresholdMS =
3443 (_playBufType == AudioDeviceModule::kAdaptiveBufferSize) ? _playBufDelay : _playBufDelayFixed;
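// (Illustrative, assuming the standard AudioDeviceModule interface: an application
// would select the fixed scheme and its size with something like
//    adm->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, 80);
// otherwise the adaptive scheme below governs the threshold.)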
3444
3445 if (remTimeMS < thresholdMS + 9)
3446 {
3447 _dTcheckPlayBufDelay = 5;
3448
3449 if (remTimeMS == 0)
3450 {
3451 WEBRTC_TRACE(kTraceInfo, kTraceUtility, _id, "playout buffer is empty => we must adapt...");
3452 if (_waitCounter > 30)
3453 {
3454 _erZeroCounter++;
3455 if (_erZeroCounter == 2)
3456 {
3457 _playBufDelay += 15;
3458 _minPlayBufDelay += 20;
3459 _waitCounter = 50;
3460 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "New playout states (er=0,erZero=2): minPlayBufDelay=%u, playBufDelay=%u", _minPlayBufDelay, _playBufDelay);
3461 }
3462 else if (_erZeroCounter == 3)
3463 {
3464 _erZeroCounter = 0;
3465 _playBufDelay += 30;
3466 _minPlayBufDelay += 25;
3467 _waitCounter = 0;
3468 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "New playout states (er=0, erZero=3): minPlayBufDelay=%u, playBufDelay=%u", _minPlayBufDelay, _playBufDelay);
3469 }
3470 else
3471 {
3472 _minPlayBufDelay += 10;
3473 _playBufDelay += 15;
3474 _waitCounter = 50;
3475 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "New playout states (er=0, erZero=1): minPlayBufDelay=%u, playBufDelay=%u", _minPlayBufDelay, _playBufDelay);
3476 }
3477 }
3478 }
3479 else if (remTimeMS < _minPlayBufDelay)
3480 {
3481 // If there is less audio in the playout buffer than the adaptive minimum
3482 // (_minPlayBufDelay), increase the buffer-size limit value. _waitCounter prevents
3483 // _playBufDelay from being increased every time this function is called.
3484
3485 if (_waitCounter > 30)
3486 {
3487 _playBufDelay += 10;
3488 if (_intro == 0)
3489 _waitCounter = 0;
3490 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is increased: playBufDelay=%u", _playBufDelay);
3491 }
3492 }
3493 else if (remTimeMS < thresholdMS - 9)
3494 {
3495 _erZeroCounter = 0;
3496 }
3497 else
3498 {
3499 _erZeroCounter = 0;
3500 _dTcheckPlayBufDelay = 10;
3501 }
3502
3503 QueryPerformanceCounter(&t1); // measure time: START
3504
3505 // Ask for new PCM data to be played out using the AudioDeviceBuffer.
3506 // Ensure that this callback is executed without taking the audio-thread lock.
3507 //
3508 UnLock();
3509 uint32_t nSamples = _ptrAudioBuffer->RequestPlayoutData(PLAY_BUF_SIZE_IN_SAMPLES);
3510 Lock();
3511
3512 if (OutputSanityCheckAfterUnlockedPeriod() == -1)
3513 {
3514 // assert(false);
3515 return -1;
3516 }
3517
3518 nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
3519 if (nSamples != PLAY_BUF_SIZE_IN_SAMPLES)
3520 {
3521 WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "invalid number of output samples(%d)", nSamples);
3522 }
3523
3524 QueryPerformanceCounter(&t2); // measure time: STOP
3525 consumedTime = (int)(t2.QuadPart - t1.QuadPart);
3526
3527 Write(playBuffer, PLAY_BUF_SIZE_IN_SAMPLES);
3528
3529 } // if (remTimeMS < thresholdMS + 9)
3530 else if (thresholdMS + 9 < remTimeMS)
3531 {
3532 _erZeroCounter = 0;
3533 _dTcheckPlayBufDelay = 2; // check buffer more often
3534 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Need to check playout buffer more often (dT=%u, remTimeMS=%u)", _dTcheckPlayBufDelay, remTimeMS);
3535 }
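// Summary of the two regimes above: when remTimeMS is below thresholdMS + 9, a new
// block of PLAY_BUF_SIZE_IN_SAMPLES samples is fetched and written (and deep underruns
// additionally raise the thresholds); when it is above thresholdMS + 9, nothing is
// written and the buffer is simply polled more often (_dTcheckPlayBufDelay = 2).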
3536
3537 // If the buffer size has been stable for 20 seconds, try to decrease it
3538 if (_waitCounter > 2000)
3539 {
3540 _intro = 0;
3541 _playBufDelay--;
3542 _waitCounter = 1990;
3543 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is decreased: playBufDelay=%u", _playBufDelay);
3544 }
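// Note: _waitCounter is incremented once per PlayProc() call; assuming the timer drives
// PlayProc roughly every 10 ms, 2000 counts correspond to the ~20 seconds mentioned
// above, and resetting to 1990 lets the threshold decay again only after another ~100 ms.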
3545
3546 // Limit the minimum sound card (playback) delay to adaptive minimum delay
3547 if (_playBufDelay < _minPlayBufDelay)
3548 {
3549 _playBufDelay = _minPlayBufDelay;
3550 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is limited to %u", _minPlayBufDelay);
3551 }
3552
3553 // Limit the maximum sound card (playback) delay to 150 ms
3554 if (_playBufDelay > 150)
3555 {
3556 _playBufDelay = 150;
3557 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is limited to %d", _playBufDelay);
3558 }
3559
3560 // Cap the minimum sound card (playback) delay at _MAX_minBuffer (65 ms).
3561 // This cap is deactivated in "useHeader" mode (_useHeader > 0).
3562 if (_minPlayBufDelay > _MAX_minBuffer &&
3563 (_useHeader == 0))
3564 {
3565 _minPlayBufDelay = _MAX_minBuffer;
3566 WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Minimum playout threshold is limited to %d", _MAX_minBuffer);
3567 }
3568
3569 return (0);
3570 }
3571
3572 // ----------------------------------------------------------------------------
3573 // Write
3574 // ----------------------------------------------------------------------------
3575
3576 int32_t AudioDeviceWindowsWave::Write(int8_t* data, uint16_t nSamples)
3577 {
3578 if (_hWaveOut == NULL)
3579 {
3580 return -1;
3581 }
3582
3583 if (_playIsInitialized)
3584 {
3585 MMRESULT res;
3586
3587 const uint16_t bufCount(_playBufCount);
3588
3589 // Place data in the memory associated with _waveHeaderOut[bufCount]
3590 //
3591 const int16_t nBytes = (2*_playChannels)*nSamples;
3592 memcpy(&_playBuffer[bufCount][0], &data[0], nBytes);
3593
3594 // Send a data block to the given waveform-audio output device.
3595 //
3596 // When the buffer is finished, the WHDR_DONE bit is set in the dwFlags
3597 // member of the WAVEHDR structure. The buffer must be prepared with the
3598 // waveOutPrepareHeader function before it is passed to waveOutWrite.
3599 // Unless the device is paused by calling the waveOutPause function,
3600 // playback begins when the first data block is sent to the device.
3601 //
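// (Illustrative only, not part of the original code: each _waveHeaderOut[n] is assumed
// to have been prepared during playout initialization along these lines.)
//
//   WAVEHDR& hdr = _waveHeaderOut[n];
//   hdr.lpData = reinterpret_cast<LPSTR>(&_playBuffer[n][0]);
//   hdr.dwBufferLength = 2 * _playChannels * PLAY_BUF_SIZE_IN_SAMPLES;
//   hdr.dwFlags = 0;
//   waveOutPrepareHeader(_hWaveOut, &hdr, sizeof(WAVEHDR));
//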
3602 res = waveOutWrite(_hWaveOut, &_waveHeaderOut[bufCount], sizeof(_waveHeaderOut[bufCount]));
3603 if (MMSYSERR_NOERROR != res)
3604 {
3605 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutWrite(%d) failed (err=%d)", bufCount, res);
3606 TraceWaveOutError(res);
3607
3608 _writeErrors++;
3609 if (_writeErrors > 10)
3610 {
3611 if (_playError == 1)
3612 {
3613 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending playout error exists");
3614 }
3615 _playError = 1; // triggers callback from module process thread
3616 WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kPlayoutError message posted: _writeErrors=%u", _writeErrors);
3617 }
3618
3619 return -1;
3620 }
3621
3622 _playBufCount = (_playBufCount+1) % N_BUFFERS_OUT; // increase buffer counter modulo size of total buffer
3623 _writtenSamples += nSamples; // each sample is 2 or 4 bytes
3624 _writeErrors = 0;
3625 }
3626
3627 return 0;
3628 }
3629
3630 // ----------------------------------------------------------------------------
3631 // GetClockDrift
3632 // ----------------------------------------------------------------------------
3633
3634 int32_t AudioDeviceWindowsWave::GetClockDrift(const uint32_t plSamp, const uint32_t rcSamp)
3635 {
3636 int drift = 0;
3637 unsigned int plSampDiff = 0, rcSampDiff = 0;
3638
3639 if (plSamp >= _plSampOld)
3640 {
3641 plSampDiff = plSamp - _plSampOld;
3642 }
3643 else
3644 {
3645 // Wrap
3646 int i = 31;
3647 while(_plSampOld <= (unsigned int)POW2(i))
3648 {
3649 i--;
3650 }
3651
3652 // Add the amount remaining prior to wrapping
3653 plSampDiff = plSamp + POW2(i + 1) - _plSampOld;
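// Illustration (assuming the sample counter wraps at a power of two, here 2^31):
// with _plSampOld == 0x7FFFFF00 and plSamp == 0x100 after a wrap, the loop above stops
// at i == 30, and plSampDiff == 0x100 + 2^31 - 0x7FFFFF00 == 512, which is the true
// number of samples played since the previous call.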
3654 }
3655
3656 if (rcSamp >= _rcSampOld)
3657 {
3658 rcSampDiff = rcSamp - _rcSampOld;
3659 }
3660 else
3661 { // Wrap
3662 int i = 31;
3663 while(_rcSampOld <= (unsigned int)POW2(i))
3664 {
3665 i--;
3666 }
3667
3668 rcSampDiff = rcSamp + POW2(i + 1) - _rcSampOld;
3669 }
3670
3671 drift = plSampDiff - rcSampDiff;
3672
3673 _plSampOld = plSamp;
3674 _rcSampOld = rcSamp;
3675
3676 return drift;
3677 }
3678
3679 // ----------------------------------------------------------------------------
3680 // MonitorRecording
3681 // ----------------------------------------------------------------------------
3682
3683 int32_t AudioDeviceWindowsWave::MonitorRecording(const uint32_t time)
3684 {
3685 const uint16_t bytesPerSample = 2*_recChannels;
3686 const uint32_t nRecordedSamples = _recordedBytes/bytesPerSample;
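// Note: bytesPerSample is really bytes per frame (2 bytes per 16-bit sample times the
// number of channels), so nRecordedSamples counts frames and the 5-second test below
// holds for both mono and stereo capture.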
3687
3688 if (nRecordedSamples > 5*N_REC_SAMPLES_PER_SEC)
3689 {
3690 // 5 seconds of audio has been recorded...
3691 if ((time - _prevRecByteCheckTime) > 5700)
3692 {
3693 // ...and more than 5.7 seconds have passed since we last did this check,
3694 // i.e. we have not been able to record 5 seconds of audio within 5.7 seconds;
3695 // hence, a problem should be reported.
3696 // This problem can be related to USB overload.
3697 //
3698 if (_recWarning == 1)
3699 {
3700 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending recording warning exists");
3701 }
3702 _recWarning = 1; // triggers callback from module process thread
3703 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "kRecordingWarning message posted: time-_prevRecByteCheckTime=%d", time - _prevRecByteCheckTime);
3704 }
3705
3706 _recordedBytes = 0; // restart "check again when 5 seconds are recorded"
3707 _prevRecByteCheckTime = time; // reset timer to measure time for recording of 5 seconds
3708 }
3709
3710 if ((time - _prevRecByteCheckTime) > 8000)
3711 {
3712 // More than 8 seconds have passed since we were last able to confirm that
3713 // 5 seconds of audio had been recorded; hence, we have not recorded 5 seconds
3714 // within 8 seconds => the complete recording process is most likely dead.
3715 //
3716 if (_recError == 1)
3717 {
3718 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending recording error exists");
3719 }
3720 _recError = 1; // triggers callback from module process thread
3721 WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kRecordingError message posted: time-_prevRecByteCheckTime=%d", time - _prevRecByteCheckTime);
3722
3723 _prevRecByteCheckTime = time;
3724 }
3725
3726 return 0;
3727 }
3728
3729 // ----------------------------------------------------------------------------
3730 // RestartTimerIfNeeded
3731 //
3732 // Restart the timer if needed (timers seem to get messed up after hibernation).
3733 // ----------------------------------------------------------------------------
3734
3735 int32_t AudioDeviceWindowsWave::RestartTimerIfNeeded(const uint32_t time)
3736 {
3737 const uint32_t diffMS = time - _prevTimerCheckTime;
3738 _prevTimerCheckTime = time;
3739
3740 if (diffMS > 7)
3741 {
3742 // one timer-issue detected...
3743 _timerFaults++;
3744 if (_timerFaults > 5 && _timerRestartAttempts < 2)
3745 {
3746 // Reinitialize timer event if event fails to execute at least every 5ms.
3747 // On some machines this helps and the timer starts working as it should again;
3748 // however, not on all machines (we have seen issues on e.g. the IBM T60).
3749 // Therefore, the scheme below ensures that we make at most 2 attempts to restart the timer.
3750 // For the cases where restart does not do the trick, we compensate for the reduced
3751 // resolution on both the recording and playout sides.
3752 WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, " timer issue detected => timer is restarted");
3753 _timeEvent.StopTimer();
3754 _timeEvent.StartTimer(true, TIMER_PERIOD_MS);
3755 // make sure the timer gets time to start up and that we don't kill/restart it several times in a row
3756 _timerFaults = -20;
3757 _timerRestartAttempts++;
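// Note: starting from -20, the fault counter must exceed 5 again (i.e. 26 more late
// callbacks) before another restart can be considered, and after 2 restart attempts
// no further restarts are made.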
3758 }
3759 }
3760 else
3761 {
3762 // restart timer-check scheme since we are OK
3763 _timerFaults = 0;
3764 _timerRestartAttempts = 0;
3765 }
3766
3767 return 0;
3768 }
3769
3770
3771 bool AudioDeviceWindowsWave::KeyPressed() const {
3772
3773 int key_down = 0;
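// GetAsyncKeyState() sets the least-significant bit of its return value if the key was
// pressed after the previous call; OR-ing that bit over the VK_SPACE..VK_NUMLOCK range
// yields a cheap "was any key pressed since the last poll" flag.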
3774 for (int key = VK_SPACE; key < VK_NUMLOCK; key++) {
3775 short res = GetAsyncKeyState(key);
3776 key_down |= res & 0x1; // Get the LSB
3777 }
3778 return (key_down > 0);
3779 }
3780 } // namespace webrtc
3781