1 /* GStreamer
2 * Copyright (C) 2021 Seungha Yang <seungha@centricular.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
18 */
19
20 #include "gstwasapi2ringbuffer.h"
21 #include <string.h>
22 #include <mfapi.h>
23 #include <wrl.h>
24
25 GST_DEBUG_CATEGORY_STATIC (gst_wasapi2_ring_buffer_debug);
26 #define GST_CAT_DEFAULT gst_wasapi2_ring_buffer_debug
27
28 static HRESULT gst_wasapi2_ring_buffer_io_callback (GstWasapi2RingBuffer * buf);
29 static HRESULT
30 gst_wasapi2_ring_buffer_loopback_callback (GstWasapi2RingBuffer * buf);
31
32 /* *INDENT-OFF* */
33 using namespace Microsoft::WRL;
34
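/* COM callback object used to drive I/O from a Media Foundation work queue.
 * It implements IMFAsyncCallback and keeps only a weak reference to the
 * owning GstWasapi2RingBuffer, so the ring buffer can go away while a work
 * item is still pending. Invoke() dispatches either to the normal I/O
 * callback or, for the silence-feed render stream used by loopback capture,
 * to the loopback callback. */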
35 class GstWasapiAsyncCallback : public IMFAsyncCallback
36 {
37 public:
38   GstWasapiAsyncCallback (GstWasapi2RingBuffer * listener,
39 DWORD queue_id,
40 gboolean loopback)
41 : ref_count_(1)
42 , queue_id_(queue_id)
43 , loopback_(loopback)
44 {
45 g_weak_ref_init (&listener_, listener);
46 }
47
48   virtual ~GstWasapiAsyncCallback ()
49 {
50     g_weak_ref_clear (&listener_);
51 }
52
53 /* IUnknown */
54 STDMETHODIMP_ (ULONG)
55   AddRef (void)
56 {
57 GST_TRACE ("%p, %d", this, ref_count_);
58 return InterlockedIncrement (&ref_count_);
59 }
60 STDMETHODIMP_ (ULONG)
61   Release (void)
62 {
63 ULONG ref_count;
64
65 GST_TRACE ("%p, %d", this, ref_count_);
66 ref_count = InterlockedDecrement (&ref_count_);
67
68 if (ref_count == 0) {
69 GST_TRACE ("Delete instance %p", this);
70 delete this;
71 }
72
73 return ref_count;
74 }
75
76 STDMETHODIMP
77   QueryInterface (REFIID riid, void ** object)
78 {
79 if (!object)
80 return E_POINTER;
81
82 if (riid == IID_IUnknown) {
83 GST_TRACE ("query IUnknown interface %p", this);
84 *object = static_cast<IUnknown *> (static_cast<GstWasapiAsyncCallback *> (this));
85 } else if (riid == __uuidof (IMFAsyncCallback)) {
86       GST_TRACE ("query IMFAsyncCallback interface %p", this);
87       *object = static_cast<IMFAsyncCallback *> (static_cast<GstWasapiAsyncCallback *> (this));
88 } else {
89 *object = nullptr;
90 return E_NOINTERFACE;
91 }
92
93 AddRef ();
94
95 return S_OK;
96 }
97
98 /* IMFAsyncCallback */
99 STDMETHODIMP
100   GetParameters (DWORD * pdwFlags, DWORD * pdwQueue)
101 {
102 *pdwFlags = 0;
103 *pdwQueue = queue_id_;
104
105 return S_OK;
106 }
107
108 STDMETHODIMP
109   Invoke (IMFAsyncResult * pAsyncResult)
110 {
111 GstWasapi2RingBuffer *ringbuffer;
112 HRESULT hr;
113
114 ringbuffer = (GstWasapi2RingBuffer *) g_weak_ref_get (&listener_);
115 if (!ringbuffer) {
116 GST_WARNING ("Listener was removed");
117 return S_OK;
118 }
119
120 if (loopback_)
121 hr = gst_wasapi2_ring_buffer_loopback_callback (ringbuffer);
122 else
123 hr = gst_wasapi2_ring_buffer_io_callback (ringbuffer);
124 gst_object_unref (ringbuffer);
125
126 return hr;
127 }
128
129 private:
130 ULONG ref_count_;
131 DWORD queue_id_;
132 GWeakRef listener_;
133 gboolean loopback_;
134 };
135 /* *INDENT-ON* */
136
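/* Instance structure. The MF callback objects and their IMFAsyncResult are
 * used together with MFPutWaitingWorkItem() so that the shared work queue
 * wakes us up whenever WASAPI signals event_handle (or loopback_event_handle
 * for the silence-feed render stream of loopback capture). */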
137 struct _GstWasapi2RingBuffer
138 {
139 GstAudioRingBuffer parent;
140
141 GstWasapi2ClientDeviceClass device_class;
142 gchar *device_id;
143 gboolean low_latency;
144 gboolean mute;
145 gdouble volume;
146 gpointer dispatcher;
147 gboolean can_auto_routing;
148
149 GstWasapi2Client *client;
150 GstWasapi2Client *loopback_client;
151 IAudioCaptureClient *capture_client;
152 IAudioRenderClient *render_client;
153 ISimpleAudioVolume *volume_object;
154
155 GstWasapiAsyncCallback *callback_object;
156 IMFAsyncResult *callback_result;
157 MFWORKITEM_KEY callback_key;
158 HANDLE event_handle;
159
160 GstWasapiAsyncCallback *loopback_callback_object;
161 IMFAsyncResult *loopback_callback_result;
162 MFWORKITEM_KEY loopback_callback_key;
163 HANDLE loopback_event_handle;
164
165 guint64 expected_position;
166 gboolean is_first;
167 gboolean running;
168 UINT32 buffer_size;
169 UINT32 loopback_buffer_size;
170
171 gint segoffset;
172 guint64 write_frame_offset;
173
174 GMutex volume_lock;
175 gboolean mute_changed;
176 gboolean volume_changed;
177
178 GstCaps *supported_caps;
179 };
180
181 static void gst_wasapi2_ring_buffer_constructed (GObject * object);
182 static void gst_wasapi2_ring_buffer_dispose (GObject * object);
183 static void gst_wasapi2_ring_buffer_finalize (GObject * object);
184
185 static gboolean gst_wasapi2_ring_buffer_open_device (GstAudioRingBuffer * buf);
186 static gboolean gst_wasapi2_ring_buffer_close_device (GstAudioRingBuffer * buf);
187 static gboolean gst_wasapi2_ring_buffer_acquire (GstAudioRingBuffer * buf,
188 GstAudioRingBufferSpec * spec);
189 static gboolean gst_wasapi2_ring_buffer_release (GstAudioRingBuffer * buf);
190 static gboolean gst_wasapi2_ring_buffer_start (GstAudioRingBuffer * buf);
191 static gboolean gst_wasapi2_ring_buffer_resume (GstAudioRingBuffer * buf);
192 static gboolean gst_wasapi2_ring_buffer_pause (GstAudioRingBuffer * buf);
193 static gboolean gst_wasapi2_ring_buffer_stop (GstAudioRingBuffer * buf);
194 static guint gst_wasapi2_ring_buffer_delay (GstAudioRingBuffer * buf);
195
196 #define gst_wasapi2_ring_buffer_parent_class parent_class
197 G_DEFINE_TYPE (GstWasapi2RingBuffer, gst_wasapi2_ring_buffer,
198 GST_TYPE_AUDIO_RING_BUFFER);
199
200 static void
201 gst_wasapi2_ring_buffer_class_init (GstWasapi2RingBufferClass * klass)
202 {
203 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
204 GstAudioRingBufferClass *ring_buffer_class =
205 GST_AUDIO_RING_BUFFER_CLASS (klass);
206
207 gobject_class->constructed = gst_wasapi2_ring_buffer_constructed;
208 gobject_class->dispose = gst_wasapi2_ring_buffer_dispose;
209 gobject_class->finalize = gst_wasapi2_ring_buffer_finalize;
210
211 ring_buffer_class->open_device =
212 GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_open_device);
213 ring_buffer_class->close_device =
214 GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_close_device);
215 ring_buffer_class->acquire =
216 GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_acquire);
217 ring_buffer_class->release =
218 GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_release);
219 ring_buffer_class->start = GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_start);
220 ring_buffer_class->resume =
221 GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_resume);
222 ring_buffer_class->pause = GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_pause);
223 ring_buffer_class->stop = GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_stop);
224 ring_buffer_class->delay = GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_delay);
225
226 GST_DEBUG_CATEGORY_INIT (gst_wasapi2_ring_buffer_debug,
227 "wasapi2ringbuffer", 0, "wasapi2ringbuffer");
228 }
229
230 static void
231 gst_wasapi2_ring_buffer_init (GstWasapi2RingBuffer * self)
232 {
233 self->volume = 1.0f;
234 self->mute = FALSE;
235
236 self->event_handle = CreateEvent (nullptr, FALSE, FALSE, nullptr);
237 self->loopback_event_handle = CreateEvent (nullptr, FALSE, FALSE, nullptr);
238 g_mutex_init (&self->volume_lock);
239 }
240
241 static void
242 gst_wasapi2_ring_buffer_constructed (GObject * object)
243 {
244 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (object);
245 HRESULT hr;
246 DWORD task_id = 0;
247 DWORD queue_id = 0;
248
249 hr = MFLockSharedWorkQueue (L"Pro Audio", 0, &task_id, &queue_id);
250 if (!gst_wasapi2_result (hr)) {
251 GST_WARNING_OBJECT (self, "Failed to get work queue id");
252 goto out;
253 }
254
255 self->callback_object = new GstWasapiAsyncCallback (self, queue_id, FALSE);
256 hr = MFCreateAsyncResult (nullptr, self->callback_object, nullptr,
257 &self->callback_result);
258 if (!gst_wasapi2_result (hr)) {
259 GST_WARNING_OBJECT (self, "Failed to create IAsyncResult");
260 GST_WASAPI2_CLEAR_COM (self->callback_object);
261 }
262
263 /* Create another callback object for loopback silence feed */
264 self->loopback_callback_object =
265 new GstWasapiAsyncCallback (self, queue_id, TRUE);
266 hr = MFCreateAsyncResult (nullptr, self->loopback_callback_object, nullptr,
267 &self->loopback_callback_result);
268 if (!gst_wasapi2_result (hr)) {
269 GST_WARNING_OBJECT (self, "Failed to create IAsyncResult");
270 GST_WASAPI2_CLEAR_COM (self->callback_object);
271 GST_WASAPI2_CLEAR_COM (self->callback_result);
272 GST_WASAPI2_CLEAR_COM (self->loopback_callback_object);
273 }
274
275 out:
276 G_OBJECT_CLASS (parent_class)->constructed (object);
277 }
278
279 static void
280 gst_wasapi2_ring_buffer_dispose (GObject * object)
281 {
282 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (object);
283
284 GST_WASAPI2_CLEAR_COM (self->render_client);
285 GST_WASAPI2_CLEAR_COM (self->capture_client);
286 GST_WASAPI2_CLEAR_COM (self->volume_object);
287 GST_WASAPI2_CLEAR_COM (self->callback_result);
288 GST_WASAPI2_CLEAR_COM (self->callback_object);
289 GST_WASAPI2_CLEAR_COM (self->loopback_callback_result);
290 GST_WASAPI2_CLEAR_COM (self->loopback_callback_object);
291
292 gst_clear_object (&self->client);
293 gst_clear_object (&self->loopback_client);
294 gst_clear_caps (&self->supported_caps);
295
296 G_OBJECT_CLASS (parent_class)->dispose (object);
297 }
298
299 static void
300 gst_wasapi2_ring_buffer_finalize (GObject * object)
301 {
302 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (object);
303
304 g_free (self->device_id);
305 CloseHandle (self->event_handle);
306 CloseHandle (self->loopback_event_handle);
307 g_mutex_clear (&self->volume_lock);
308
309 G_OBJECT_CLASS (parent_class)->finalize (object);
310 }
311
312 static void
313 gst_wasapi2_ring_buffer_post_open_error (GstWasapi2RingBuffer * self)
314 {
315 GstElement *parent = (GstElement *) GST_OBJECT_PARENT (self);
316
317 if (!parent) {
318 GST_WARNING_OBJECT (self, "Cannot find parent");
319 return;
320 }
321
322 if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
323 GST_ELEMENT_ERROR (parent, RESOURCE, OPEN_WRITE,
324 (nullptr), ("Failed to open device"));
325 } else {
326 GST_ELEMENT_ERROR (parent, RESOURCE, OPEN_READ,
327 (nullptr), ("Failed to open device"));
328 }
329 }
330
331 static void
332 gst_wasapi2_ring_buffer_post_scheduling_error (GstWasapi2RingBuffer * self)
333 {
334 GstElement *parent = (GstElement *) GST_OBJECT_PARENT (self);
335
336 if (!parent) {
337 GST_WARNING_OBJECT (self, "Cannot find parent");
338 return;
339 }
340
341 GST_ELEMENT_ERROR (parent, RESOURCE, FAILED,
342 (nullptr), ("Failed to schedule next I/O"));
343 }
344
345 static void
346 gst_wasapi2_ring_buffer_post_io_error (GstWasapi2RingBuffer * self, HRESULT hr)
347 {
348 GstElement *parent = (GstElement *) GST_OBJECT_PARENT (self);
349 gchar *error_msg;
350
351 if (!parent) {
352 GST_WARNING_OBJECT (self, "Cannot find parent");
353 return;
354 }
355
356 error_msg = gst_wasapi2_util_get_error_message (hr);
357
358 GST_ERROR_OBJECT (self, "Posting I/O error %s (hr: 0x%x)", error_msg, hr);
359 if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
360 GST_ELEMENT_ERROR (parent, RESOURCE, WRITE,
361 ("Failed to write to device"), ("%s, hr: 0x%x", error_msg, hr));
362 } else {
363 GST_ELEMENT_ERROR (parent, RESOURCE, READ,
364 ("Failed to read from device"), ("%s hr: 0x%x", error_msg, hr));
365 }
366
367 g_free (error_msg);
368 }
369
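/* open_device: create the GstWasapi2Client for the configured device class
 * and device id. For loopback capture an additional render-direction client
 * is created as well; it is only used to feed silence (see
 * gst_wasapi2_ring_buffer_fill_loopback_silence ()). */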
370 static gboolean
371 gst_wasapi2_ring_buffer_open_device (GstAudioRingBuffer * buf)
372 {
373 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
374
375 GST_DEBUG_OBJECT (self, "Open");
376
377 if (self->client) {
378 GST_DEBUG_OBJECT (self, "Already opened");
379 return TRUE;
380 }
381
382 self->client = gst_wasapi2_client_new (self->device_class,
383 -1, self->device_id, self->dispatcher);
384 if (!self->client) {
385 gst_wasapi2_ring_buffer_post_open_error (self);
386 return FALSE;
387 }
388
389 g_object_get (self->client, "auto-routing", &self->can_auto_routing, nullptr);
390
391 /* Open another render client to feed silence */
392 if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE) {
393 self->loopback_client =
394 gst_wasapi2_client_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
395 -1, self->device_id, self->dispatcher);
396
397 if (!self->loopback_client) {
398 gst_wasapi2_ring_buffer_post_open_error (self);
399 gst_clear_object (&self->client);
400
401 return FALSE;
402 }
403 }
404
405 return TRUE;
406 }
407
408 static gboolean
409 gst_wasapi2_ring_buffer_close_device_internal (GstAudioRingBuffer * buf)
410 {
411 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
412
413 GST_DEBUG_OBJECT (self, "Close device");
414
415 if (self->running)
416 gst_wasapi2_ring_buffer_stop (buf);
417
418 GST_WASAPI2_CLEAR_COM (self->capture_client);
419 GST_WASAPI2_CLEAR_COM (self->render_client);
420
421 g_mutex_lock (&self->volume_lock);
422 if (self->volume_object)
423 self->volume_object->SetMute (FALSE, nullptr);
424 GST_WASAPI2_CLEAR_COM (self->volume_object);
425 g_mutex_unlock (&self->volume_lock);
426
427 gst_clear_object (&self->client);
428 gst_clear_object (&self->loopback_client);
429
430 return TRUE;
431 }
432
433 static gboolean
434 gst_wasapi2_ring_buffer_close_device (GstAudioRingBuffer * buf)
435 {
436 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
437
438 GST_DEBUG_OBJECT (self, "Close");
439
440 gst_wasapi2_ring_buffer_close_device_internal (buf);
441
442 gst_clear_caps (&self->supported_caps);
443
444 return TRUE;
445 }
446
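/* Capture path: pull whatever IAudioCaptureClient has buffered, detect gaps
 * by comparing the device position against the expected position, fill any
 * gap (and AUDCLNT_BUFFERFLAGS_SILENT buffers) with silence, and copy the
 * rest into the ring buffer segment by segment. */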
447 static HRESULT
448 gst_wasapi2_ring_buffer_read (GstWasapi2RingBuffer * self)
449 {
450 GstAudioRingBuffer *ringbuffer = GST_AUDIO_RING_BUFFER_CAST (self);
451 BYTE *data = nullptr;
452 UINT32 to_read = 0;
453 guint32 to_read_bytes;
454 DWORD flags = 0;
455 HRESULT hr;
456 guint64 position;
457 GstAudioInfo *info = &ringbuffer->spec.info;
458 IAudioCaptureClient *capture_client = self->capture_client;
459 guint gap_size = 0;
460 guint offset = 0;
461 gint segment;
462 guint8 *readptr;
463 gint len;
464
465 if (!capture_client) {
466 GST_ERROR_OBJECT (self, "IAudioCaptureClient is not available");
467 return E_FAIL;
468 }
469
470 hr = capture_client->GetBuffer (&data, &to_read, &flags, &position, nullptr);
471 if (hr == AUDCLNT_S_BUFFER_EMPTY || to_read == 0) {
472 GST_LOG_OBJECT (self, "Empty buffer");
473 to_read = 0;
474 goto out;
475 }
476
477 to_read_bytes = to_read * GST_AUDIO_INFO_BPF (info);
478
479 GST_LOG_OBJECT (self, "Reading %d frames offset at %" G_GUINT64_FORMAT
480 ", expected position %" G_GUINT64_FORMAT, to_read, position,
481 self->expected_position);
482
483 if (self->is_first) {
484 self->expected_position = position + to_read;
485 self->is_first = FALSE;
486 } else {
487 if (position > self->expected_position) {
488 guint gap_frames;
489
490 gap_frames = (guint) (position - self->expected_position);
491 GST_WARNING_OBJECT (self, "Found %u frames gap", gap_frames);
492 gap_size = gap_frames * GST_AUDIO_INFO_BPF (info);
493 }
494
495 self->expected_position = position + to_read;
496 }
497
498 /* Fill gap data if any */
499 while (gap_size > 0) {
500 if (!gst_audio_ring_buffer_prepare_read (ringbuffer,
501 &segment, &readptr, &len)) {
502 GST_INFO_OBJECT (self, "No segment available");
503 goto out;
504 }
505
506 g_assert (self->segoffset >= 0);
507
508 len -= self->segoffset;
509 if (len > gap_size)
510 len = gap_size;
511
512 gst_audio_format_info_fill_silence (ringbuffer->spec.info.finfo,
513 readptr + self->segoffset, len);
514
515 self->segoffset += len;
516 gap_size -= len;
517
518 if (self->segoffset == ringbuffer->spec.segsize) {
519 gst_audio_ring_buffer_advance (ringbuffer, 1);
520 self->segoffset = 0;
521 }
522 }
523
524 while (to_read_bytes) {
525 if (!gst_audio_ring_buffer_prepare_read (ringbuffer,
526 &segment, &readptr, &len)) {
527 GST_INFO_OBJECT (self, "No segment available");
528 goto out;
529 }
530
531 len -= self->segoffset;
532 if (len > to_read_bytes)
533 len = to_read_bytes;
534
535 if ((flags & AUDCLNT_BUFFERFLAGS_SILENT) == AUDCLNT_BUFFERFLAGS_SILENT) {
536 gst_audio_format_info_fill_silence (ringbuffer->spec.info.finfo,
537 readptr + self->segoffset, len);
538 } else {
539 memcpy (readptr + self->segoffset, data + offset, len);
540 }
541
542 self->segoffset += len;
543 offset += len;
544 to_read_bytes -= len;
545
546 if (self->segoffset == ringbuffer->spec.segsize) {
547 gst_audio_ring_buffer_advance (ringbuffer, 1);
548 self->segoffset = 0;
549 }
550 }
551
552 out:
553 hr = capture_client->ReleaseBuffer (to_read);
554 /* For debugging */
555 gst_wasapi2_result (hr);
556
557 return hr;
558 }
559
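/* Render path: query GetCurrentPadding() to learn how many frames the device
 * can accept, then either pre-fill that space with silence (preroll) or copy
 * data from the ring buffer segments into the IAudioRenderClient buffer. */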
560 static HRESULT
561 gst_wasapi2_ring_buffer_write (GstWasapi2RingBuffer * self, gboolean preroll)
562 {
563 GstAudioRingBuffer *ringbuffer = GST_AUDIO_RING_BUFFER_CAST (self);
564 HRESULT hr;
565 IAudioClient *client_handle;
566 IAudioRenderClient *render_client;
567 guint32 padding_frames = 0;
568 guint32 can_write;
569 guint32 can_write_bytes;
570 gint segment;
571 guint8 *readptr;
572 gint len;
573 BYTE *data = nullptr;
574
575 client_handle = gst_wasapi2_client_get_handle (self->client);
576 if (!client_handle) {
577 GST_ERROR_OBJECT (self, "IAudioClient is not available");
578 return E_FAIL;
579 }
580
581 render_client = self->render_client;
582 if (!render_client) {
583 GST_ERROR_OBJECT (self, "IAudioRenderClient is not available");
584 return E_FAIL;
585 }
586
587 hr = client_handle->GetCurrentPadding (&padding_frames);
588 if (!gst_wasapi2_result (hr))
589 return hr;
590
591 if (padding_frames >= self->buffer_size) {
592 GST_INFO_OBJECT (self,
593 "Padding size %d is larger than or equal to buffer size %d",
594 padding_frames, self->buffer_size);
595 return S_OK;
596 }
597
598 can_write = self->buffer_size - padding_frames;
599 can_write_bytes = can_write * GST_AUDIO_INFO_BPF (&ringbuffer->spec.info);
600 if (preroll) {
601 GST_INFO_OBJECT (self, "Pre-fill %d frames with silence", can_write);
602
603 hr = render_client->GetBuffer (can_write, &data);
604 if (!gst_wasapi2_result (hr))
605 return hr;
606
607 hr = render_client->ReleaseBuffer (can_write, AUDCLNT_BUFFERFLAGS_SILENT);
608 return gst_wasapi2_result (hr);
609 }
610
611 GST_LOG_OBJECT (self, "Writing %d frames offset at %" G_GUINT64_FORMAT,
612 can_write, self->write_frame_offset);
613 self->write_frame_offset += can_write;
614
615 while (can_write_bytes > 0) {
616 if (!gst_audio_ring_buffer_prepare_read (ringbuffer,
617 &segment, &readptr, &len)) {
618 GST_INFO_OBJECT (self, "No segment available, fill silence");
619
620       /* This would be the case where we are in the middle of a PAUSED state
621        * change. Just fill a silent buffer to avoid an immediate I/O callback
622        * after we return here */
623 hr = render_client->GetBuffer (can_write, &data);
624 if (!gst_wasapi2_result (hr))
625 return hr;
626
627 hr = render_client->ReleaseBuffer (can_write, AUDCLNT_BUFFERFLAGS_SILENT);
628 /* for debugging */
629 gst_wasapi2_result (hr);
630 return hr;
631 }
632
633 len -= self->segoffset;
634
635 if (len > can_write_bytes)
636 len = can_write_bytes;
637
638 can_write = len / GST_AUDIO_INFO_BPF (&ringbuffer->spec.info);
639 if (can_write == 0)
640 break;
641
642 hr = render_client->GetBuffer (can_write, &data);
643 if (!gst_wasapi2_result (hr))
644 return hr;
645
646 memcpy (data, readptr + self->segoffset, len);
647 hr = render_client->ReleaseBuffer (can_write, 0);
648
649 self->segoffset += len;
650 can_write_bytes -= len;
651
652 if (self->segoffset == ringbuffer->spec.segsize) {
653 gst_audio_ring_buffer_clear (ringbuffer, segment);
654 gst_audio_ring_buffer_advance (ringbuffer, 1);
655 self->segoffset = 0;
656 }
657
658 if (!gst_wasapi2_result (hr)) {
659 GST_WARNING_OBJECT (self, "Failed to release buffer");
660 break;
661 }
662 }
663
664 return S_OK;
665 }
666
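/* Invoked from the MF work queue whenever event_handle is signalled by
 * WASAPI. Performs one read or write depending on the device class and,
 * while we are running, re-arms itself with MFPutWaitingWorkItem(). */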
667 static HRESULT
668 gst_wasapi2_ring_buffer_io_callback (GstWasapi2RingBuffer * self)
669 {
670 HRESULT hr = E_FAIL;
671
672 g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (self), E_FAIL);
673
674 if (!self->running) {
675 GST_INFO_OBJECT (self, "We are not running now");
676 return S_OK;
677 }
678
679 switch (self->device_class) {
680 case GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE:
681 case GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE:
682 hr = gst_wasapi2_ring_buffer_read (self);
683 break;
684 case GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER:
685 hr = gst_wasapi2_ring_buffer_write (self, FALSE);
686 break;
687 default:
688 g_assert_not_reached ();
689 break;
690 }
691
692   /* We can ignore errors for the device-unplugged event if the client
693    * supports automatic stream routing, except for loopback capture:
694    * the loopback capture client doesn't seem to be able to recover
695    * from this situation */
696 if (self->can_auto_routing &&
697 self->device_class != GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE &&
698 (hr == AUDCLNT_E_ENDPOINT_CREATE_FAILED
699 || hr == AUDCLNT_E_DEVICE_INVALIDATED)) {
700 GST_WARNING_OBJECT (self,
701 "Device was unplugged but client can support automatic routing");
702 hr = S_OK;
703 }
704
705 if (self->running) {
706 if (gst_wasapi2_result (hr)) {
707 hr = MFPutWaitingWorkItem (self->event_handle, 0, self->callback_result,
708 &self->callback_key);
709
710 if (!gst_wasapi2_result (hr)) {
711 GST_ERROR_OBJECT (self, "Failed to put item");
712 gst_wasapi2_ring_buffer_post_scheduling_error (self);
713
714 return hr;
715 }
716 }
717 } else {
718 GST_INFO_OBJECT (self, "We are not running now");
719 return S_OK;
720 }
721
722 if (FAILED (hr))
723 gst_wasapi2_ring_buffer_post_io_error (self, hr);
724
725 return hr;
726 }
727
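/* Loopback capture depends on the endpoint actually rendering something, so
 * the extra render stream opened for loopback capture is kept filled with
 * silence here (the "loopback silence feed" referenced elsewhere in this
 * file). */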
728 static HRESULT
729 gst_wasapi2_ring_buffer_fill_loopback_silence (GstWasapi2RingBuffer * self)
730 {
731 HRESULT hr;
732 IAudioClient *client_handle;
733 IAudioRenderClient *render_client;
734 guint32 padding_frames = 0;
735 guint32 can_write;
736 BYTE *data = nullptr;
737
738 client_handle = gst_wasapi2_client_get_handle (self->loopback_client);
739 if (!client_handle) {
740 GST_ERROR_OBJECT (self, "IAudioClient is not available");
741 return E_FAIL;
742 }
743
744 render_client = self->render_client;
745 if (!render_client) {
746 GST_ERROR_OBJECT (self, "IAudioRenderClient is not available");
747 return E_FAIL;
748 }
749
750 hr = client_handle->GetCurrentPadding (&padding_frames);
751 if (!gst_wasapi2_result (hr))
752 return hr;
753
754 if (padding_frames >= self->buffer_size) {
755 GST_INFO_OBJECT (self,
756 "Padding size %d is larger than or equal to buffer size %d",
757 padding_frames, self->buffer_size);
758 return S_OK;
759 }
760
761 can_write = self->buffer_size - padding_frames;
762
763 GST_TRACE_OBJECT (self,
764       "Writing %d silent frames", can_write);
765
766 hr = render_client->GetBuffer (can_write, &data);
767 if (!gst_wasapi2_result (hr))
768 return hr;
769
770 hr = render_client->ReleaseBuffer (can_write, AUDCLNT_BUFFERFLAGS_SILENT);
771 return gst_wasapi2_result (hr);
772 }
773
774 static HRESULT
775 gst_wasapi2_ring_buffer_loopback_callback (GstWasapi2RingBuffer * self)
776 {
777 HRESULT hr = E_FAIL;
778
779 g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (self), E_FAIL);
780 g_return_val_if_fail (self->device_class ==
781 GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE, E_FAIL);
782
783 if (!self->running) {
784 GST_INFO_OBJECT (self, "We are not running now");
785 return S_OK;
786 }
787
788 hr = gst_wasapi2_ring_buffer_fill_loopback_silence (self);
789
790 if (self->running) {
791 if (gst_wasapi2_result (hr)) {
792 hr = MFPutWaitingWorkItem (self->loopback_event_handle, 0,
793 self->loopback_callback_result, &self->loopback_callback_key);
794
795 if (!gst_wasapi2_result (hr)) {
796 GST_ERROR_OBJECT (self, "Failed to put item");
797 gst_wasapi2_ring_buffer_post_scheduling_error (self);
798
799 return hr;
800 }
801 }
802 } else {
803 GST_INFO_OBJECT (self, "We are not running now");
804 return S_OK;
805 }
806
807 if (FAILED (hr))
808 gst_wasapi2_ring_buffer_post_io_error (self, hr);
809
810 return hr;
811 }
812
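/* Low-latency path: use IAudioClient3::InitializeSharedAudioStream() with the
 * minimum shared-mode engine period reported by GetSharedModeEnginePeriod().
 * Returns a failure HRESULT if IAudioClient3 is unavailable, in which case
 * the caller falls back to the regular IAudioClient::Initialize() path. */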
813 static HRESULT
814 gst_wasapi2_ring_buffer_initialize_audio_client3 (GstWasapi2RingBuffer * self,
815 IAudioClient * client_handle, WAVEFORMATEX * mix_format, guint * period)
816 {
817 HRESULT hr = S_OK;
818 UINT32 default_period, fundamental_period, min_period, max_period;
819 /* AUDCLNT_STREAMFLAGS_NOPERSIST is not allowed for
820 * InitializeSharedAudioStream */
821 DWORD stream_flags = AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
822 ComPtr < IAudioClient3 > audio_client;
823
824 hr = client_handle->QueryInterface (IID_PPV_ARGS (&audio_client));
825 if (!gst_wasapi2_result (hr)) {
826 GST_INFO_OBJECT (self, "IAudioClient3 interface is unavailable");
827 return hr;
828 }
829
830 hr = audio_client->GetSharedModeEnginePeriod (mix_format,
831 &default_period, &fundamental_period, &min_period, &max_period);
832 if (!gst_wasapi2_result (hr)) {
833 GST_INFO_OBJECT (self, "Couldn't get period");
834 return hr;
835 }
836
837 GST_INFO_OBJECT (self, "Using IAudioClient3, default period %d frames, "
838 "fundamental period %d frames, minimum period %d frames, maximum period "
839 "%d frames", default_period, fundamental_period, min_period, max_period);
840
841 *period = min_period;
842
843 hr = audio_client->InitializeSharedAudioStream (stream_flags, min_period,
844 mix_format, nullptr);
845
846 if (!gst_wasapi2_result (hr))
847 GST_WARNING_OBJECT (self, "Failed to initialize IAudioClient3");
848
849 return hr;
850 }
851
852 static HRESULT
853 gst_wasapi2_ring_buffer_initialize_audio_client (GstWasapi2RingBuffer * self,
854 IAudioClient * client_handle, WAVEFORMATEX * mix_format, guint * period,
855 DWORD extra_flags)
856 {
857 GstAudioRingBuffer *ringbuffer = GST_AUDIO_RING_BUFFER_CAST (self);
858 REFERENCE_TIME default_period, min_period;
859 DWORD stream_flags =
860 AUDCLNT_STREAMFLAGS_EVENTCALLBACK | AUDCLNT_STREAMFLAGS_NOPERSIST;
861 HRESULT hr;
862
863 stream_flags |= extra_flags;
864
865 hr = client_handle->GetDevicePeriod (&default_period, &min_period);
866 if (!gst_wasapi2_result (hr)) {
867 GST_WARNING_OBJECT (self, "Couldn't get device period info");
868 return hr;
869 }
870
871 GST_INFO_OBJECT (self, "wasapi2 default period: %" G_GINT64_FORMAT
872 ", min period: %" G_GINT64_FORMAT, default_period, min_period);
873
874 hr = client_handle->Initialize (AUDCLNT_SHAREMODE_SHARED, stream_flags,
875       /* hnsBufferDuration should be the same as hnsPeriodicity
876        * when AUDCLNT_STREAMFLAGS_EVENTCALLBACK is used.
877        * In shared mode hnsPeriodicity must be zero, so
878        * this value must be zero as well */
879 0,
880 /* This must always be 0 in shared mode */
881 0, mix_format, nullptr);
882
883 if (!gst_wasapi2_result (hr)) {
884 GST_WARNING_OBJECT (self, "Couldn't initialize audioclient");
885 return hr;
886 }
887
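  /* GetDevicePeriod() reports a REFERENCE_TIME in 100 ns units; convert the
   * default period to a frame count. For example, a 10 ms period
   * (100000 units) at 48000 Hz yields 480 frames. */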
888 *period = gst_util_uint64_scale_round (default_period * 100,
889 GST_AUDIO_INFO_RATE (&ringbuffer->spec.info), GST_SECOND);
890
891 return S_OK;
892 }
893
894 static gboolean
895 gst_wasapi2_ring_buffer_prepare_loopback_client (GstWasapi2RingBuffer * self)
896 {
897 IAudioClient *client_handle;
898 HRESULT hr;
899 WAVEFORMATEX *mix_format = nullptr;
900 guint period = 0;
901 ComPtr < IAudioRenderClient > render_client;
902
903 if (!self->loopback_client) {
904 GST_ERROR_OBJECT (self, "No configured client object");
905 return FALSE;
906 }
907
908 if (!gst_wasapi2_client_ensure_activation (self->loopback_client)) {
909 GST_ERROR_OBJECT (self, "Failed to activate audio client");
910 return FALSE;
911 }
912
913 client_handle = gst_wasapi2_client_get_handle (self->loopback_client);
914 if (!client_handle) {
915 GST_ERROR_OBJECT (self, "IAudioClient handle is not available");
916 return FALSE;
917 }
918
919 hr = client_handle->GetMixFormat (&mix_format);
920 if (!gst_wasapi2_result (hr)) {
921 GST_ERROR_OBJECT (self, "Failed to get mix format");
922 return FALSE;
923 }
924
925 hr = gst_wasapi2_ring_buffer_initialize_audio_client (self, client_handle,
926 mix_format, &period, 0);
927
928 if (!gst_wasapi2_result (hr)) {
929 GST_ERROR_OBJECT (self, "Failed to initialize audio client");
930 return FALSE;
931 }
932
933 hr = client_handle->SetEventHandle (self->loopback_event_handle);
934 if (!gst_wasapi2_result (hr)) {
935 GST_ERROR_OBJECT (self, "Failed to set event handle");
936 return FALSE;
937 }
938
939 hr = client_handle->GetBufferSize (&self->loopback_buffer_size);
940 if (!gst_wasapi2_result (hr)) {
941 GST_ERROR_OBJECT (self, "Failed to query buffer size");
942 return FALSE;
943 }
944
945 hr = client_handle->GetService (IID_PPV_ARGS (&render_client));
946 if (!gst_wasapi2_result (hr)) {
947 GST_ERROR_OBJECT (self, "IAudioRenderClient is unavailable");
948 return FALSE;
949 }
950
951 self->render_client = render_client.Detach ();
952
953 return TRUE;
954 }
955
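/* acquire: activate the client, initialize the shared-mode audio stream
 * (IAudioClient3 when low-latency is requested, plain IAudioClient otherwise),
 * fetch the render/capture service interfaces and ISimpleAudioVolume, and
 * size the ring buffer as two segments of one device period each. */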
956 static gboolean
957 gst_wasapi2_ring_buffer_acquire (GstAudioRingBuffer * buf,
958 GstAudioRingBufferSpec * spec)
959 {
960 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
961 IAudioClient *client_handle;
962 HRESULT hr;
963 WAVEFORMATEX *mix_format = nullptr;
964 ComPtr < ISimpleAudioVolume > audio_volume;
965 GstAudioChannelPosition *position = nullptr;
966 guint period = 0;
967
968 GST_DEBUG_OBJECT (buf, "Acquire");
969
970 if (!self->client && !gst_wasapi2_ring_buffer_open_device (buf))
971 return FALSE;
972
973 if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE) {
974 if (!gst_wasapi2_ring_buffer_prepare_loopback_client (self)) {
975 GST_ERROR_OBJECT (self, "Failed to prepare loopback client");
976 goto error;
977 }
978 }
979
980 if (!gst_wasapi2_client_ensure_activation (self->client)) {
981 GST_ERROR_OBJECT (self, "Failed to activate audio client");
982 goto error;
983 }
984
985 client_handle = gst_wasapi2_client_get_handle (self->client);
986 if (!client_handle) {
987 GST_ERROR_OBJECT (self, "IAudioClient handle is not available");
988 goto error;
989 }
990
991 /* TODO: convert given caps to mix format */
992 hr = client_handle->GetMixFormat (&mix_format);
993 if (!gst_wasapi2_result (hr)) {
994 GST_ERROR_OBJECT (self, "Failed to get mix format");
995 goto error;
996 }
997
998 /* Only use audioclient3 when low-latency is requested because otherwise
999 * very slow machines and VMs with 1 CPU allocated will get glitches:
1000 * https://bugzilla.gnome.org/show_bug.cgi?id=794497 */
1001 hr = E_FAIL;
1002 if (self->low_latency &&
1003 /* AUDCLNT_STREAMFLAGS_LOOPBACK is not allowed for
1004 * InitializeSharedAudioStream */
1005 self->device_class != GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE) {
1006 hr = gst_wasapi2_ring_buffer_initialize_audio_client3 (self, client_handle,
1007 mix_format, &period);
1008 }
1009
1010   /* Try again if the IAudioClient3 API is unavailable.
1011    * NOTE: IAudioClient3 methods might not be available for the default device.
1012    * NOTE: The default device is a special device which is needed to support
1013    * automatic stream routing
1014    * https://docs.microsoft.com/en-us/windows/win32/coreaudio/automatic-stream-routing
1015 */
1016 if (FAILED (hr)) {
1017 DWORD extra_flags = 0;
1018 if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE)
1019 extra_flags = AUDCLNT_STREAMFLAGS_LOOPBACK;
1020
1021 hr = gst_wasapi2_ring_buffer_initialize_audio_client (self, client_handle,
1022 mix_format, &period, extra_flags);
1023 }
1024
1025 if (!gst_wasapi2_result (hr)) {
1026 GST_ERROR_OBJECT (self, "Failed to initialize audio client");
1027 goto error;
1028 }
1029
1030 hr = client_handle->SetEventHandle (self->event_handle);
1031 if (!gst_wasapi2_result (hr)) {
1032 GST_ERROR_OBJECT (self, "Failed to set event handle");
1033 goto error;
1034 }
1035
1036 gst_wasapi2_util_waveformatex_to_channel_mask (mix_format, &position);
1037 if (position)
1038 gst_audio_ring_buffer_set_channel_positions (buf, position);
1039 g_free (position);
1040
1041 CoTaskMemFree (mix_format);
1042
1043 if (!gst_wasapi2_result (hr)) {
1044 GST_ERROR_OBJECT (self, "Failed to init audio client");
1045 goto error;
1046 }
1047
1048 hr = client_handle->GetBufferSize (&self->buffer_size);
1049 if (!gst_wasapi2_result (hr)) {
1050 GST_ERROR_OBJECT (self, "Failed to query buffer size");
1051 goto error;
1052 }
1053
1054 g_assert (period > 0);
1055
1056 if (self->buffer_size > period) {
1057 GST_INFO_OBJECT (self, "Updating buffer size %d -> %d", self->buffer_size,
1058 period);
1059 self->buffer_size = period;
1060 }
1061
1062 spec->segsize = period * GST_AUDIO_INFO_BPF (&buf->spec.info);
1063 spec->segtotal = 2;
1064
1065 GST_INFO_OBJECT (self,
1066 "Buffer size: %d frames, period: %d frames, segsize: %d bytes",
1067 self->buffer_size, period, spec->segsize);
1068
1069 if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
1070 ComPtr < IAudioRenderClient > render_client;
1071
1072 hr = client_handle->GetService (IID_PPV_ARGS (&render_client));
1073 if (!gst_wasapi2_result (hr)) {
1074 GST_ERROR_OBJECT (self, "IAudioRenderClient is unavailable");
1075 goto error;
1076 }
1077
1078 self->render_client = render_client.Detach ();
1079 } else {
1080 ComPtr < IAudioCaptureClient > capture_client;
1081
1082 hr = client_handle->GetService (IID_PPV_ARGS (&capture_client));
1083 if (!gst_wasapi2_result (hr)) {
1084 GST_ERROR_OBJECT (self, "IAudioCaptureClient is unavailable");
1085 goto error;
1086 }
1087
1088 self->capture_client = capture_client.Detach ();
1089 }
1090
1091 hr = client_handle->GetService (IID_PPV_ARGS (&audio_volume));
1092 if (!gst_wasapi2_result (hr)) {
1093 GST_ERROR_OBJECT (self, "ISimpleAudioVolume is unavailable");
1094 goto error;
1095 }
1096
1097 g_mutex_lock (&self->volume_lock);
1098 self->volume_object = audio_volume.Detach ();
1099
1100 if (self->mute_changed) {
1101 self->volume_object->SetMute (self->mute, nullptr);
1102 self->mute_changed = FALSE;
1103 } else {
1104 self->volume_object->SetMute (FALSE, nullptr);
1105 }
1106
1107 if (self->volume_changed) {
1108 self->volume_object->SetMasterVolume (self->volume, nullptr);
1109 self->volume_changed = FALSE;
1110 }
1111 g_mutex_unlock (&self->volume_lock);
1112
1113 buf->size = spec->segtotal * spec->segsize;
1114 buf->memory = (guint8 *) g_malloc (buf->size);
1115 gst_audio_format_info_fill_silence (buf->spec.info.finfo,
1116 buf->memory, buf->size);
1117
1118 return TRUE;
1119
1120 error:
1121 GST_WASAPI2_CLEAR_COM (self->render_client);
1122 GST_WASAPI2_CLEAR_COM (self->capture_client);
1123 GST_WASAPI2_CLEAR_COM (self->volume_object);
1124
1125 gst_wasapi2_ring_buffer_post_open_error (self);
1126
1127 return FALSE;
1128 }
1129
1130 static gboolean
1131 gst_wasapi2_ring_buffer_release (GstAudioRingBuffer * buf)
1132 {
1133 GST_DEBUG_OBJECT (buf, "Release");
1134
1135 g_clear_pointer (&buf->memory, g_free);
1136
1137 /* IAudioClient handle is not reusable once it's initialized */
1138 gst_wasapi2_ring_buffer_close_device_internal (buf);
1139
1140 return TRUE;
1141 }
1142
1143 static gboolean
1144 gst_wasapi2_ring_buffer_start_internal (GstWasapi2RingBuffer * self)
1145 {
1146 IAudioClient *client_handle;
1147 HRESULT hr;
1148
1149 if (self->running) {
1150 GST_INFO_OBJECT (self, "We are running already");
1151 return TRUE;
1152 }
1153
1154 client_handle = gst_wasapi2_client_get_handle (self->client);
1155 self->is_first = TRUE;
1156 self->running = TRUE;
1157 self->segoffset = 0;
1158 self->write_frame_offset = 0;
1159
1160 switch (self->device_class) {
1161 case GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER:
1162       /* The render client might read data from the buffer immediately once
1163        * it's prepared. Pre-fill it with silence to avoid a start-up glitch */
1164 hr = gst_wasapi2_ring_buffer_write (self, TRUE);
1165 if (!gst_wasapi2_result (hr)) {
1166 GST_ERROR_OBJECT (self, "Failed to pre-fill buffer with silence");
1167 goto error;
1168 }
1169 break;
1170 case GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE:
1171 {
1172 IAudioClient *loopback_client_handle;
1173
1174 /* Start silence feed client first */
1175 loopback_client_handle =
1176 gst_wasapi2_client_get_handle (self->loopback_client);
1177
1178 hr = loopback_client_handle->Start ();
1179 if (!gst_wasapi2_result (hr)) {
1180 GST_ERROR_OBJECT (self, "Failed to start loopback client");
1181 self->running = FALSE;
1182 goto error;
1183 }
1184
1185 hr = MFPutWaitingWorkItem (self->loopback_event_handle,
1186 0, self->loopback_callback_result, &self->loopback_callback_key);
1187 if (!gst_wasapi2_result (hr)) {
1188 GST_ERROR_OBJECT (self, "Failed to put waiting item");
1189 loopback_client_handle->Stop ();
1190 self->running = FALSE;
1191 goto error;
1192 }
1193 break;
1194 }
1195 default:
1196 break;
1197 }
1198
1199 hr = client_handle->Start ();
1200 if (!gst_wasapi2_result (hr)) {
1201 GST_ERROR_OBJECT (self, "Failed to start client");
1202 self->running = FALSE;
1203 goto error;
1204 }
1205
1206 hr = MFPutWaitingWorkItem (self->event_handle, 0, self->callback_result,
1207 &self->callback_key);
1208 if (!gst_wasapi2_result (hr)) {
1209 GST_ERROR_OBJECT (self, "Failed to put waiting item");
1210 client_handle->Stop ();
1211 self->running = FALSE;
1212 goto error;
1213 }
1214
1215 return TRUE;
1216
1217 error:
1218 gst_wasapi2_ring_buffer_post_open_error (self);
1219 return FALSE;
1220 }
1221
1222 static gboolean
1223 gst_wasapi2_ring_buffer_start (GstAudioRingBuffer * buf)
1224 {
1225 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
1226
1227 GST_DEBUG_OBJECT (self, "Start");
1228
1229 return gst_wasapi2_ring_buffer_start_internal (self);
1230 }
1231
1232 static gboolean
1233 gst_wasapi2_ring_buffer_resume (GstAudioRingBuffer * buf)
1234 {
1235 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
1236
1237 GST_DEBUG_OBJECT (self, "Resume");
1238
1239 return gst_wasapi2_ring_buffer_start_internal (self);
1240 }
1241
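/* stop: cancel the pending MF work items, then stop and reset the
 * IAudioClient (and the loopback silence-feed client, if any) so that the
 * same client can be started again later. */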
1242 static gboolean
1243 gst_wasapi2_ring_buffer_stop_internal (GstWasapi2RingBuffer * self)
1244 {
1245 IAudioClient *client_handle;
1246 HRESULT hr;
1247
1248 if (!self->client) {
1249 GST_DEBUG_OBJECT (self, "No configured client");
1250 return TRUE;
1251 }
1252
1253 if (!self->running) {
1254 GST_DEBUG_OBJECT (self, "We are not running");
1255 return TRUE;
1256 }
1257
1258 client_handle = gst_wasapi2_client_get_handle (self->client);
1259
1260 self->running = FALSE;
1261 MFCancelWorkItem (self->callback_key);
1262
1263 hr = client_handle->Stop ();
1264 gst_wasapi2_result (hr);
1265
1266   /* Call Reset() so that the client can be reused later */
1267 hr = client_handle->Reset ();
1268 self->expected_position = 0;
1269 self->write_frame_offset = 0;
1270
1271 if (self->loopback_client) {
1272 client_handle = gst_wasapi2_client_get_handle (self->loopback_client);
1273
1274 MFCancelWorkItem (self->loopback_callback_key);
1275
1276 hr = client_handle->Stop ();
1277 gst_wasapi2_result (hr);
1278
1279 client_handle->Reset ();
1280 }
1281
1282 return TRUE;
1283 }
1284
1285 static gboolean
1286 gst_wasapi2_ring_buffer_stop (GstAudioRingBuffer * buf)
1287 {
1288 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
1289
1290 GST_DEBUG_OBJECT (buf, "Stop");
1291
1292 return gst_wasapi2_ring_buffer_stop_internal (self);
1293 }
1294
1295 static gboolean
1296 gst_wasapi2_ring_buffer_pause (GstAudioRingBuffer * buf)
1297 {
1298 GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
1299
1300 GST_DEBUG_OBJECT (buf, "Pause");
1301
1302 return gst_wasapi2_ring_buffer_stop_internal (self);
1303 }
1304
1305 static guint
1306 gst_wasapi2_ring_buffer_delay (GstAudioRingBuffer * buf)
1307 {
1308 /* NOTE: WASAPI supports GetCurrentPadding() method for querying
1309 * currently unread buffer size, but it doesn't seem to be quite useful
1310 * here because:
1311 *
1312 * In case of capture client, GetCurrentPadding() will return the number of
1313 * unread frames which will be identical to pNumFramesToRead value of
1314    * IAudioCaptureClient::GetBuffer()'s return. Since we are running in
1315    * event-driven mode, WASAPI signals the event whenever data is available,
1316    * so the padding is likely zero at this moment. There is also a chance
1317    * of returning an incorrect value here, because our I/O callback runs
1318    * on another thread.
1319    *
1320    * And the render client's padding will be close to the total buffer size,
1321    * which is likely larger than twice our period, and therefore doesn't
1322    * correctly represent the number of frames queued in the device
1323 */
1324 return 0;
1325 }
1326
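/* Illustrative usage from an audio sink element (the "device_id" variable and
 * the ring buffer name are hypothetical):
 *
 *   GstAudioRingBuffer *rb =
 *       gst_wasapi2_ring_buffer_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
 *       FALSE, device_id, NULL, "wasapi2sinkringbuffer");
 *
 * NULL is returned when the MF callback objects could not be created in
 * constructed(). */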
1327 GstAudioRingBuffer *
1328 gst_wasapi2_ring_buffer_new (GstWasapi2ClientDeviceClass device_class,
1329 gboolean low_latency, const gchar * device_id, gpointer dispatcher,
1330 const gchar * name)
1331 {
1332 GstWasapi2RingBuffer *self;
1333
1334 self = (GstWasapi2RingBuffer *)
1335 g_object_new (GST_TYPE_WASAPI2_RING_BUFFER, "name", name, nullptr);
1336
1337 if (!self->callback_object) {
1338 gst_object_unref (self);
1339 return nullptr;
1340 }
1341
1342 self->device_class = device_class;
1343 self->low_latency = low_latency;
1344 self->device_id = g_strdup (device_id);
1345 self->dispatcher = dispatcher;
1346
1347 return GST_AUDIO_RING_BUFFER_CAST (self);
1348 }
1349
1350 GstCaps *
1351 gst_wasapi2_ring_buffer_get_caps (GstWasapi2RingBuffer * buf)
1352 {
1353 g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), nullptr);
1354
1355 if (buf->supported_caps)
1356 return gst_caps_ref (buf->supported_caps);
1357
1358 if (!buf->client)
1359 return nullptr;
1360
1361 if (!gst_wasapi2_client_ensure_activation (buf->client)) {
1362 GST_ERROR_OBJECT (buf, "Failed to activate audio client");
1363 return nullptr;
1364 }
1365
1366 buf->supported_caps = gst_wasapi2_client_get_caps (buf->client);
1367 if (buf->supported_caps)
1368 return gst_caps_ref (buf->supported_caps);
1369
1370 return nullptr;
1371 }
1372
1373 HRESULT
1374 gst_wasapi2_ring_buffer_set_mute (GstWasapi2RingBuffer * buf, gboolean mute)
1375 {
1376 HRESULT hr = S_OK;
1377 g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), E_INVALIDARG);
1378
1379 g_mutex_lock (&buf->volume_lock);
1380 buf->mute = mute;
1381 if (buf->volume_object)
1382 hr = buf->volume_object->SetMute (mute, nullptr);
1383 else
1384     buf->mute_changed = TRUE;
1385 g_mutex_unlock (&buf->volume_lock);
1386
1387   return hr;
1388 }
1389
1390 HRESULT
1391 gst_wasapi2_ring_buffer_get_mute (GstWasapi2RingBuffer * buf, gboolean * mute)
1392 {
1393 BOOL mute_val;
1394 HRESULT hr = S_OK;
1395
1396 g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), E_INVALIDARG);
1397 g_return_val_if_fail (mute != nullptr, E_INVALIDARG);
1398
1399 mute_val = buf->mute;
1400
1401 g_mutex_lock (&buf->volume_lock);
1402 if (buf->volume_object)
1403 hr = buf->volume_object->GetMute (&mute_val);
1404 g_mutex_unlock (&buf->volume_lock);
1405
1406 *mute = mute_val ? TRUE : FALSE;
1407
1408 return hr;
1409 }
1410
1411 HRESULT
1412 gst_wasapi2_ring_buffer_set_volume (GstWasapi2RingBuffer * buf, gfloat volume)
1413 {
1414 HRESULT hr = S_OK;
1415
1416 g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), E_INVALIDARG);
1417 g_return_val_if_fail (volume >= 0 && volume <= 1.0, E_INVALIDARG);
1418
1419 g_mutex_lock (&buf->volume_lock);
1420 buf->volume = volume;
1421 if (buf->volume_object)
1422 hr = buf->volume_object->SetMasterVolume (volume, nullptr);
1423 else
1424     buf->volume_changed = TRUE;
1425 g_mutex_unlock (&buf->volume_lock);
1426
1427 return hr;
1428 }
1429
1430 HRESULT
1431 gst_wasapi2_ring_buffer_get_volume (GstWasapi2RingBuffer * buf, gfloat * volume)
1432 {
1433 gfloat volume_val;
1434 HRESULT hr = S_OK;
1435
1436 g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), E_INVALIDARG);
1437 g_return_val_if_fail (volume != nullptr, E_INVALIDARG);
1438
1439 g_mutex_lock (&buf->volume_lock);
1440 volume_val = buf->volume;
1441 if (buf->volume_object)
1442 hr = buf->volume_object->GetMasterVolume (&volume_val);
1443 g_mutex_unlock (&buf->volume_lock);
1444
1445 *volume = volume_val;
1446
1447 return hr;
1448 }
1449