/*
 * Copyright (C) 2008 Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
 * Copyright (C) 2013 Collabora Ltd.
 *   Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
 * Copyright (C) 2018 Centricular Ltd.
 *   Author: Nirbheek Chauhan <nirbheek@centricular.com>
 * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
29 #include "AsyncOperations.h"
30 #include "gstwasapi2client.h"
31 #include "gstwasapi2util.h"
33 #include <windows.foundation.h>
34 #include <windows.ui.core.h>
36 #include <wrl/wrappers/corewrappers.h>
37 #include <audioclient.h>
38 #include <mmdeviceapi.h>
44 using namespace ABI::Windows::ApplicationModel::Core;
45 using namespace ABI::Windows::Foundation;
46 using namespace ABI::Windows::Foundation::Collections;
47 using namespace ABI::Windows::UI::Core;
48 using namespace ABI::Windows::Media::Devices;
49 using namespace ABI::Windows::Devices::Enumeration;
51 using namespace Microsoft::WRL;
52 using namespace Microsoft::WRL::Wrappers;
56 GST_DEBUG_CATEGORY_EXTERN (gst_wasapi2_client_debug);
57 #define GST_CAT_DEFAULT gst_wasapi2_client_debug
62 gst_wasapi2_client_on_device_activated (GstWasapi2Client * client,
63 IAudioClient3 * audio_client);
/* NOTE(review): this chunk has had structural lines stripped (braces, access
 * specifiers, the listener_ member declaration, early-return/goto paths and
 * the class closer are not visible).  Comments below annotate the surviving
 * lines only; confirm against the full file before compiling. */

/* COM callback object implementing IActivateAudioInterfaceCompletionHandler.
 * FtmBase marks it free-threaded, so the OS may invoke the completion handler
 * from a device worker thread. */
class GstWasapiDeviceActivator
    : public RuntimeClass<RuntimeClassFlags<ClassicCom>, FtmBase,
        IActivateAudioInterfaceCompletionHandler>
  GstWasapiDeviceActivator ()
    /* weak ref: the activator must not keep the GstWasapi2Client alive */
    g_weak_ref_init (&listener_, nullptr);
  ~GstWasapiDeviceActivator ()
    g_weak_ref_set (&listener_, nullptr);
  /* WRL two-phase init: remember the client weakly and, if the caller passed
   * a UI dispatcher pointer, resolve it to ICoreDispatcher (UWP case) */
  RuntimeClassInitialize (GstWasapi2Client * listener, gpointer dispatcher)
    g_weak_ref_set (&listener_, listener);
    /* dispatcher travels through a GObject pointer property; cast it back
     * to a COM interface pointer before QI */
    ComPtr<IInspectable> inspectable =
        reinterpret_cast<IInspectable*> (dispatcher);
    hr = inspectable.As (&dispatcher_);
    if (gst_wasapi2_result (hr))
      GST_INFO("Main UI dispatcher is available");
  /* Invoked by the OS when ActivateAudioInterfaceAsync() completes */
  STDMETHOD(ActivateCompleted)
  (IActivateAudioInterfaceAsyncOperation *async_op)
    ComPtr<IAudioClient3> audio_client;
    HRESULT hr_async_op = S_OK;
    ComPtr<IUnknown> audio_interface;
    GstWasapi2Client *client;
    /* may be NULL if the client object was destroyed meanwhile */
    client = (GstWasapi2Client *) g_weak_ref_get (&listener_);
    GST_WARNING ("No listener was configured");
    GST_INFO_OBJECT (client, "AsyncOperation done");
    /* hr = did the result query succeed; hr_async_op = activation result */
    hr = async_op->GetActivateResult(&hr_async_op, &audio_interface);
    if (!gst_wasapi2_result (hr)) {
      GST_WARNING_OBJECT (client, "Failed to get activate result, hr: 0x%x", hr);
    if (!gst_wasapi2_result (hr_async_op)) {
      GST_WARNING_OBJECT (client, "Failed to activate device");
    hr = audio_interface.As (&audio_client);
    if (!gst_wasapi2_result (hr)) {
      GST_ERROR_OBJECT (client, "Failed to get IAudioClient3 interface");
    /* Should call this method anyway, listener will wait this event */
    gst_wasapi2_client_on_device_activated (client, audio_client.Get());
    gst_object_unref (client);
    /* return S_OK anyway, but listener can know it's succeeded or not
     * by passed IAudioClient handle via gst_wasapi2_client_on_device_activated */
  /* Kick off ActivateAudioInterfaceAsync() for device_id.  With a UI
   * dispatcher present the call may need to be marshalled to the UI thread
   * via RunAsync(); otherwise the work item is invoked inline. */
  ActivateDeviceAsync(const std::wstring &device_id)
    ComPtr<IAsyncAction> async_action;
    bool run_async = false;
    /* device_id is captured by value: the lambda may run after this frame */
    auto work_item = Callback<Implements<RuntimeClassFlags<ClassicCom>,
        IDispatchedHandler, FtmBase>>([this, device_id]{
      ComPtr<IActivateAudioInterfaceAsyncOperation> async_op;
      HRESULT async_hr = S_OK;
      async_hr = ActivateAudioInterfaceAsync (device_id.c_str (),
          __uuidof(IAudioClient3), nullptr, this, &async_op);
      /* logs failures; completion is reported via ActivateCompleted */
      gst_wasapi2_result (async_hr);
    /* are we already on the dispatcher (UI) thread? */
    hr = dispatcher_->get_HasThreadAccess (&can_now);
    if (!gst_wasapi2_result (hr))
    if (run_async && dispatcher_) {
      hr = dispatcher_->RunAsync (CoreDispatcherPriority_Normal,
          work_item.Get (), &async_action);
      hr = work_item->Invoke ();
    /* We should hold activator object until activation callback has executed,
     * because OS doesn't hold reference of this callback COM object.
     * otherwise access violation would happen
     * See https://docs.microsoft.com/en-us/windows/win32/api/mmdeviceapi/nf-mmdeviceapi-activateaudiointerfaceasync
     *
     * This reference count will be decreased by self later on callback,
     * which will be called from device worker thread. */
    if (gst_wasapi2_result (hr))
  /* UI dispatcher; only set in the UWP/dispatcher case */
  ComPtr<ICoreDispatcher> dispatcher_;
/* Activation state machine for the async IAudioClient3 activation.
 * INIT -> WAIT (request issued) -> DONE or FAILED.  FAILED is negative so a
 * simple "< GST_WASAPI2_CLIENT_ACTIVATE_INIT" check detects failure. */
typedef enum
{
  GST_WASAPI2_CLIENT_ACTIVATE_FAILED = -1,
  GST_WASAPI2_CLIENT_ACTIVATE_INIT = 0,
  GST_WASAPI2_CLIENT_ACTIVATE_WAIT,
  GST_WASAPI2_CLIENT_ACTIVATE_DONE,
} GstWasapi2ClientActivateState;

/* Default property values */
#define DEFAULT_DEVICE_INDEX -1
#define DEFAULT_DEVICE_CLASS GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE
#define DEFAULT_LOW_LATENCY FALSE
/* NOTE(review): several members are not visible in this chunk (the parent
 * GstObject, device_id/device_name/device_index, dispatcher, thread/loop,
 * event handles, adapter, lock/cond pairs and the struct closer were
 * stripped); the fields below are the surviving subset only. */
struct _GstWasapi2Client
  /* capture or render endpoint */
  GstWasapi2ClientDeviceClass device_class;
  gboolean low_latency;
  /* owned COM objects; released on the COM worker thread */
  IAudioClient3 *audio_client;
  IAudioCaptureClient *audio_capture_client;
  IAudioRenderClient *audio_render_client;
  ISimpleAudioVolume *audio_volume;
  GstWasapiDeviceActivator *activator;
  /* device mix format (CoTaskMem-allocated) and caps derived from it */
  WAVEFORMATEX *mix_format;
  GstCaps *supported_caps;
  /* engine period and total buffer size, both in frames */
  guint32 device_period;
  guint32 buffer_frame_count;
  /* channel reorder map derived from the mix format */
  GstAudioChannelPosition *positions;
  /* Used for capture mode */
  /* main context/loop driving the dedicated COM thread */
  GMainContext *context;
  /* To wait ActivateCompleted event */
  GstWasapi2ClientActivateState activate_state;
278 gst_wasapi2_client_device_class_get_type (void)
280 static volatile GType class_type = 0;
281 static const GEnumValue types[] = {
282 {GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE, "Capture", "capture"},
283 {GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER, "Render", "render"},
287 if (g_once_init_enter (&class_type)) {
288 GType gtype = g_enum_register_static ("GstWasapi2ClientDeviceClass", types);
289 g_once_init_leave (&class_type, gtype);
295 static void gst_wasapi2_client_constructed (GObject * object);
296 static void gst_wasapi2_client_dispose (GObject * object);
297 static void gst_wasapi2_client_finalize (GObject * object);
298 static void gst_wasapi2_client_get_property (GObject * object, guint prop_id,
299 GValue * value, GParamSpec * pspec);
300 static void gst_wasapi2_client_set_property (GObject * object, guint prop_id,
301 const GValue * value, GParamSpec * pspec);
303 static gpointer gst_wasapi2_client_thread_func (GstWasapi2Client * self);
305 gst_wasapi2_client_main_loop_running_cb (GstWasapi2Client * self);
307 #define gst_wasapi2_client_parent_class parent_class
308 G_DEFINE_TYPE (GstWasapi2Client,
309 gst_wasapi2_client, GST_TYPE_OBJECT);
312 gst_wasapi2_client_class_init (GstWasapi2ClientClass * klass)
314 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
315 GParamFlags param_flags =
316 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY |
317 G_PARAM_STATIC_STRINGS);
319 gobject_class->constructed = gst_wasapi2_client_constructed;
320 gobject_class->dispose = gst_wasapi2_client_dispose;
321 gobject_class->finalize = gst_wasapi2_client_finalize;
322 gobject_class->get_property = gst_wasapi2_client_get_property;
323 gobject_class->set_property = gst_wasapi2_client_set_property;
325 g_object_class_install_property (gobject_class, PROP_DEVICE,
326 g_param_spec_string ("device", "Device",
327 "WASAPI playback device as a GUID string", NULL, param_flags));
328 g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
329 g_param_spec_string ("device-name", "Device Name",
330 "The human-readable device name", NULL, param_flags));
331 g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
332 g_param_spec_int ("device-index", "Device Index",
333 "The zero-based device index", -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
335 g_object_class_install_property (gobject_class, PROP_DEVICE_CLASS,
336 g_param_spec_enum ("device-class", "Device Class",
337 "Device class", GST_TYPE_WASAPI2_CLIENT_DEVICE_CLASS,
338 DEFAULT_DEVICE_CLASS, param_flags));
339 g_object_class_install_property (gobject_class, PROP_LOW_LATENCY,
340 g_param_spec_boolean ("low-latency", "Low latency",
341 "Optimize all settings for lowest latency. Always safe to enable.",
342 DEFAULT_LOW_LATENCY, param_flags));
343 g_object_class_install_property (gobject_class, PROP_DISPATCHER,
344 g_param_spec_pointer ("dispatcher", "Dispatcher",
345 "ICoreDispatcher COM object to use", param_flags));
349 gst_wasapi2_client_init (GstWasapi2Client * self)
351 self->device_index = DEFAULT_DEVICE_INDEX;
352 self->device_class = DEFAULT_DEVICE_CLASS;
353 self->low_latency = DEFAULT_LOW_LATENCY;
355 self->adapter = gst_adapter_new ();
356 self->event_handle = CreateEvent (NULL, FALSE, FALSE, NULL);
357 self->cancellable = CreateEvent (NULL, TRUE, FALSE, NULL);
359 g_mutex_init (&self->lock);
360 g_cond_init (&self->cond);
362 g_mutex_init (&self->init_lock);
363 g_cond_init (&self->init_cond);
364 self->activate_state = GST_WASAPI2_CLIENT_ACTIVATE_INIT;
366 self->context = g_main_context_new ();
367 self->loop = g_main_loop_new (self->context, FALSE);
371 gst_wasapi2_client_constructed (GObject * object)
373 GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
374 ComPtr<GstWasapiDeviceActivator> activator;
376 /* Create a new thread to ensure that COM thread can be MTA thread.
377 * We cannot ensure whether CoInitializeEx() was called outside of here for
378 * this thread or not. If it was called with non-COINIT_MULTITHREADED option,
379 * we cannot update it */
380 g_mutex_lock (&self->lock);
381 self->thread = g_thread_new ("GstWasapi2ClientWinRT",
382 (GThreadFunc) gst_wasapi2_client_thread_func, self);
383 while (!self->loop || !g_main_loop_is_running (self->loop))
384 g_cond_wait (&self->cond, &self->lock);
385 g_mutex_unlock (&self->lock);
387 G_OBJECT_CLASS (parent_class)->constructed (object);
391 gst_wasapi2_client_dispose (GObject * object)
393 GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
395 GST_DEBUG_OBJECT (self, "dispose");
397 gst_clear_caps (&self->supported_caps);
400 g_main_loop_quit (self->loop);
401 g_thread_join (self->thread);
402 g_main_context_unref (self->context);
403 g_main_loop_unref (self->loop);
406 self->context = NULL;
410 g_clear_object (&self->adapter);
412 G_OBJECT_CLASS (parent_class)->dispose (object);
416 gst_wasapi2_client_finalize (GObject * object)
418 GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
420 g_free (self->device_id);
421 g_free (self->device_name);
423 g_free (self->positions);
425 CoTaskMemFree (self->mix_format);
426 CloseHandle (self->event_handle);
427 CloseHandle (self->cancellable);
429 g_mutex_clear (&self->lock);
430 g_cond_clear (&self->cond);
432 g_mutex_clear (&self->init_lock);
433 g_cond_clear (&self->init_cond);
435 G_OBJECT_CLASS (parent_class)->finalize (object);
439 gst_wasapi2_client_get_property (GObject * object, guint prop_id,
440 GValue * value, GParamSpec * pspec)
442 GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
446 g_value_set_string (value, self->device_id);
448 case PROP_DEVICE_NAME:
449 g_value_set_string (value, self->device_name);
451 case PROP_DEVICE_INDEX:
452 g_value_set_int (value, self->device_index);
454 case PROP_DEVICE_CLASS:
455 g_value_set_enum (value, self->device_class);
457 case PROP_LOW_LATENCY:
458 g_value_set_boolean (value, self->low_latency);
460 case PROP_DISPATCHER:
461 g_value_set_pointer (value, self->dispatcher);
464 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
470 gst_wasapi2_client_set_property (GObject * object, guint prop_id,
471 const GValue * value, GParamSpec * pspec)
473 GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
477 g_free (self->device_id);
478 self->device_id = g_value_dup_string (value);
480 case PROP_DEVICE_NAME:
481 g_free (self->device_name);
482 self->device_name = g_value_dup_string (value);
484 case PROP_DEVICE_INDEX:
485 self->device_index = g_value_get_int (value);
487 case PROP_DEVICE_CLASS:
489 (GstWasapi2ClientDeviceClass) g_value_get_enum (value);
491 case PROP_LOW_LATENCY:
492 self->low_latency = g_value_get_boolean (value);
494 case PROP_DISPATCHER:
495 self->dispatcher = g_value_get_pointer (value);
498 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
504 gst_wasapi2_client_main_loop_running_cb (GstWasapi2Client * self)
506 GST_DEBUG_OBJECT (self, "Main loop running now");
508 g_mutex_lock (&self->lock);
509 g_cond_signal (&self->cond);
510 g_mutex_unlock (&self->lock);
512 return G_SOURCE_REMOVE;
516 gst_wasapi2_client_on_device_activated (GstWasapi2Client * self,
517 IAudioClient3 * audio_client)
519 GST_INFO_OBJECT (self, "Device activated");
521 g_mutex_lock (&self->init_lock);
523 audio_client->AddRef();
524 self->audio_client = audio_client;
525 self->activate_state = GST_WASAPI2_CLIENT_ACTIVATE_DONE;
527 GST_WARNING_OBJECT (self, "IAudioClient is unavailable");
528 self->activate_state = GST_WASAPI2_CLIENT_ACTIVATE_FAILED;
530 g_cond_broadcast (&self->init_cond);
531 g_mutex_unlock (&self->init_lock);
/* UTF-16 (wchar_t on Windows) -> UTF-8 conversion helper.
 * NOTE: std::wstring_convert/codecvt_utf8 are deprecated in C++17 but still
 * functional; kept to avoid a behavior change. */
static std::string
convert_wstring_to_string (const std::wstring &wstr)
{
  std::wstring_convert<std::codecvt_utf8<wchar_t>, wchar_t> converter;

  return converter.to_bytes (wstr.c_str());
}
543 convert_hstring_to_string (HString * hstr)
545 const wchar_t *raw_hstr;
548 return std::string();
550 raw_hstr = hstr->GetRawBuffer (nullptr);
552 return std::string();
554 return convert_wstring_to_string (std::wstring (raw_hstr));
558 gst_wasapi2_client_get_default_device_id (GstWasapi2Client * self)
561 PWSTR default_device_id_wstr = nullptr;
563 if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE)
564 hr = StringFromIID (DEVINTERFACE_AUDIO_CAPTURE, &default_device_id_wstr);
566 hr = StringFromIID (DEVINTERFACE_AUDIO_RENDER, &default_device_id_wstr);
568 if (!gst_wasapi2_result (hr))
569 return std::wstring();
571 std::wstring ret = std::wstring (default_device_id_wstr);
572 CoTaskMemFree (default_device_id_wstr);
/* NOTE(review): structural lines were stripped from this chunk (braces,
 * "goto failed"/"return" statements, the "activate:"/"failed:" labels, loop
 * counter increments and some local declarations are not visible).
 * Comments annotate the surviving lines only. */

/* Enumerates audio devices of the requested class via
 * Windows.Devices.Enumeration, resolves the target device (default endpoint,
 * explicit id, or index) and starts async activation through the activator.
 * Runs on the dedicated COM worker thread.  On any failure, activate_state
 * is set to FAILED (bottom of the function). */
gst_wasapi2_client_activate_async (GstWasapi2Client * self,
    GstWasapiDeviceActivator * activator)
  ComPtr<IDeviceInformationStatics> device_info_static;
  ComPtr<IAsyncOperation<DeviceInformationCollection*>> async_op;
  ComPtr<IVectorView<DeviceInformation*>> device_list;
  HStringReference hstr_device_info =
      HStringReference(RuntimeClass_Windows_Devices_Enumeration_DeviceInformation);
  DeviceClass device_class;
  unsigned int count = 0;
  gint device_index = 0;
  std::wstring default_device_id_wstring;
  std::string default_device_id;
  std::wstring target_device_id_wstring;
  std::string target_device_id;
  std::string target_device_name;
  gboolean use_default_device = FALSE;

  GST_INFO_OBJECT (self,
      "requested device info, device-class: %s, device: %s, device-index: %d",
      self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE ? "capture" :
      "render", GST_STR_NULL (self->device_id), self->device_index);

  if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE) {
    device_class = DeviceClass::DeviceClass_AudioCapture;
    /* NOTE(review): else-branch line */
    device_class = DeviceClass::DeviceClass_AudioRender;

  /* the default endpoint id is needed both for comparison and as fallback */
  default_device_id_wstring = gst_wasapi2_client_get_default_device_id (self);
  if (default_device_id_wstring.empty ()) {
    GST_WARNING_OBJECT (self, "Couldn't get default device id");

  default_device_id = convert_wstring_to_string (default_device_id_wstring);
  GST_DEBUG_OBJECT (self, "Default device id: %s", default_device_id.c_str ());

  /*
   * 1) default device was requested or
   * 2) no explicitly requested device or
   * 3) requested device string id is null but device index is zero
   * will use default device
   *
   * Note that default device is much preferred
   * See https://docs.microsoft.com/en-us/windows/win32/coreaudio/automatic-stream-routing
   */
  if (self->device_id &&
      g_ascii_strcasecmp (self->device_id, default_device_id.c_str()) == 0) {
    GST_DEBUG_OBJECT (self, "Default device was requested");
    use_default_device = TRUE;
  } else if (self->device_index < 0 && !self->device_id) {
    GST_DEBUG_OBJECT (self,
        "No device was explicitly requested, use default device");
    use_default_device = TRUE;
  } else if (!self->device_id && self->device_index == 0) {
    GST_DEBUG_OBJECT (self, "device-index == zero means default device");
    use_default_device = TRUE;

  if (use_default_device) {
    target_device_id_wstring = default_device_id_wstring;
    target_device_id = default_device_id;
    if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE)
      target_device_name = "Default Audio Capture Device";
      /* NOTE(review): else-branch line */
      target_device_name = "Default Audio Render Device";

  /* full enumeration path: FindAllAsync + synchronous wait on the result */
  hr = GetActivationFactory (hstr_device_info.Get(), &device_info_static);
  if (!gst_wasapi2_result (hr))

  hr = device_info_static->FindAllAsyncDeviceClass (device_class, &async_op);
  device_info_static.Reset ();
  if (!gst_wasapi2_result (hr))

  /* block this (non-UI) thread until enumeration finished */
  hr = SyncWait<DeviceInformationCollection*>(async_op.Get ());
  if (!gst_wasapi2_result (hr))

  hr = async_op->GetResults (&device_list);
  if (!gst_wasapi2_result (hr))

  hr = device_list->get_Size (&count);
  if (!gst_wasapi2_result (hr))

  GST_WARNING_OBJECT (self, "No available device");

  /* device_index 0 will be assigned for default device
   * so the number of available device is count + 1 (for default device) */
  if (self->device_index >= 0 && self->device_index > (gint) count) {
    GST_WARNING_OBJECT (self, "Device index %d is unavailable",

  GST_DEBUG_OBJECT (self, "Available device count: %d", count);

  /* zero is for default device */
  for (unsigned int i = 0; i < count; i++) {
    ComPtr<IDeviceInformation> device_info;
    /* NOTE(review): HString id/name and boolean b_value declarations were
     * stripped here */
    std::string cur_device_id;
    std::string cur_device_name;

    hr = device_list->GetAt (i, &device_info);
    if (!gst_wasapi2_result (hr))

    hr = device_info->get_IsEnabled (&b_value);
    if (!gst_wasapi2_result (hr))

    /* select only enabled device */
    GST_DEBUG_OBJECT (self, "Device index %d is disabled", i);

    /* To ensure device id and device name are available,
     * will query this later again once target device is determined */
    hr = device_info->get_Id (id.GetAddressOf());
    if (!gst_wasapi2_result (hr))

    GST_WARNING_OBJECT (self, "Device index %d has invalid id", i);

    hr = device_info->get_Name (name.GetAddressOf());
    if (!gst_wasapi2_result (hr))

    if (!name.IsValid ()) {
      GST_WARNING_OBJECT (self, "Device index %d has invalid name", i);

    cur_device_id = convert_hstring_to_string (&id);
    if (cur_device_id.empty ()) {
      GST_WARNING_OBJECT (self, "Device index %d has empty id", i);

    cur_device_name = convert_hstring_to_string (&name);
    if (cur_device_name.empty ()) {
      GST_WARNING_OBJECT (self, "Device index %d has empty device name", i);

    GST_DEBUG_OBJECT (self, "device [%d] id: %s, name: %s",
        device_index, cur_device_id.c_str(), cur_device_name.c_str());

    /* explicit device id match takes precedence */
    if (self->device_id &&
        g_ascii_strcasecmp (self->device_id, cur_device_id.c_str ()) == 0) {
      GST_INFO_OBJECT (self,
          "Device index %d has matching device id %s", device_index,
          cur_device_id.c_str ());
      target_device_id_wstring = id.GetRawBuffer (nullptr);
      target_device_id = cur_device_id;
      target_device_name = cur_device_name;

    /* otherwise match by (1-based, default-adjusted) device index */
    if (self->device_index >= 0 && self->device_index == device_index) {
      GST_INFO_OBJECT (self, "Select device index %d, device id %s",
          device_index, cur_device_id.c_str ());
      target_device_id_wstring = id.GetRawBuffer (nullptr);
      target_device_id = cur_device_id;
      target_device_name = cur_device_name;

    /* count only available devices */

  if (target_device_id_wstring.empty ()) {
    GST_WARNING_OBJECT (self, "Couldn't find target device");

  /* fill device id and name */
  g_free (self->device_id);
  self->device_id = g_strdup (target_device_id.c_str());

  g_free (self->device_name);
  self->device_name = g_strdup (target_device_name.c_str ());

  self->device_index = device_index;

  hr = activator->ActivateDeviceAsync (target_device_id_wstring);
  if (!gst_wasapi2_result (hr)) {
    GST_WARNING_OBJECT (self, "Failed to activate device");

  /* don't regress from DONE/FAILED if the callback already fired */
  g_mutex_lock (&self->lock);
  if (self->activate_state == GST_WASAPI2_CLIENT_ACTIVATE_INIT)
    self->activate_state = GST_WASAPI2_CLIENT_ACTIVATE_WAIT;
  g_mutex_unlock (&self->lock);

  /* NOTE(review): "failed:" error label; reached via stripped gotos above */
  self->activate_state = GST_WASAPI2_CLIENT_ACTIVATE_FAILED;
804 activate_state_to_string (GstWasapi2ClientActivateState state)
807 case GST_WASAPI2_CLIENT_ACTIVATE_FAILED:
809 case GST_WASAPI2_CLIENT_ACTIVATE_INIT:
811 case GST_WASAPI2_CLIENT_ACTIVATE_WAIT:
813 case GST_WASAPI2_CLIENT_ACTIVATE_DONE:
817 g_assert_not_reached ();
/* NOTE(review): some structural lines were stripped from this chunk (the
 * signature's return-type line, braces/closers of the if-blocks, the
 * "GSource *source" / "HRESULT hr" declarations, a return statement and the
 * activator.Reset() call referenced by the trailing comment). */

/* Entry point of the dedicated COM thread: initializes WinRT (MTA), creates
 * the activator, starts device activation, runs the GMainLoop until dispose()
 * quits it, then tears down all COM objects before COM is uninitialized. */
gst_wasapi2_client_thread_func (GstWasapi2Client * self)
  /* per-thread WinRT init; uninitialized when this object goes out of scope */
  RoInitializeWrapper initialize (RO_INIT_MULTITHREADED);
  ComPtr<GstWasapiDeviceActivator> activator;

  hr = MakeAndInitialize<GstWasapiDeviceActivator> (&activator,
      self, self->dispatcher);
  if (!gst_wasapi2_result (hr)) {
    GST_ERROR_OBJECT (self, "Could not create activator object");
    self->activate_state = GST_WASAPI2_CLIENT_ACTIVATE_FAILED;

  gst_wasapi2_client_activate_async (self, activator.Get ());

  if (!self->dispatcher) {
    /* In case that dispatcher is unavailable, wait activation synchroniously */
    GST_DEBUG_OBJECT (self, "Wait device activation");
    gst_wasapi2_client_ensure_activation (self);
    GST_DEBUG_OBJECT (self, "Device activation result %s",
        activate_state_to_string (self->activate_state));

  g_main_context_push_thread_default (self->context);

  /* idle source wakes constructed() once the loop is running */
  source = g_idle_source_new ();
  g_source_set_callback (source,
      (GSourceFunc) gst_wasapi2_client_main_loop_running_cb, self, NULL);
  g_source_attach (source, self->context);
  g_source_unref (source);

  GST_DEBUG_OBJECT (self, "Starting main loop");
  g_main_loop_run (self->loop);
  GST_DEBUG_OBJECT (self, "Stopped main loop");

  g_main_context_pop_thread_default (self->context);

  gst_wasapi2_client_stop (self);

  if (self->audio_volume) {
    /* this mute state seems to be global setting for this device
     * Explicitly disable mute for later use of this audio device
     * by other application. Otherwise users would blame GStreamer
     * if we close audio device with muted state */
    self->audio_volume->SetMute(FALSE, nullptr);
    self->audio_volume->Release ();
    self->audio_volume = NULL;

  if (self->audio_render_client) {
    self->audio_render_client->Release ();
    self->audio_render_client = NULL;

  if (self->audio_capture_client) {
    self->audio_capture_client->Release ();
    self->audio_capture_client = NULL;

  if (self->audio_client) {
    self->audio_client->Release ();
    self->audio_client = NULL;

  /* Reset explicitly to ensure that it happens before
   * RoInitializeWrapper dtor is called */

  GST_DEBUG_OBJECT (self, "Exit thread function");
900 gst_waveformatex_to_audio_format (WAVEFORMATEXTENSIBLE * format)
902 const gchar *fmt_str = NULL;
903 GstAudioFormat fmt = GST_AUDIO_FORMAT_UNKNOWN;
905 if (format->Format.wFormatTag == WAVE_FORMAT_PCM) {
906 fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
907 format->Format.wBitsPerSample, format->Format.wBitsPerSample);
908 } else if (format->Format.wFormatTag == WAVE_FORMAT_IEEE_FLOAT) {
909 if (format->Format.wBitsPerSample == 32)
910 fmt = GST_AUDIO_FORMAT_F32LE;
911 else if (format->Format.wBitsPerSample == 64)
912 fmt = GST_AUDIO_FORMAT_F64LE;
913 } else if (format->Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
914 if (IsEqualGUID (format->SubFormat, KSDATAFORMAT_SUBTYPE_PCM)) {
915 fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
916 format->Format.wBitsPerSample, format->Samples.wValidBitsPerSample);
917 } else if (IsEqualGUID (format->SubFormat,
918 KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)) {
919 if (format->Format.wBitsPerSample == 32
920 && format->Samples.wValidBitsPerSample == 32)
921 fmt = GST_AUDIO_FORMAT_F32LE;
922 else if (format->Format.wBitsPerSample == 64 &&
923 format->Samples.wValidBitsPerSample == 64)
924 fmt = GST_AUDIO_FORMAT_F64LE;
928 if (fmt != GST_AUDIO_FORMAT_UNKNOWN)
929 fmt_str = gst_audio_format_to_string (fmt);
935 gst_wasapi_util_channel_position_all_none (guint channels,
936 GstAudioChannelPosition * position)
939 for (ii = 0; ii < channels; ii++)
940 position[ii] = GST_AUDIO_CHANNEL_POSITION_NONE;
946 GstAudioChannelPosition gst_pos;
947 } wasapi_to_gst_pos[] = {
948 {SPEAKER_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT},
949 {SPEAKER_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT},
950 {SPEAKER_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER},
951 {SPEAKER_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE1},
952 {SPEAKER_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT},
953 {SPEAKER_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT},
954 {SPEAKER_FRONT_LEFT_OF_CENTER,
955 GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER},
956 {SPEAKER_FRONT_RIGHT_OF_CENTER,
957 GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
958 {SPEAKER_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_REAR_CENTER},
959 /* Enum values diverge from this point onwards */
960 {SPEAKER_SIDE_LEFT, GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT},
961 {SPEAKER_SIDE_RIGHT, GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT},
962 {SPEAKER_TOP_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_CENTER},
963 {SPEAKER_TOP_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT},
964 {SPEAKER_TOP_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER},
965 {SPEAKER_TOP_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT},
966 {SPEAKER_TOP_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT},
967 {SPEAKER_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER},
968 {SPEAKER_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT}
971 /* Parse WAVEFORMATEX to get the gstreamer channel mask, and the wasapi channel
972 * positions so GstAudioRingbuffer can reorder the audio data to match the
973 * gstreamer channel order. */
975 gst_wasapi_util_waveformatex_to_channel_mask (WAVEFORMATEXTENSIBLE * format,
976 GstAudioChannelPosition ** out_position)
980 WORD nChannels = format->Format.nChannels;
981 DWORD dwChannelMask = format->dwChannelMask;
982 GstAudioChannelPosition *pos = NULL;
984 pos = g_new (GstAudioChannelPosition, nChannels);
985 gst_wasapi_util_channel_position_all_none (nChannels, pos);
987 /* Too many channels, have to assume that they are all non-positional */
988 if (nChannels > G_N_ELEMENTS (wasapi_to_gst_pos)) {
989 GST_INFO ("Got too many (%i) channels, assuming non-positional", nChannels);
993 /* Too many bits in the channel mask, and the bits don't match nChannels */
994 if (dwChannelMask >> (G_N_ELEMENTS (wasapi_to_gst_pos) + 1) != 0) {
995 GST_WARNING ("Too many bits in channel mask (%lu), assuming "
996 "non-positional", dwChannelMask);
1000 /* Map WASAPI's channel mask to Gstreamer's channel mask and positions.
1001 * If the no. of bits in the mask > nChannels, we will ignore the extra. */
1002 for (ii = 0, ch = 0; ii < G_N_ELEMENTS (wasapi_to_gst_pos) && ch < nChannels;
1004 if (!(dwChannelMask & wasapi_to_gst_pos[ii].wasapi_pos))
1005 /* no match, try next */
1007 mask |= G_GUINT64_CONSTANT (1) << wasapi_to_gst_pos[ii].gst_pos;
1008 pos[ch++] = wasapi_to_gst_pos[ii].gst_pos;
1011 /* XXX: Warn if some channel masks couldn't be mapped? */
1013 GST_DEBUG ("Converted WASAPI mask 0x%" G_GINT64_MODIFIER "x -> 0x%"
1014 G_GINT64_MODIFIER "x", (guint64) dwChannelMask, (guint64) mask);
1018 *out_position = pos;
1023 gst_wasapi2_util_parse_waveformatex (WAVEFORMATEXTENSIBLE * format,
1024 GstCaps * template_caps, GstCaps ** out_caps,
1025 GstAudioChannelPosition ** out_positions)
1029 guint64 channel_mask;
1033 /* TODO: handle SPDIF and other encoded formats */
1035 /* 1 or 2 channels <= 16 bits sample size OR
1036 * 1 or 2 channels > 16 bits sample size or >2 channels */
1037 if (format->Format.wFormatTag != WAVE_FORMAT_PCM &&
1038 format->Format.wFormatTag != WAVE_FORMAT_IEEE_FLOAT &&
1039 format->Format.wFormatTag != WAVE_FORMAT_EXTENSIBLE)
1040 /* Unhandled format tag */
1043 /* WASAPI can only tell us one canonical mix format that it will accept. The
1044 * alternative is calling IsFormatSupported on all combinations of formats.
1045 * Instead, it's simpler and faster to require conversion inside gstreamer */
1046 afmt = gst_waveformatex_to_audio_format (format);
1050 *out_caps = gst_caps_copy (template_caps);
1052 /* This will always return something that might be usable */
1054 gst_wasapi_util_waveformatex_to_channel_mask (format, out_positions);
1056 for (ii = 0; ii < gst_caps_get_size (*out_caps); ii++) {
1057 GstStructure *s = gst_caps_get_structure (*out_caps, ii);
1059 gst_structure_set (s,
1060 "format", G_TYPE_STRING, afmt,
1061 "channels", G_TYPE_INT, format->Format.nChannels,
1062 "rate", G_TYPE_INT, format->Format.nSamplesPerSec, NULL);
1065 gst_structure_set (s,
1066 "channel-mask", GST_TYPE_BITMASK, channel_mask, NULL);
1074 gst_wasapi2_client_get_caps (GstWasapi2Client * client)
1076 WAVEFORMATEX *format = NULL;
1077 static GstStaticCaps static_caps = GST_STATIC_CAPS (GST_WASAPI2_STATIC_CAPS);
1081 g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), NULL);
1083 if (client->supported_caps)
1084 return gst_caps_ref (client->supported_caps);
1086 if (!client->audio_client) {
1087 GST_WARNING_OBJECT (client, "IAudioClient3 wasn't configured");
1091 CoTaskMemFree (client->mix_format);
1092 client->mix_format = nullptr;
1094 g_clear_pointer (&client->positions, g_free);
1096 hr = client->audio_client->GetMixFormat (&format);
1097 if (!gst_wasapi2_result (hr))
1100 scaps = gst_static_caps_get (&static_caps);
1101 gst_wasapi2_util_parse_waveformatex ((WAVEFORMATEXTENSIBLE *) format,
1102 scaps, &client->supported_caps, &client->positions);
1103 gst_caps_unref (scaps);
1105 client->mix_format = format;
1107 if (!client->supported_caps) {
1108 GST_ERROR_OBJECT (client, "No caps from subclass");
1112 return gst_caps_ref (client->supported_caps);
1116 gst_wasapi2_client_initialize_audio_client3 (GstWasapi2Client * self)
1119 UINT32 default_period, fundamental_period, min_period, max_period;
1120 DWORD stream_flags = AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
1121 WAVEFORMATEX *format = NULL;
1123 gboolean ret = FALSE;
1124 IAudioClient3 *audio_client = self->audio_client;
1126 hr = audio_client->GetSharedModeEnginePeriod (self->mix_format,
1127 &default_period, &fundamental_period, &min_period, &max_period);
1128 if (!gst_wasapi2_result (hr))
1131 GST_INFO_OBJECT (self, "Using IAudioClient3, default period %d frames, "
1132 "fundamental period %d frames, minimum period %d frames, maximum period "
1133 "%d frames", default_period, fundamental_period, min_period, max_period);
1135 hr = audio_client->InitializeSharedAudioStream (stream_flags, min_period,
1136 self->mix_format, nullptr);
1138 if (!gst_wasapi2_result (hr)) {
1139 GST_WARNING_OBJECT (self, "Failed to initialize IAudioClient3");
1143 /* query period again to be ensured */
1144 hr = audio_client->GetCurrentSharedModeEnginePeriod (&format, &period);
1145 if (!gst_wasapi2_result (hr)) {
1146 GST_WARNING_OBJECT (self, "Failed to get current period");
1150 self->device_period = period;
1154 CoTaskMemFree (format);
1160 gst_wasapi2_util_get_best_buffer_sizes (GstAudioRingBufferSpec * spec,
1161 REFERENCE_TIME default_period, REFERENCE_TIME min_period,
1162 REFERENCE_TIME * ret_period, REFERENCE_TIME * ret_buffer_duration)
1164 REFERENCE_TIME use_period, use_buffer;
1166 /* Shared mode always runs at the default period, so if we want a larger
1167 * period (for lower CPU usage), we do it as a multiple of that */
1168 use_period = default_period;
1170 /* Ensure that the period (latency_time) used is an integral multiple of
1171 * either the default period or the minimum period */
1172 use_period = use_period * MAX ((spec->latency_time * 10) / use_period, 1);
1174 /* Ask WASAPI to create a software ringbuffer of at least this size; it may
1175 * be larger so the actual buffer time may be different, which is why after
1176 * initialization we read the buffer duration actually in-use and set
1177 * segsize/segtotal from that. */
1178 use_buffer = spec->buffer_time * 10;
1179 /* Has to be at least twice the period */
1180 if (use_buffer < 2 * use_period)
1181 use_buffer = 2 * use_period;
1183 *ret_period = use_period;
1184 *ret_buffer_duration = use_buffer;
/* Initialize the stream through the plain IAudioClient API in shared mode.
 * This is the default path, and also the fallback when the IAudioClient3
 * path was attempted but failed. */
gst_wasapi2_client_initialize_audio_client (GstWasapi2Client * self,
    GstAudioRingBufferSpec * spec)
  REFERENCE_TIME default_period, min_period;
  REFERENCE_TIME device_period, device_buffer_duration;
  /* We always stream in event-driven mode */
  DWORD stream_flags = AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
  IAudioClient3 *audio_client = self->audio_client;

  /* Both periods are reported in 100ns REFERENCE_TIME units */
  hr = audio_client->GetDevicePeriod (&default_period, &min_period);
  if (!gst_wasapi2_result (hr)) {
    GST_WARNING_OBJECT (self, "Couldn't get device period info");

  GST_INFO_OBJECT (self, "wasapi2 default period: %" G_GINT64_FORMAT
      ", min period: %" G_GINT64_FORMAT, default_period, min_period);

  rate = GST_AUDIO_INFO_RATE (&spec->info);

  if (self->low_latency) {
    device_period = default_period;
    /* this should be same as hnsPeriodicity
     * when AUDCLNT_STREAMFLAGS_EVENTCALLBACK is used
     * And in case of shared mode, hnsPeriodicity should be zero, so
     * this value should be zero as well */
    device_buffer_duration = 0;
    /* Clamp values to integral multiples of an appropriate period */
    gst_wasapi2_util_get_best_buffer_sizes (spec,
        default_period, min_period, &device_period, &device_buffer_duration);

  hr = audio_client->Initialize (AUDCLNT_SHAREMODE_SHARED, stream_flags,
      device_buffer_duration,
      /* This must always be 0 in shared mode */
      self->mix_format, nullptr);
  if (!gst_wasapi2_result (hr)) {
    GST_WARNING_OBJECT (self, "Couldn't initialize audioclient");

  /* device_period can be a non-power-of-10 value so round while converting */
  /* 100ns units -> frames: period * rate / 10^7 */
  self->device_period =
      gst_util_uint64_scale_round (device_period, rate * 100, GST_SECOND);
/* Open the device for streaming: initialize the IAudioClient (v3 path when
 * low-latency was requested, plain path otherwise), derive segsize/segtotal
 * for @spec from the actual device buffer, and fetch the render/capture and
 * volume service interfaces. Idempotent: returns early if already opened. */
gst_wasapi2_client_open (GstWasapi2Client * client, GstAudioRingBufferSpec * spec,
    GstAudioRingBuffer * buf)
  REFERENCE_TIME latency_rt;
  IAudioClient3 *audio_client;
  ComPtr<ISimpleAudioVolume> audio_volume;
  gboolean initialized = FALSE;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);

  /* FIXME: Once IAudioClient3 was initialized, we may need to re-open
   * IAudioClient3 in order to handle audio format change */
  if (client->opened) {
    GST_INFO_OBJECT (client, "IAudioClient3 object is initialized already");

  audio_client = client->audio_client;

  if (!audio_client) {
    GST_ERROR_OBJECT (client, "IAudioClient3 object wasn't configured");

  /* mix_format is filled when caps were queried; required for Initialize() */
  if (!client->mix_format) {
    GST_ERROR_OBJECT (client, "Unknown mix format");

  /* Only use audioclient3 when low-latency is requested because otherwise
   * very slow machines and VMs with 1 CPU allocated will get glitches:
   * https://bugzilla.gnome.org/show_bug.cgi?id=794497 */
  if (client->low_latency)
    initialized = gst_wasapi2_client_initialize_audio_client3 (client);

  /* Try again if IAudioClient3 API is unavailable.
   * NOTE: IAudioClient3:: methods might not be available for default device
   * NOTE: The default device is a special device which is needed for supporting
   * automatic stream routing
   * https://docs.microsoft.com/en-us/windows/win32/coreaudio/automatic-stream-routing
   */
    initialized = gst_wasapi2_client_initialize_audio_client (client, spec);

    GST_ERROR_OBJECT (client, "Failed to initialize audioclient");

  bpf = GST_AUDIO_INFO_BPF (&spec->info);
  rate = GST_AUDIO_INFO_RATE (&spec->info);

  /* Total size in frames of the allocated buffer that we will read from */
  hr = audio_client->GetBufferSize (&client->buffer_frame_count);
  if (!gst_wasapi2_result (hr)) {

  GST_INFO_OBJECT (client, "buffer size is %i frames, device period is %i "
      "frames, bpf is %i bytes, rate is %i Hz", client->buffer_frame_count,
      client->device_period, bpf, rate);

  /* Actual latency-time/buffer-time will be different now */
  spec->segsize = client->device_period * bpf;

  /* We need a minimum of 2 segments to ensure glitch-free playback */
  spec->segtotal = MAX (client->buffer_frame_count * bpf / spec->segsize, 2);

  GST_INFO_OBJECT (client, "segsize is %i, segtotal is %i", spec->segsize,

  /* Get WASAPI latency for logging */
  hr = audio_client->GetStreamLatency (&latency_rt);
  if (!gst_wasapi2_result (hr)) {

  GST_INFO_OBJECT (client, "wasapi2 stream latency: %" G_GINT64_FORMAT " (%"
      G_GINT64_FORMAT " ms)", latency_rt, latency_rt / 10000);

  /* Set the event handler which will trigger read/write */
  hr = audio_client->SetEventHandle (client->event_handle);
  if (!gst_wasapi2_result (hr))

  /* Fetch the direction-specific service interface: render for playback,
   * capture otherwise; Detach() transfers ownership to the client struct */
  if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
    ComPtr<IAudioRenderClient> render_client;

    hr = audio_client->GetService (IID_PPV_ARGS (&render_client));
    if (!gst_wasapi2_result (hr))

    client->audio_render_client = render_client.Detach ();

    ComPtr<IAudioCaptureClient> capture_client;

    hr = audio_client->GetService (IID_PPV_ARGS (&capture_client));
    if (!gst_wasapi2_result (hr))

    client->audio_capture_client = capture_client.Detach ();

  /* Volume/mute control service, used by the set/get mute/volume helpers */
  hr = audio_client->GetService (IID_PPV_ARGS (&audio_volume));
  if (!gst_wasapi2_result (hr))

  client->audio_volume = audio_volume.Detach ();

  /* this mute state seems to be global setting for this device
   * but below documentation looks unclear why mute state is preserved
   * even after process is terminated
   * https://docs.microsoft.com/en-us/windows/win32/api/audioclient/nf-audioclient-isimpleaudiovolume-setmute
   * Explicitly disable mute so as to ensure we can produce or play audio
   * regardless of previous status
   */
  client->audio_volume->SetMute(FALSE, nullptr);

  gst_audio_ring_buffer_set_channel_positions (buf, client->positions);

  client->opened = TRUE;
/* Get the empty space in the buffer that we have to write to */
/* NOTE(review): callers compare the result against 0, so the elided error
 * paths presumably return a negative sentinel — confirm the return type is
 * signed (gint), otherwise those checks are dead code. */
gst_wasapi2_client_get_can_frames (GstWasapi2Client * self)
  UINT32 n_frames_padding;
  IAudioClient3 *audio_client = self->audio_client;

  if (!audio_client) {
    GST_WARNING_OBJECT (self, "IAudioClient3 wasn't configured");

  /* Frames the card hasn't rendered yet */
  hr = audio_client->GetCurrentPadding (&n_frames_padding);
  if (!gst_wasapi2_result (hr))

  GST_LOG_OBJECT (self, "%d unread frames (padding)", n_frames_padding);

  /* We can write out these many frames */
  return self->buffer_frame_count - n_frames_padding;
/* Start streaming. Verifies the direction-specific service interface is in
 * place, pre-fills the render buffer with silence, starts the IAudioClient
 * and clears the capture adapter. Returns the new running state. */
gst_wasapi2_client_start (GstWasapi2Client * client)
  IAudioClient3 *audio_client;
  WAVEFORMATEX *mix_format;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);

  audio_client = client->audio_client;
  mix_format = client->mix_format;

  if (!audio_client) {
    GST_ERROR_OBJECT (client, "IAudioClient3 object wasn't configured");

    GST_ERROR_OBJECT (client, "Unknown MixFormat");

  if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE &&
      !client->audio_capture_client) {
    GST_ERROR_OBJECT (client, "IAudioCaptureClient wasn't configured");

  if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER &&
      !client->audio_render_client) {
    GST_ERROR_OBJECT (client, "IAudioRenderClient wasn't configured");

  /* Re-arm the cancel event (stop() sets it to wake blocked waits) */
  ResetEvent (client->cancellable);

  if (client->running) {
    GST_WARNING_OBJECT (client, "IAudioClient3 is running already");

  /* To avoid start-up glitches, before starting the streaming, we fill the
   * buffer with silence as recommended by the documentation:
   * https://msdn.microsoft.com/en-us/library/windows/desktop/dd370879%28v=vs.85%29.aspx */
  if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
    IAudioRenderClient *render_client = client->audio_render_client;

    n_frames = gst_wasapi2_client_get_can_frames (client);
      GST_ERROR_OBJECT (client,
          "should have more than %i frames to write", n_frames);

    len = n_frames * mix_format->nBlockAlign;

    hr = render_client->GetBuffer (n_frames, &dst);
    if (!gst_wasapi2_result (hr)) {
      GST_ERROR_OBJECT (client, "Couldn't get buffer");

    GST_DEBUG_OBJECT (client, "pre-wrote %i bytes of silence", len);

    /* BUFFERFLAGS_SILENT tells WASAPI to treat the data as silence */
    hr = render_client->ReleaseBuffer (n_frames, AUDCLNT_BUFFERFLAGS_SILENT);
    if (!gst_wasapi2_result (hr)) {
      GST_ERROR_OBJECT (client, "Couldn't release buffer");

  hr = audio_client->Start ();
  client->running = gst_wasapi2_result (hr);
  /* Drop any stale capture data from a previous run */
  gst_adapter_clear (client->adapter);

  return client->running;
/* Stop streaming: wake any read/write loop blocked on the event handles,
 * stop the IAudioClient, and Reset() it so the object can be reused. */
gst_wasapi2_client_stop (GstWasapi2Client * client)
  IAudioClient3 *audio_client;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);

  audio_client = client->audio_client;

  if (!client->running) {
    GST_DEBUG_OBJECT (client, "We are not running now");

  if (!client->audio_client) {
    GST_ERROR_OBJECT (client, "IAudioClient3 object wasn't configured");

  /* Clear the running flag first, then signal the cancellable event so
   * WaitForMultipleObjects() in read/write returns and sees !running */
  client->running = FALSE;
  SetEvent (client->cancellable);

  hr = audio_client->Stop ();
  if (!gst_wasapi2_result (hr))

  /* reset state for reuse case */
  hr = audio_client->Reset ();
  return gst_wasapi2_result (hr);
/* Read exactly @length bytes of captured audio into @data, blocking on the
 * capture event until enough data arrives. Captured frames are staged in
 * client->adapter so that a GetBuffer() that returns more than @length can
 * be carried over to the next call. The wait is interrupted by the
 * cancellable event set in stop(). */
gst_wasapi2_client_read (GstWasapi2Client * client, gpointer data, guint length)
  IAudioCaptureClient *capture_client;
  WAVEFORMATEX *mix_format;
  /* bytes still to be delivered into @data */
  guint wanted = length;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
  g_return_val_if_fail (client->audio_capture_client != NULL, -1);
  g_return_val_if_fail (client->mix_format != NULL, -1);

  capture_client = client->audio_capture_client;
  mix_format = client->mix_format;

  if (!client->running) {
    GST_ERROR_OBJECT (client, "client is not running now");

  /* If we've accumulated enough data, return it immediately */
  if (gst_adapter_available (client->adapter) >= wanted) {
    memcpy (data, gst_adapter_map (client->adapter, wanted), wanted);
    gst_adapter_flush (client->adapter, wanted);
    GST_DEBUG_OBJECT (client, "Adapter has enough data, returning %i", wanted);

  /* bytes per frame */
  bpf = mix_format->nBlockAlign;

  while (wanted > 0) {
    guint got_frames, avail_frames, n_frames, want_frames, read_len;
    HANDLE event_handle[2];

    event_handle[0] = client->event_handle;
    event_handle[1] = client->cancellable;

    /* Wait for data to become available */
    dwWaitResult = WaitForMultipleObjects (2, event_handle, FALSE, INFINITE);
    if (dwWaitResult != WAIT_OBJECT_0 && dwWaitResult != WAIT_OBJECT_0 + 1) {
      GST_ERROR_OBJECT (client, "Error waiting for event handle: %x",
          (guint) dwWaitResult);

    /* stop() clears the flag before signalling the cancellable */
    if (!client->running) {
      GST_DEBUG_OBJECT (client, "Cancelled");

    hr = capture_client->GetBuffer (&from, &got_frames, &flags, nullptr,
    if (!gst_wasapi2_result (hr)) {
      /* Empty buffer is not fatal; wait for the next event */
      if (hr == AUDCLNT_S_BUFFER_EMPTY) {
        GST_INFO_OBJECT (client, "Client buffer is empty, retry");

      GST_ERROR_OBJECT (client, "Couldn't get buffer from capture client");

    if (got_frames == 0) {
      GST_DEBUG_OBJECT (client, "No buffer to read");
      capture_client->ReleaseBuffer (got_frames);

    if (G_UNLIKELY (flags != 0)) {
      /* https://docs.microsoft.com/en-us/windows/win32/api/audioclient/ne-audioclient-_audclnt_bufferflags */
      if (flags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY)
        GST_DEBUG_OBJECT (client, "WASAPI reported discontinuity (glitch?)");
      if (flags & AUDCLNT_BUFFERFLAGS_TIMESTAMP_ERROR)
        GST_DEBUG_OBJECT (client, "WASAPI reported a timestamp error");

    /* Copy all the frames we got into the adapter, and then extract at most
     * @wanted size of frames from it. This helps when ::GetBuffer returns more
     * data than we can handle right now. */
      GstBuffer *tmp = gst_buffer_new_allocate (NULL, got_frames * bpf, NULL);
      /* If flags has AUDCLNT_BUFFERFLAGS_SILENT, we will ignore the actual
       * data and write out silence, see:
       * https://docs.microsoft.com/en-us/windows/win32/api/audioclient/ne-audioclient-_audclnt_bufferflags */
      if (flags & AUDCLNT_BUFFERFLAGS_SILENT)
        memset (from, 0, got_frames * bpf);
      gst_buffer_fill (tmp, 0, from, got_frames * bpf);
      gst_adapter_push (client->adapter, tmp);

    /* Release all captured buffers; we copied them above */
    hr = capture_client->ReleaseBuffer (got_frames);

    if (!gst_wasapi2_result (hr)) {
      GST_ERROR_OBJECT (client, "Failed to release buffer");

    want_frames = wanted / bpf;
    avail_frames = gst_adapter_available (client->adapter) / bpf;

    /* Only copy data that will fit into the allocated buffer of size @length */
    n_frames = MIN (avail_frames, want_frames);
    read_len = n_frames * bpf;

    if (read_len == 0) {
      GST_WARNING_OBJECT (client, "No data to read");

    GST_LOG_OBJECT (client, "frames captured: %d (%d bytes), "
        "can read: %d (%d bytes), will read: %d (%d bytes), "
        "adapter has: %d (%d bytes)", got_frames, got_frames * bpf, want_frames,
        wanted, n_frames, read_len, avail_frames, avail_frames * bpf);

    memcpy (data, gst_adapter_map (client->adapter, read_len), read_len);
    gst_adapter_flush (client->adapter, read_len);
/* Write up to @length bytes of audio from @data into the device buffer.
 * In shared mode partial writes are allowed, so we only block (on the
 * event/cancellable pair) when there is currently no room at all. */
gst_wasapi2_client_write (GstWasapi2Client * client, gpointer data,
  IAudioRenderClient *render_client;
  WAVEFORMATEX *mix_format;

  BYTE *dst = nullptr;

  /* NOTE(review): can_frames is guint, so the "can_frames < 0" checks below
   * can never be true; a negative error return from
   * gst_wasapi2_client_get_can_frames() would wrap to a huge unsigned value
   * instead. can_frames should be declared gint for the checks to work. */
  guint can_frames, have_frames, n_frames, write_len = 0;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), -1);
  g_return_val_if_fail (client->audio_render_client != NULL, -1);
  g_return_val_if_fail (client->mix_format != NULL, -1);

  if (!client->running) {
    GST_WARNING_OBJECT (client, "client is not running now");

  render_client = client->audio_render_client;
  mix_format = client->mix_format;

  /* We have N frames to be written out */
  have_frames = length / (mix_format->nBlockAlign);

  /* In shared mode we can write parts of the buffer, so only wait
   * in case we can't write anything */
  can_frames = gst_wasapi2_client_get_can_frames (client);
  if (can_frames < 0) {
    GST_ERROR_OBJECT (client, "Error getting frames to write to");

  if (can_frames == 0) {
    HANDLE event_handle[2];

    event_handle[0] = client->event_handle;
    event_handle[1] = client->cancellable;

    /* Block until the device frees up space or we are cancelled */
    dwWaitResult = WaitForMultipleObjects (2, event_handle, FALSE, INFINITE);
    if (dwWaitResult != WAIT_OBJECT_0 && dwWaitResult != WAIT_OBJECT_0 + 1) {
      GST_ERROR_OBJECT (client, "Error waiting for event handle: %x",
          (guint) dwWaitResult);

    /* stop() clears the flag before signalling the cancellable */
    if (!client->running) {
      GST_DEBUG_OBJECT (client, "Cancelled");

    can_frames = gst_wasapi2_client_get_can_frames (client);
    if (can_frames < 0) {
      GST_ERROR_OBJECT (client, "Error getting frames to write to");

  /* We will write out these many frames, and this much length */
  n_frames = MIN (can_frames, have_frames);
  write_len = n_frames * mix_format->nBlockAlign;

  GST_LOG_OBJECT (client, "total: %d, have_frames: %d (%d bytes), "
      "can_frames: %d, will write: %d (%d bytes)", client->buffer_frame_count,
      have_frames, length, can_frames, n_frames, write_len);

  hr = render_client->GetBuffer (n_frames, &dst);
  if (!gst_wasapi2_result (hr)) {
    GST_ERROR_OBJECT (client, "Couldn't get buffer from client");

  memcpy (dst, data, write_len);
  hr = render_client->ReleaseBuffer (n_frames, 0);
/* Report the number of frames currently queued in the device buffer
 * (IAudioClient::GetCurrentPadding). Used for latency reporting. */
gst_wasapi2_client_delay (GstWasapi2Client * client)
  IAudioClient3 *audio_client;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), 0);

  audio_client = client->audio_client;

  if (!audio_client) {
    GST_WARNING_OBJECT (client, "IAudioClient3 wasn't configured");

  hr = audio_client->GetCurrentPadding (&delay);
  if (!gst_wasapi2_result (hr))
1733 gst_wasapi2_client_set_mute (GstWasapi2Client * client, gboolean mute)
1736 ISimpleAudioVolume *audio_volume;
1738 g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
1740 audio_volume = client->audio_volume;
1742 if (!audio_volume) {
1743 GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
1747 hr = audio_volume->SetMute (mute, nullptr);
1748 GST_DEBUG_OBJECT (client, "Set mute %s, hr: 0x%x",
1749 mute ? "enabled" : "disabled", (gint) hr);
1751 return gst_wasapi2_result (hr);
1755 gst_wasapi2_client_get_mute (GstWasapi2Client * client, gboolean * mute)
1758 ISimpleAudioVolume *audio_volume;
1759 BOOL current_mute = FALSE;
1761 g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
1762 g_return_val_if_fail (mute != NULL, FALSE);
1764 audio_volume = client->audio_volume;
1766 if (!audio_volume) {
1767 GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
1771 hr = audio_volume->GetMute (¤t_mute);
1772 if (!gst_wasapi2_result (hr))
1775 *mute = (gboolean) current_mute;
1781 gst_wasapi2_client_set_volume (GstWasapi2Client * client, gfloat volume)
1784 ISimpleAudioVolume *audio_volume;
1786 g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
1787 g_return_val_if_fail (volume >= 0 && volume <= 1.0, FALSE);
1789 audio_volume = client->audio_volume;
1791 if (!audio_volume) {
1792 GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
1796 hr = audio_volume->SetMasterVolume (volume, nullptr);
1797 GST_DEBUG_OBJECT (client, "Set volume %.2f hr: 0x%x", volume, (gint) hr);
1799 return gst_wasapi2_result (hr);
1803 gst_wasapi2_client_get_volume (GstWasapi2Client * client, gfloat * volume)
1806 ISimpleAudioVolume *audio_volume;
1807 float current_volume = FALSE;
1809 g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
1810 g_return_val_if_fail (volume != NULL, FALSE);
1812 audio_volume = client->audio_volume;
1814 if (!audio_volume) {
1815 GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
1819 hr = audio_volume->GetMasterVolume (¤t_volume);
1820 if (!gst_wasapi2_result (hr))
1823 *volume = current_volume;
1829 gst_wasapi2_client_ensure_activation (GstWasapi2Client * client)
1831 g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
1833 /* should not happen */
1834 g_assert (client->activate_state != GST_WASAPI2_CLIENT_ACTIVATE_INIT);
1836 g_mutex_lock (&client->init_lock);
1837 while (client->activate_state == GST_WASAPI2_CLIENT_ACTIVATE_WAIT)
1838 g_cond_wait (&client->init_cond, &client->init_lock);
1839 g_mutex_unlock (&client->init_lock);
1841 return client->activate_state == GST_WASAPI2_CLIENT_ACTIVATE_DONE;
/* Resolve the ICoreDispatcher of the current thread's CoreWindow, walking
 * CoreApplication -> current view -> CoreWindow -> Dispatcher. Only works
 * when called from a UI (UWP view) thread. */
find_dispatcher (ICoreDispatcher ** dispatcher)
  HStringReference hstr_core_app =
      HStringReference(RuntimeClass_Windows_ApplicationModel_Core_CoreApplication);

  ComPtr<ICoreApplication> core_app;
  hr = GetActivationFactory (hstr_core_app.Get(), &core_app);

  ComPtr<ICoreApplicationView> core_app_view;
  hr = core_app->GetCurrentView (&core_app_view);

  ComPtr<ICoreWindow> core_window;
  hr = core_app_view->get_CoreWindow (&core_window);

  /* Returns the HRESULT of the final lookup directly to the caller */
  return core_window->get_Dispatcher (dispatcher);
/* Create a new GstWasapi2Client. If the caller didn't supply an
 * ICoreDispatcher, try to find one for the current thread (needed for the
 * UWP activation path). Fails (returns NULL in the elided tail) when
 * activation did not succeed. */
gst_wasapi2_client_new (GstWasapi2ClientDeviceClass device_class,
    gboolean low_latency, gint device_index, const gchar * device_id,
    gpointer dispatcher)
  GstWasapi2Client *self;
  ComPtr<ICoreDispatcher> core_dispatcher;
  /* Multiple COM init is allowed */
  RoInitializeWrapper init_wrapper (RO_INIT_MULTITHREADED);

  /* If application didn't pass ICoreDispatcher object,
   * try to get dispatcher object for the current thread */
  hr = find_dispatcher (&core_dispatcher);
  if (SUCCEEDED (hr)) {
    GST_DEBUG ("UI dispatcher is available");
    dispatcher = core_dispatcher.Get ();
    GST_DEBUG ("UI dispatcher is unavailable");
    GST_DEBUG ("Use user passed UI dispatcher");

  /* Activation is triggered from the object's construction; the dispatcher
   * is handed over as a construct property */
  self = (GstWasapi2Client *) g_object_new (GST_TYPE_WASAPI2_CLIENT,
      "device-class", device_class, "low-latency", low_latency,
      "device-index", device_index, "device", device_id,
      "dispatcher", dispatcher, NULL);

  /* Reset explicitly to ensure that it happens before
   * RoInitializeWrapper dtor is called */
  core_dispatcher.Reset ();

  if (self->activate_state == GST_WASAPI2_CLIENT_ACTIVATE_FAILED) {
    gst_object_unref (self);

  /* Take ownership of the initially-floating GstObject reference */
  gst_object_ref_sink (self);