summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSeungha Yang <seungha@centricular.com>2020-05-26 05:17:41 +0900
committerNirbheek Chauhan <nirbheek@centricular.com>2020-06-08 03:10:05 +0000
commit27784576781a89752010c32ad6df51b38cd2339c (patch)
treee8daa901c588e24a8341e59592e390be679e788b
parent0f74785b8ebc3a042f1e8593e516c793897623f1 (diff)
downloadgstreamer-plugins-bad-27784576781a89752010c32ad6df51b38cd2339c.tar.gz
wasapi2: Introduce new WASAPI plugin
Add a new wasapi implementation mainly to support UWP applications. Basically the core logic of this plugin is almost identical to the existing wasapi plugin, but the main target is Windows 10 (+ UWP). Since this plugin uses WinRT APIs, it most likely will not work on Windows 8 or lower. Compared with the existing wasapi plugin, additional features of this plugin are * Fully compatible with both Windows 10 desktop and UWP applications * Supports automatic stream routing (automatic fallback when a device is removed) * Supports device-level mute/volume control But some features of the existing wasapi plugin are not implemented in this plugin yet * Exclusive streaming mode is not supported * Loopback feature is not implemented * Cross-compilation is not possible with the current mingw toolchain (meaning that MSVC and the Windows 10 SDK are required to build this plugin) Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1264>
-rw-r--r--meson_options.txt1
-rw-r--r--sys/meson.build1
-rw-r--r--sys/wasapi2/AsyncOperations.h168
-rw-r--r--sys/wasapi2/gstwasapi2client.cpp1777
-rw-r--r--sys/wasapi2/gstwasapi2client.h82
-rw-r--r--sys/wasapi2/gstwasapi2sink.c548
-rw-r--r--sys/wasapi2/gstwasapi2sink.h34
-rw-r--r--sys/wasapi2/gstwasapi2src.c546
-rw-r--r--sys/wasapi2/gstwasapi2src.h34
-rw-r--r--sys/wasapi2/gstwasapi2util.c209
-rw-r--r--sys/wasapi2/gstwasapi2util.h47
-rw-r--r--sys/wasapi2/meson.build92
-rw-r--r--sys/wasapi2/plugin.c56
13 files changed, 3595 insertions, 0 deletions
diff --git a/meson_options.txt b/meson_options.txt
index dd34b54fe..ccd79333d 100644
--- a/meson_options.txt
+++ b/meson_options.txt
@@ -155,6 +155,7 @@ option('voaacenc', type : 'feature', value : 'auto', description : 'AAC audio en
option('voamrwbenc', type : 'feature', value : 'auto', description : 'AMR-WB audio encoder plugin')
option('vulkan', type : 'feature', value : 'auto', description : 'Vulkan video sink plugin')
option('wasapi', type : 'feature', value : 'auto', description : 'Windows Audio Session API source/sink plugin')
+option('wasapi2', type : 'feature', value : 'auto', description : 'Windows Audio Session API source/sink plugin with WinRT API')
option('webp', type : 'feature', value : 'auto', description : 'WebP image codec plugin')
option('webrtc', type : 'feature', value : 'auto', description : 'WebRTC audio/video network bin plugin')
option('webrtcdsp', type : 'feature', value : 'auto', description : 'Plugin with various audio filters provided by the WebRTC audio processing library')
diff --git a/sys/meson.build b/sys/meson.build
index 7cf9a0024..bb0791b06 100644
--- a/sys/meson.build
+++ b/sys/meson.build
@@ -22,6 +22,7 @@ subdir('tinyalsa')
subdir('uvch264')
subdir('v4l2codecs')
subdir('wasapi')
+subdir('wasapi2')
subdir('winks')
subdir('winscreencap')
diff --git a/sys/wasapi2/AsyncOperations.h b/sys/wasapi2/AsyncOperations.h
new file mode 100644
index 000000000..412886065
--- /dev/null
+++ b/sys/wasapi2/AsyncOperations.h
@@ -0,0 +1,168 @@
+// MIT License
+//
+// Copyright (c) 2016 Microsoft Corporation
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+// Source taken from https://github.com/microsoft/MixedRealityCompanionKit
+
+#pragma once
+
+#include <wrl.h>
+#include <wrl\async.h>
+#include <Windows.System.Threading.h>
+#include <functional>
+
+// Attaches a completion callback to a WinRT async operation.
+// On completion the lambda is invoked with (HRESULT, operation, status);
+// for a non-Completed status the HRESULT is fetched from IAsyncInfo.
+// Returns E_INVALIDARG for a null operation, E_OUTOFMEMORY if the WRL
+// callback could not be allocated, otherwise the put_Completed() result.
+template <typename TDelegate, typename TOperation, typename TLambda>
+HRESULT StartAsyncThen(_In_ TOperation* pOperation, _In_ TLambda&& tFunc)
+{
+ if (nullptr == pOperation)
+ {
+ return E_INVALIDARG;
+ }
+
+ auto spCallback = Microsoft::WRL::Callback<TDelegate>(
+ [tFunc](_In_ TOperation* pOperation, _In_ AsyncStatus status) -> HRESULT
+ {
+ HRESULT hr = S_OK;
+
+ // wrap the operation
+ if (status != AsyncStatus::Completed)
+ {
+ Microsoft::WRL::ComPtr<TOperation> spOperation(pOperation);
+ Microsoft::WRL::ComPtr<IAsyncInfo> spAsyncInfo;
+ hr = spOperation.As(&spAsyncInfo);
+ if (SUCCEEDED(hr))
+ {
+ // replace S_OK with the operation's actual error code
+ spAsyncInfo->get_ErrorCode(&hr);
+ }
+ }
+
+ return tFunc(hr, pOperation, status);
+ });
+
+ // start
+ return (nullptr != spCallback) ? pOperation->put_Completed(spCallback.Get()) : E_OUTOFMEMORY;
+}
+// Convenience overload for IAsyncAction (no result, no progress).
+template <typename TLambda>
+HRESULT StartAsyncThen(_In_ ABI::Windows::Foundation::IAsyncAction* pOperation, _In_ TLambda&& tFunc)
+{
+ return StartAsyncThen<ABI::Windows::Foundation::IAsyncActionCompletedHandler, ABI::Windows::Foundation::IAsyncAction>(pOperation, static_cast<TLambda&&>(tFunc));
+}
+// Convenience overload for IAsyncActionWithProgress<TProgress>.
+// Fix: the operation type passed to the primary template was missing its
+// ABI:: qualifier (unlike every sibling overload), so it only resolved if
+// the including TU happened to have a matching using-directive in effect
+// at the point of instantiation.
+template <typename TProgress, typename TLambda>
+HRESULT StartAsyncThen(_In_ ABI::Windows::Foundation::IAsyncActionWithProgress<TProgress>* pOperation, _In_ TLambda&& tFunc)
+{
+    return StartAsyncThen<ABI::Windows::Foundation::IAsyncActionWithProgressCompletedHandler<TProgress>, ABI::Windows::Foundation::IAsyncActionWithProgress<TProgress>>(pOperation, static_cast<TLambda&&>(tFunc));
+}
+// Convenience overload for IAsyncOperation<TResult>.
+template <typename TResult, typename TLambda>
+HRESULT StartAsyncThen(_In_ ABI::Windows::Foundation::IAsyncOperation<TResult>* pOperation, _In_ TLambda&& tFunc)
+{
+ return StartAsyncThen<ABI::Windows::Foundation::IAsyncOperationCompletedHandler<TResult>, ABI::Windows::Foundation::IAsyncOperation<TResult>>(pOperation, static_cast<TLambda&&>(tFunc));
+}
+// Convenience overload for IAsyncOperationWithProgress<TResult, TProgress>.
+template <typename TResult, typename TProgress, typename TLambda>
+HRESULT StartAsyncThen(_In_ ABI::Windows::Foundation::IAsyncOperationWithProgress<TResult, TProgress>* pOperation, _In_ TLambda&& tFunc)
+{
+ return StartAsyncThen<ABI::Windows::Foundation::IAsyncOperationWithProgressCompletedHandler<TResult, TProgress>, ABI::Windows::Foundation::IAsyncOperationWithProgress<TResult, TProgress>>(pOperation, static_cast<TLambda&&>(tFunc));
+}
+
+
+// Completion delegate that turns a WinRT async operation into a blocking
+// wait: Invoke() (called by the async machinery) signals an event that
+// SyncWait() blocks on.
+// eg. TOperation = IAsyncOperationWithProgress<UINT32, UINT32>
+// eg. THandler = IAsyncOperationWithProgressCompletedHandler<UINT, UINT>
+template<typename TOperation, typename THandler>
+class AsyncEventDelegate
+    : public Microsoft::WRL::RuntimeClass
+    < Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::RuntimeClassType::Delegate>
+    , THandler
+    , Microsoft::WRL::FtmBase >
+{
+public:
+    AsyncEventDelegate()
+        : _completedEvent(CreateEventEx(nullptr, nullptr, 0, EVENT_ALL_ACCESS))
+    {
+        // Fix: fully qualify ComPtr -- this header has no using-directive
+        // for Microsoft::WRL (all other uses are fully qualified), so the
+        // bare name only resolved by accident of the including TU.
+        // NOTE(review): capturing spThis stores a strong self-reference in
+        // _func, keeping the delegate alive while the callback exists --
+        // kept as in the upstream sample.
+        Microsoft::WRL::ComPtr<AsyncEventDelegate> spThis(this);
+        auto lambda = ([this, spThis](_In_ HRESULT hr, _In_ TOperation* pOperation)
+        {
+            SetEvent(_completedEvent.Get());
+        });
+        _func = std::move(lambda);
+    }
+
+    // Completion handler entry point; fetches the operation's error code
+    // (via IAsyncInfo) when it did not complete successfully, then wakes
+    // any SyncWait() caller.
+    STDMETHOD(Invoke)(
+        _In_ TOperation* pOperation,
+        _In_ AsyncStatus status)
+    {
+        HRESULT hr = S_OK;
+
+        // if we completed successfully, then there is no need for getting hresult
+        if (status != AsyncStatus::Completed)
+        {
+            Microsoft::WRL::ComPtr<TOperation> spOperation(pOperation);
+            Microsoft::WRL::ComPtr<IAsyncInfo> spAsyncInfo;
+            if (SUCCEEDED(spOperation.As(&spAsyncInfo)))
+            {
+                spAsyncInfo->get_ErrorCode(&hr);
+            }
+        }
+
+        _func(hr, pOperation);
+
+        return S_OK;
+    }
+
+    // Registers self as the completion handler and blocks (alertable wait)
+    // until the operation completes or dwMilliseconds elapses.
+    STDMETHOD(SyncWait)(_In_ TOperation* pOperation, _In_ DWORD dwMilliseconds)
+    {
+        HRESULT hr = pOperation->put_Completed(this);
+        if (FAILED(hr))
+        {
+            return hr;
+        }
+
+        DWORD dwWait = WaitForSingleObjectEx(_completedEvent.Get(), dwMilliseconds, TRUE);
+        if (WAIT_IO_COMPLETION == dwWait || WAIT_OBJECT_0 == dwWait)
+            return S_OK;
+
+        // NOTE(review): on WAIT_TIMEOUT GetLastError() is not set by the
+        // wait itself, so this HRESULT may be stale -- kept as upstream.
+        return HRESULT_FROM_WIN32(GetLastError());
+    }
+
+private:
+    std::function<void(HRESULT, TOperation*)> _func;
+    Microsoft::WRL::Wrappers::Event _completedEvent;
+};
+// Blocks until pOperation completes (or dwMilliseconds elapses) by
+// attaching a fresh AsyncEventDelegate as its completion handler.
+template <typename TOperation, typename THandler>
+HRESULT SyncWait(_In_ TOperation* pOperation, _In_ DWORD dwMilliseconds)
+{
+ auto spCallback = Microsoft::WRL::Make<AsyncEventDelegate<TOperation, THandler>>();
+
+ return spCallback->SyncWait(pOperation, dwMilliseconds);
+}
+// Overload for IAsyncAction.
+// NOTE(review): TResult is unused here, so callers must still specify an
+// explicit (ignored) template argument -- kept as in the upstream sample.
+template <typename TResult>
+HRESULT SyncWait(_In_ ABI::Windows::Foundation::IAsyncAction* pOperation, _In_ DWORD dwMilliseconds = INFINITE)
+{
+ return SyncWait<ABI::Windows::Foundation::IAsyncAction, ABI::Windows::Foundation::IAsyncActionCompletedHandler>(pOperation, dwMilliseconds);
+}
+// Overload for IAsyncOperation<TResult> (used by device enumeration below).
+template <typename TResult>
+HRESULT SyncWait(_In_ ABI::Windows::Foundation::IAsyncOperation<TResult>* pOperation, _In_ DWORD dwMilliseconds = INFINITE)
+{
+ return SyncWait<ABI::Windows::Foundation::IAsyncOperation<TResult>, ABI::Windows::Foundation::IAsyncOperationCompletedHandler<TResult>>(pOperation, dwMilliseconds);
+}
+// Overload for IAsyncOperationWithProgress<TResult, TProgress>.
+template <typename TResult, typename TProgress>
+HRESULT SyncWait(_In_ ABI::Windows::Foundation::IAsyncOperationWithProgress<TResult, TProgress>* pOperation, _In_ DWORD dwMilliseconds = INFINITE)
+{
+ return SyncWait<ABI::Windows::Foundation::IAsyncOperationWithProgress<TResult, TProgress>, ABI::Windows::Foundation::IAsyncOperationWithProgressCompletedHandler<TResult, TProgress>>(pOperation, dwMilliseconds);
+}
diff --git a/sys/wasapi2/gstwasapi2client.cpp b/sys/wasapi2/gstwasapi2client.cpp
new file mode 100644
index 000000000..c9181f219
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2client.cpp
@@ -0,0 +1,1777 @@
+/*
+ * Copyright (C) 2008 Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
+ * Copyright (C) 2013 Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) 2018 Centricular Ltd.
+ * Author: Nirbheek Chauhan <nirbheek@centricular.com>
+ * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "AsyncOperations.h"
+#include "gstwasapi2client.h"
+#include "gstwasapi2util.h"
+#include <initguid.h>
+#include <windows.foundation.h>
+#include <windows.ui.core.h>
+#include <wrl.h>
+#include <wrl/wrappers/corewrappers.h>
+#include <audioclient.h>
+#include <mmdeviceapi.h>
+#include <string.h>
+#include <string>
+#include <locale>
+#include <codecvt>
+
+using namespace ABI::Windows::ApplicationModel::Core;
+using namespace ABI::Windows::Foundation;
+using namespace ABI::Windows::Foundation::Collections;
+using namespace ABI::Windows::UI::Core;
+using namespace ABI::Windows::Media::Devices;
+using namespace ABI::Windows::Devices::Enumeration;
+
+using namespace Microsoft::WRL;
+using namespace Microsoft::WRL::Wrappers;
+
+G_BEGIN_DECLS
+
+GST_DEBUG_CATEGORY_EXTERN (gst_wasapi2_client_debug);
+#define GST_CAT_DEFAULT gst_wasapi2_client_debug
+
+G_END_DECLS
+
+static void
+gst_wasapi2_client_on_device_activated (GstWasapi2Client * client,
+ IAudioClient3 * audio_client);
+
+/* Activation helper: drives ActivateAudioInterfaceAsync() for a device id
+ * and delivers the resulting IAudioClient3 (or NULL on failure) to the
+ * listener via gst_wasapi2_client_on_device_activated().  When a main UI
+ * CoreDispatcher is available the activation call is marshalled onto the
+ * UI thread */
+class GstWasapiDeviceActivator
+    : public RuntimeClass<RuntimeClassFlags<ClassicCom>, FtmBase,
+        IActivateAudioInterfaceCompletionHandler>
+{
+public:
+  GstWasapiDeviceActivator ()
+    : listener_(nullptr)
+  {
+  }
+
+  ~GstWasapiDeviceActivator ()
+  {
+  }
+
+  /* WRL two-phase construction: stores the (non-owning) listener and looks
+   * up the main-view dispatcher if running inside a CoreApplication */
+  HRESULT
+  RuntimeClassInitialize (GstWasapi2Client * listener)
+  {
+    if (!listener)
+      return E_INVALIDARG;
+
+    listener_ = listener;
+
+    findCoreDispatcher ();
+
+    return S_OK;
+  }
+
+  /* IActivateAudioInterfaceCompletionHandler::ActivateCompleted */
+  STDMETHOD(ActivateCompleted)
+  (IActivateAudioInterfaceAsyncOperation *async_op)
+  {
+    ComPtr<IAudioClient3> audio_client;
+    HRESULT hr = S_OK;
+    HRESULT hr_async_op = S_OK;
+    ComPtr<IUnknown> audio_interface;
+
+    if (!listener_) {
+      GST_WARNING ("No listener was configured");
+      return S_OK;
+    }
+
+    GST_INFO_OBJECT (listener_, "AsyncOperation done");
+
+    hr = async_op->GetActivateResult(&hr_async_op, &audio_interface);
+
+    if (!gst_wasapi2_result (hr)) {
+      GST_WARNING_OBJECT (listener_, "Failed to get activate result, hr: 0x%x", hr);
+      goto done;
+    }
+
+    if (!gst_wasapi2_result (hr_async_op)) {
+      GST_WARNING_OBJECT (listener_, "Failed to activate device");
+      goto done;
+    }
+
+    hr = audio_interface.As (&audio_client);
+    if (!gst_wasapi2_result (hr)) {
+      GST_ERROR_OBJECT (listener_, "Failed to get IAudioClient3 interface");
+      goto done;
+    }
+
+  done:
+    /* Should call this method anyway, listener will wait this event;
+     * audio_client stays NULL on any failure above */
+    gst_wasapi2_client_on_device_activated (listener_, audio_client.Get());
+
+    /* return S_OK anyway, but listener can know it's succeeded or not
+     * by passed IAudioClient handle via gst_wasapi2_client_on_device_activated
+     */
+    return S_OK;
+  }
+
+  /* Kicks off the async activation for device_id; completion is reported
+   * through ActivateCompleted() above */
+  HRESULT
+  ActivateDeviceAsync(const std::wstring &device_id)
+  {
+    return runOnUIThread (INFINITE,
+        [this, device_id] {
+          ComPtr<IActivateAudioInterfaceAsyncOperation> async_op;
+          HRESULT hr = S_OK;
+
+          hr = ActivateAudioInterfaceAsync (device_id.c_str (),
+              __uuidof(IAudioClient3), nullptr, this, &async_op);
+
+          /* for debugging */
+          gst_wasapi2_result (hr);
+
+          return hr;
+        });
+  }
+
+  /* Try to find the ICoreDispatcher of the main UI view so that we can
+   * activate the audio interface on the main UI thread.  Leaves
+   * dispatcher_ unset when not running under a CoreApplication view */
+  void findCoreDispatcher(void)
+  {
+    HRESULT hr;
+    HStringReference hstr_core_app =
+        HStringReference(RuntimeClass_Windows_ApplicationModel_Core_CoreApplication);
+    ComPtr<ICoreApplication> core_app;
+    ComPtr<ICoreApplicationView> core_app_view;
+    ComPtr<ICoreWindow> core_window;
+
+    hr = GetActivationFactory (hstr_core_app.Get(), &core_app);
+    if (!gst_wasapi2_result (hr))
+      return;
+
+    hr = core_app->GetCurrentView (&core_app_view);
+    if (!gst_wasapi2_result (hr))
+      return;
+
+    hr = core_app_view->get_CoreWindow (&core_window);
+    if (!gst_wasapi2_result (hr))
+      return;
+
+    hr = core_window->get_Dispatcher (&dispatcher_);
+    if (!gst_wasapi2_result (hr))
+      return;
+
+    GST_DEBUG ("Main UI dispatcher is available");
+  }
+
+  /* Runs cb either directly (no dispatcher, or already on the UI thread),
+   * or via ICoreDispatcher::RunAsync while blocking up to timeout for it
+   * to finish.  Returns the callback's own HRESULT */
+  template <typename CB>
+  HRESULT
+  runOnUIThread (DWORD timeout, CB && cb)
+  {
+    ComPtr<IAsyncAction> async_action;
+    HRESULT hr;
+    /* defensively initialized; overwritten by the dispatched handler */
+    HRESULT hr_cb = E_FAIL;
+    boolean can_now;
+    DWORD wait_ret;
+
+    if (!dispatcher_)
+      return cb();
+
+    hr = dispatcher_->get_HasThreadAccess (&can_now);
+
+    if (FAILED (hr))
+      return hr;
+
+    if (can_now)
+      return cb ();
+
+    Event event (CreateEventEx (NULL, NULL, CREATE_EVENT_MANUAL_RESET,
+        EVENT_ALL_ACCESS));
+
+    if (!event.IsValid())
+      return E_FAIL;
+
+    auto handler =
+        Callback<Implements<RuntimeClassFlags<ClassicCom>,
+            IDispatchedHandler, FtmBase>>([&hr_cb, &cb, &event] {
+          hr_cb = cb ();
+          SetEvent (event.Get());
+          return S_OK;
+        });
+
+    hr = dispatcher_->RunAsync (CoreDispatcherPriority_Normal,
+        handler.Get(), &async_action);
+
+    if (FAILED (hr))
+      return hr;
+
+    wait_ret = WaitForSingleObject (event.Get(), timeout);
+    if (wait_ret != WAIT_OBJECT_0)
+      return E_FAIL;
+
+    /* Bug fix: propagate the callback's result.  Previously hr_cb was
+     * written by the dispatched handler but never read, and the RunAsync
+     * HRESULT was returned instead, so a failed activation call on the UI
+     * thread was silently reported as success */
+    return hr_cb;
+  }
+
+private:
+  GstWasapi2Client * listener_;
+  ComPtr<ICoreDispatcher> dispatcher_;
+};
+
+struct _GstWasapi2Client
+{
+  GstObject parent;
+
+  /* construct-only properties (see class_init) */
+  GstWasapi2ClientDeviceClass device_class;
+  gboolean low_latency;
+  gchar *device_id;
+  gchar *device_name;
+  gint device_index;
+
+  /* WASAPI interfaces, acquired on the client thread and released when the
+   * main loop exits; audio_client is set from the ActivateCompleted
+   * callback */
+  IAudioClient3 *audio_client;
+  IAudioCaptureClient *audio_capture_client;
+  IAudioRenderClient *audio_render_client;
+  ISimpleAudioVolume *audio_volume;
+
+  WAVEFORMATEX *mix_format;
+  GstCaps *supported_caps;
+
+  /* Win32 events: event_handle is auto-reset, cancellable is manual-reset
+   * (see gst_wasapi2_client_init); freed in finalize */
+  HANDLE event_handle;
+  HANDLE cancellable;
+  gboolean opened;
+  gboolean running;
+
+  guint32 device_period;
+  guint32 buffer_frame_count;
+
+  GstAudioChannelPosition *positions;
+
+  /* Used for capture mode */
+  GstAdapter *adapter;
+
+  /* Dedicated MTA COM thread and its main loop (see constructed/dispose) */
+  GThread *thread;
+  GMutex lock;
+  GCond cond;
+  GMainContext *context;
+  GMainLoop *loop;
+
+  /* To wait ActivateCompleted event */
+  GMutex init_lock;
+  GCond init_cond;
+  gboolean init_done;
+};
+
+/* GObject property ids */
+enum
+{
+ PROP_0,
+ PROP_DEVICE,
+ PROP_DEVICE_NAME,
+ PROP_DEVICE_INDEX,
+ PROP_DEVICE_CLASS,
+ PROP_LOW_LATENCY,
+};
+
+/* property defaults; -1 means "no explicit index requested" */
+#define DEFAULT_DEVICE_INDEX -1
+#define DEFAULT_DEVICE_CLASS GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE
+#define DEFAULT_LOW_LATENCY FALSE
+
+/* Registers (once, thread-safely via g_once_init_*) the GEnum type for
+ * GstWasapi2ClientDeviceClass with "capture" and "render" values */
+GType
+gst_wasapi2_client_device_class_get_type (void)
+{
+ static volatile GType class_type = 0;
+ static const GEnumValue types[] = {
+ {GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE, "Capture", "capture"},
+ {GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER, "Render", "render"},
+ {0, NULL, NULL}
+ };
+
+ if (g_once_init_enter (&class_type)) {
+ GType gtype = g_enum_register_static ("GstWasapi2ClientDeviceClass", types);
+ g_once_init_leave (&class_type, gtype);
+ }
+
+ return class_type;
+}
+
+static void gst_wasapi2_client_constructed (GObject * object);
+static void gst_wasapi2_client_dispose (GObject * object);
+static void gst_wasapi2_client_finalize (GObject * object);
+static void gst_wasapi2_client_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_wasapi2_client_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+static gpointer gst_wasapi2_client_thread_func (GstWasapi2Client * self);
+static gboolean
+gst_wasapi2_client_main_loop_running_cb (GstWasapi2Client * self);
+
+#define gst_wasapi2_client_parent_class parent_class
+G_DEFINE_TYPE (GstWasapi2Client,
+ gst_wasapi2_client, GST_TYPE_OBJECT);
+
+/* GObjectClass setup: vfuncs plus the five construct-only properties
+ * (device, device-name, device-index, device-class, low-latency) */
+static void
+gst_wasapi2_client_class_init (GstWasapi2ClientClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ /* all properties are read-write but settable only at construction */
+ GParamFlags param_flags =
+ (GParamFlags) (G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY |
+ G_PARAM_STATIC_STRINGS);
+
+ gobject_class->constructed = gst_wasapi2_client_constructed;
+ gobject_class->dispose = gst_wasapi2_client_dispose;
+ gobject_class->finalize = gst_wasapi2_client_finalize;
+ gobject_class->get_property = gst_wasapi2_client_get_property;
+ gobject_class->set_property = gst_wasapi2_client_set_property;
+
+ g_object_class_install_property (gobject_class, PROP_DEVICE,
+ g_param_spec_string ("device", "Device",
+ "WASAPI playback device as a GUID string", NULL, param_flags));
+ g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
+ g_param_spec_string ("device-name", "Device Name",
+ "The human-readable device name", NULL, param_flags));
+ g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
+ g_param_spec_int ("device-index", "Device Index",
+ "The zero-based device index", -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
+ param_flags));
+ g_object_class_install_property (gobject_class, PROP_DEVICE_CLASS,
+ g_param_spec_enum ("device-class", "Device Class",
+ "Device class", GST_TYPE_WASAPI2_CLIENT_DEVICE_CLASS,
+ DEFAULT_DEVICE_CLASS, param_flags));
+ g_object_class_install_property (gobject_class, PROP_LOW_LATENCY,
+ g_param_spec_boolean ("low-latency", "Low latency",
+ "Optimize all settings for lowest latency. Always safe to enable.",
+ DEFAULT_LOW_LATENCY, param_flags));
+}
+
+/* Instance init: defaults, synchronization primitives and the private
+ * GMainContext/GMainLoop the COM thread will run */
+static void
+gst_wasapi2_client_init (GstWasapi2Client * self)
+{
+ self->device_index = DEFAULT_DEVICE_INDEX;
+ self->device_class = DEFAULT_DEVICE_CLASS;
+ self->low_latency = DEFAULT_LOW_LATENCY;
+
+ self->adapter = gst_adapter_new ();
+ /* auto-reset event (bManualReset=FALSE); presumably signaled by WASAPI in
+  * event-driven mode -- usage is outside this chunk */
+ self->event_handle = CreateEvent (NULL, FALSE, FALSE, NULL);
+ /* manual-reset event (bManualReset=TRUE), stays signaled once set */
+ self->cancellable = CreateEvent (NULL, TRUE, FALSE, NULL);
+
+ g_mutex_init (&self->lock);
+ g_cond_init (&self->cond);
+
+ g_mutex_init (&self->init_lock);
+ g_cond_init (&self->init_cond);
+
+ self->context = g_main_context_new ();
+ self->loop = g_main_loop_new (self->context, FALSE);
+}
+
+/* Spawns the dedicated COM thread and blocks until its main loop is
+ * actually running (signaled by gst_wasapi2_client_main_loop_running_cb) */
+static void
+gst_wasapi2_client_constructed (GObject * object)
+{
+ GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
+
+ /* Create a new thread to ensure that COM thread can be MTA thread.
+ * We cannot ensure whether CoInitializeEx() was called outside of here for
+ * this thread or not. If it was called with non-COINIT_MULTITHREADED option,
+ * we cannot update it */
+ g_mutex_lock (&self->lock);
+ self->thread = g_thread_new ("GstWasapi2ClientWinRT",
+ (GThreadFunc) gst_wasapi2_client_thread_func, self);
+ while (!self->loop || !g_main_loop_is_running (self->loop))
+ g_cond_wait (&self->cond, &self->lock);
+ g_mutex_unlock (&self->lock);
+
+ G_OBJECT_CLASS (parent_class)->constructed (object);
+}
+
+/* Dispose: quits the COM thread's main loop, joins the thread (which
+ * releases the WASAPI interfaces on its way out), then drops loop/context
+ * and the adapter.  May run more than once, hence the NULL checks/resets */
+static void
+gst_wasapi2_client_dispose (GObject * object)
+{
+ GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
+
+ GST_DEBUG_OBJECT (self, "dispose");
+
+ gst_clear_caps (&self->supported_caps);
+
+ if (self->loop) {
+ g_main_loop_quit (self->loop);
+ g_thread_join (self->thread);
+ g_main_context_unref (self->context);
+ g_main_loop_unref (self->loop);
+
+ self->thread = NULL;
+ self->context = NULL;
+ self->loop = NULL;
+ }
+
+ g_clear_object (&self->adapter);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* Finalize: frees strings, the channel-position array, the COM-allocated
+ * mix format, both Win32 event handles and the sync primitives */
+static void
+gst_wasapi2_client_finalize (GObject * object)
+{
+ GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
+
+ g_free (self->device_id);
+ g_free (self->device_name);
+
+ g_free (self->positions);
+
+ /* mix_format was allocated by COM (CoTaskMemAlloc family) */
+ CoTaskMemFree (self->mix_format);
+ CloseHandle (self->event_handle);
+ CloseHandle (self->cancellable);
+
+ g_mutex_clear (&self->lock);
+ g_cond_clear (&self->cond);
+
+ g_mutex_clear (&self->init_lock);
+ g_cond_clear (&self->init_cond);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject::get_property for the five construct-only properties */
+static void
+gst_wasapi2_client_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
+
+  switch (prop_id) {
+    case PROP_LOW_LATENCY:
+      g_value_set_boolean (value, self->low_latency);
+      break;
+    case PROP_DEVICE_CLASS:
+      g_value_set_enum (value, self->device_class);
+      break;
+    case PROP_DEVICE_INDEX:
+      g_value_set_int (value, self->device_index);
+      break;
+    case PROP_DEVICE_NAME:
+      g_value_set_string (value, self->device_name);
+      break;
+    case PROP_DEVICE:
+      g_value_set_string (value, self->device_id);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject::set_property; every property is CONSTRUCT_ONLY (see class_init),
+ * so these assignments only ever run at object construction */
+static void
+gst_wasapi2_client_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
+
+  switch (prop_id) {
+    case PROP_LOW_LATENCY:
+      self->low_latency = g_value_get_boolean (value);
+      break;
+    case PROP_DEVICE_CLASS:
+      self->device_class =
+          (GstWasapi2ClientDeviceClass) g_value_get_enum (value);
+      break;
+    case PROP_DEVICE_INDEX:
+      self->device_index = g_value_get_int (value);
+      break;
+    case PROP_DEVICE_NAME:
+      g_free (self->device_name);
+      self->device_name = g_value_dup_string (value);
+      break;
+    case PROP_DEVICE:
+      g_free (self->device_id);
+      self->device_id = g_value_dup_string (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Idle callback attached in the thread func; fires once the GMainLoop is
+ * dispatching and wakes gst_wasapi2_client_constructed(), which is blocked
+ * on self->cond */
+static gboolean
+gst_wasapi2_client_main_loop_running_cb (GstWasapi2Client * self)
+{
+ GST_DEBUG_OBJECT (self, "Main loop running now");
+
+ g_mutex_lock (&self->lock);
+ g_cond_signal (&self->cond);
+ g_mutex_unlock (&self->lock);
+
+ return G_SOURCE_REMOVE;
+}
+
+/* Called from GstWasapiDeviceActivator::ActivateCompleted (any COM thread).
+ * audio_client is NULL when activation failed; init_done is set either way
+ * so the waiter in the thread func always wakes up */
+static void
+gst_wasapi2_client_on_device_activated (GstWasapi2Client * self,
+ IAudioClient3 * audio_client)
+{
+ GST_INFO_OBJECT (self, "Device activated");
+
+ g_mutex_lock (&self->init_lock);
+ if (audio_client) {
+ /* take our own reference; released when the main loop exits */
+ audio_client->AddRef();
+ self->audio_client = audio_client;
+ }
+ self->init_done = TRUE;
+ g_cond_broadcast (&self->init_cond);
+ g_mutex_unlock (&self->init_lock);
+}
+
+/* Converts a wide string to a UTF-8 std::string.
+ * NOTE(review): std::wstring_convert/std::codecvt_utf8 are deprecated since
+ * C++17; a WideCharToMultiByte-based replacement would avoid that */
+static std::string
+convert_wstring_to_string (const std::wstring &wstr)
+{
+ std::wstring_convert<std::codecvt_utf8<wchar_t>, wchar_t> converter;
+
+ return converter.to_bytes (wstr.c_str());
+}
+
+/* Converts a WinRT HString to a UTF-8 std::string; returns an empty string
+ * for a null pointer or a null raw buffer */
+static std::string
+convert_hstring_to_string (HString * hstr)
+{
+  if (!hstr)
+    return std::string ();
+
+  const wchar_t *raw_hstr = hstr->GetRawBuffer (nullptr);
+  if (!raw_hstr)
+    return std::string ();
+
+  return convert_wstring_to_string (std::wstring (raw_hstr));
+}
+
+/* Returns the WinRT default-device id string for the client's device
+ * class (DEVINTERFACE_AUDIO_CAPTURE or DEVINTERFACE_AUDIO_RENDER), or an
+ * empty wstring on failure */
+static std::wstring
+gst_wasapi2_client_get_default_device_id (GstWasapi2Client * self)
+{
+  PWSTR default_device_id_wstr = nullptr;
+  HRESULT hr;
+
+  if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE)
+    hr = StringFromIID (DEVINTERFACE_AUDIO_CAPTURE, &default_device_id_wstr);
+  else
+    hr = StringFromIID (DEVINTERFACE_AUDIO_RENDER, &default_device_id_wstr);
+
+  if (!gst_wasapi2_result (hr))
+    return std::wstring ();
+
+  /* copy into a std::wstring and release the COM allocation */
+  std::wstring ret (default_device_id_wstr);
+  CoTaskMemFree (default_device_id_wstr);
+
+  return ret;
+}
+
+/* Thread body run on the dedicated COM thread.
+ *
+ * Resolves the target device:
+ *  - the default device when it was requested by id, when nothing was
+ *    requested, or when device-index == 0 with no id;
+ *  - otherwise a match by device id or by 1-based enumeration index
+ *    (index 0 is reserved for the default device).
+ * It then asynchronously activates IAudioClient3 through the activator,
+ * waits for the ActivateCompleted callback, and runs the GMainLoop until
+ * dispose() quits it.  On exit all acquired WASAPI interfaces are
+ * released, so the object's teardown only has to join this thread */
+static void
+gst_wasapi2_client_thread_func_internal (GstWasapi2Client * self)
+{
+  HRESULT hr;
+  GSource *source;
+  ComPtr<GstWasapiDeviceActivator> activator;
+  ComPtr<IDeviceInformationStatics> device_info_static;
+  ComPtr<IAsyncOperation<DeviceInformationCollection*>> async_op;
+  ComPtr<IVectorView<DeviceInformation*>> device_list;
+  HStringReference hstr_device_info =
+      HStringReference(RuntimeClass_Windows_Devices_Enumeration_DeviceInformation);
+  DeviceClass device_class;
+  unsigned int count = 0;
+  gint device_index = 0;
+  std::wstring default_device_id_wstring;
+  std::string default_device_id;
+  std::wstring target_device_id_wstring;
+  std::string target_device_id;
+  std::string target_device_name;
+  gboolean use_default_device = FALSE;
+
+  g_main_context_push_thread_default (self->context);
+
+  GST_INFO_OBJECT (self,
+      "requested device info, device-class: %s, device: %s, device-index: %d",
+      self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE ? "capture" :
+      "render", GST_STR_NULL (self->device_id), self->device_index);
+
+  hr = MakeAndInitialize<GstWasapiDeviceActivator> (&activator, self);
+  if (!gst_wasapi2_result (hr))
+    goto run_loop;
+
+  if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE) {
+    device_class = DeviceClass::DeviceClass_AudioCapture;
+  } else {
+    device_class = DeviceClass::DeviceClass_AudioRender;
+  }
+
+  default_device_id_wstring = gst_wasapi2_client_get_default_device_id (self);
+  if (default_device_id_wstring.empty ()) {
+    GST_WARNING_OBJECT (self, "Couldn't get default device id");
+    goto run_loop;
+  }
+
+  default_device_id = convert_wstring_to_string (default_device_id_wstring);
+  GST_DEBUG_OBJECT (self, "Default device id: %s", default_device_id.c_str ());
+
+  /* When
+   * 1) default device was requested or
+   * 2) no explicitly requested device or
+   * 3) requested device string id is null but device index is zero
+   * will use default device
+   *
+   * Note that default device is much preferred
+   * See https://docs.microsoft.com/en-us/windows/win32/coreaudio/automatic-stream-routing
+   */
+  if (self->device_id &&
+      g_ascii_strcasecmp (self->device_id, default_device_id.c_str()) == 0) {
+    GST_DEBUG_OBJECT (self, "Default device was requested");
+    use_default_device = TRUE;
+  } else if (self->device_index < 0 && !self->device_id) {
+    GST_DEBUG_OBJECT (self,
+        "No device was explicitly requested, use default device");
+    use_default_device = TRUE;
+  } else if (!self->device_id && self->device_index == 0) {
+    GST_DEBUG_OBJECT (self, "device-index == zero means default device");
+    use_default_device = TRUE;
+  }
+
+  if (use_default_device) {
+    target_device_id_wstring = default_device_id_wstring;
+    target_device_id = default_device_id;
+    if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE)
+      target_device_name = "Default Audio Capture Device";
+    else
+      target_device_name = "Default Audio Render Device";
+    goto activate;
+  }
+
+  /* enumerate all devices of our class and search for the requested one */
+  hr = GetActivationFactory (hstr_device_info.Get(), &device_info_static);
+  if (!gst_wasapi2_result (hr))
+    goto run_loop;
+
+  hr = device_info_static->FindAllAsyncDeviceClass (device_class, &async_op);
+  device_info_static.Reset ();
+  if (!gst_wasapi2_result (hr))
+    goto run_loop;
+
+  /* block until the enumeration async op finishes */
+  hr = SyncWait<DeviceInformationCollection*>(async_op.Get ());
+  if (!gst_wasapi2_result (hr))
+    goto run_loop;
+
+  hr = async_op->GetResults (&device_list);
+  async_op.Reset ();
+  if (!gst_wasapi2_result (hr))
+    goto run_loop;
+
+  hr = device_list->get_Size (&count);
+  if (!gst_wasapi2_result (hr))
+    goto run_loop;
+
+  if (count == 0) {
+    GST_WARNING_OBJECT (self, "No available device");
+    goto run_loop;
+  }
+
+  /* device_index 0 will be assigned for default device
+   * so the number of available device is count + 1 (for default device) */
+  if (self->device_index >= 0 && self->device_index > (gint) count) {
+    /* Bug fix: the "%d" conversion previously had no matching argument
+     * (undefined behaviour whenever this warning was emitted) */
+    GST_WARNING_OBJECT (self, "Device index %d is unavailable",
+        self->device_index);
+    goto run_loop;
+  }
+
+  GST_DEBUG_OBJECT (self, "Available device count: %d", count);
+
+  /* zero is for default device */
+  device_index = 1;
+  for (unsigned int i = 0; i < count; i++) {
+    ComPtr<IDeviceInformation> device_info;
+    HString id;
+    HString name;
+    boolean b_value;
+    std::string cur_device_id;
+    std::string cur_device_name;
+
+    hr = device_list->GetAt (i, &device_info);
+    if (!gst_wasapi2_result (hr))
+      continue;
+
+    hr = device_info->get_IsEnabled (&b_value);
+    if (!gst_wasapi2_result (hr))
+      continue;
+
+    /* select only enabled device */
+    if (!b_value) {
+      GST_DEBUG_OBJECT (self, "Device index %d is disabled", i);
+      continue;
+    }
+
+    /* To ensure device id and device name are available,
+     * will query this later again once target device is determined */
+    hr = device_info->get_Id (id.GetAddressOf());
+    if (!gst_wasapi2_result (hr))
+      continue;
+
+    if (!id.IsValid()) {
+      GST_WARNING_OBJECT (self, "Device index %d has invalid id", i);
+      continue;
+    }
+
+    hr = device_info->get_Name (name.GetAddressOf());
+    if (!gst_wasapi2_result (hr))
+      continue;
+
+    if (!name.IsValid ()) {
+      GST_WARNING_OBJECT (self, "Device index %d has invalid name", i);
+      continue;
+    }
+
+    cur_device_id = convert_hstring_to_string (&id);
+    if (cur_device_id.empty ()) {
+      GST_WARNING_OBJECT (self, "Device index %d has empty id", i);
+      continue;
+    }
+
+    cur_device_name = convert_hstring_to_string (&name);
+    if (cur_device_name.empty ()) {
+      GST_WARNING_OBJECT (self, "Device index %d has empty device name", i);
+      continue;
+    }
+
+    GST_DEBUG_OBJECT (self, "device [%d] id: %s, name: %s",
+        device_index, cur_device_id.c_str(), cur_device_name.c_str());
+
+    /* match by requested id first, then by requested (1-based) index */
+    if (self->device_id &&
+        g_ascii_strcasecmp (self->device_id, cur_device_id.c_str ()) == 0) {
+      GST_INFO_OBJECT (self,
+          "Device index %d has matching device id %s", device_index,
+          cur_device_id.c_str ());
+      target_device_id_wstring = id.GetRawBuffer (nullptr);
+      target_device_id = cur_device_id;
+      target_device_name = cur_device_name;
+      break;
+    }
+
+    if (self->device_index >= 0 && self->device_index == device_index) {
+      GST_INFO_OBJECT (self, "Select device index %d, device id %s",
+          device_index, cur_device_id.c_str ());
+      target_device_id_wstring = id.GetRawBuffer (nullptr);
+      target_device_id = cur_device_id;
+      target_device_name = cur_device_name;
+      break;
+    }
+
+    /* count only available devices */
+    device_index++;
+  }
+
+  if (target_device_id_wstring.empty ()) {
+    GST_WARNING_OBJECT (self, "Couldn't find target device");
+    goto run_loop;
+  }
+
+activate:
+  /* fill device id and name */
+  g_free (self->device_id);
+  self->device_id = g_strdup (target_device_id.c_str());
+
+  g_free (self->device_name);
+  self->device_name = g_strdup (target_device_name.c_str ());
+
+  /* stays 0 (the reserved default-device index) on the default path */
+  self->device_index = device_index;
+
+  hr = activator->ActivateDeviceAsync (target_device_id_wstring);
+  if (!gst_wasapi2_result (hr)) {
+    GST_WARNING_OBJECT (self, "Failed to activate device");
+    goto run_loop;
+  }
+
+  /* Wait ActivateCompleted event */
+  GST_DEBUG_OBJECT (self, "Wait device activation");
+  g_mutex_lock (&self->init_lock);
+  while (!self->init_done)
+    g_cond_wait (&self->init_cond, &self->init_lock);
+  g_mutex_unlock (&self->init_lock);
+  GST_DEBUG_OBJECT (self, "Done device activation");
+
+run_loop:
+  /* wake constructed() once the loop is dispatching */
+  source = g_idle_source_new ();
+  g_source_set_callback (source,
+      (GSourceFunc) gst_wasapi2_client_main_loop_running_cb, self, NULL);
+  g_source_attach (source, self->context);
+  g_source_unref (source);
+
+  GST_DEBUG_OBJECT (self, "Starting main loop");
+  g_main_loop_run (self->loop);
+  GST_DEBUG_OBJECT (self, "Stopped main loop");
+
+  g_main_context_pop_thread_default (self->context);
+
+  gst_wasapi2_client_stop (self);
+
+  /* release every interface we acquired, on this (COM) thread */
+  if (self->audio_volume) {
+    self->audio_volume->Release ();
+    self->audio_volume = NULL;
+  }
+
+  if (self->audio_render_client) {
+    self->audio_render_client->Release ();
+    self->audio_render_client = NULL;
+  }
+
+  if (self->audio_capture_client) {
+    self->audio_capture_client->Release ();
+    self->audio_capture_client = NULL;
+  }
+
+  if (self->audio_client) {
+    self->audio_client->Release ();
+    self->audio_client = NULL;
+  }
+
+  GST_DEBUG_OBJECT (self, "Exit thread function");
+
+  return;
+}
+
/* GThread entry point for the client's dedicated worker thread.
 * Initializes the WinRT/COM apartment (multithreaded) for this thread and
 * keeps it alive for the entire thread lifetime; the wrapper's destructor
 * uninitializes it again after the internal function returns. */
static gpointer
gst_wasapi2_client_thread_func (GstWasapi2Client * self)
{
  RoInitializeWrapper initialize (RO_INIT_MULTITHREADED);

  /* Wrap thread function so that ensure everything happens inside of
   * RoInitializeWrapper */
  gst_wasapi2_client_thread_func_internal (self);

  return NULL;
}
+
+static const gchar *
+gst_waveformatex_to_audio_format (WAVEFORMATEXTENSIBLE * format)
+{
+ const gchar *fmt_str = NULL;
+ GstAudioFormat fmt = GST_AUDIO_FORMAT_UNKNOWN;
+
+ if (format->Format.wFormatTag == WAVE_FORMAT_PCM) {
+ fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
+ format->Format.wBitsPerSample, format->Format.wBitsPerSample);
+ } else if (format->Format.wFormatTag == WAVE_FORMAT_IEEE_FLOAT) {
+ if (format->Format.wBitsPerSample == 32)
+ fmt = GST_AUDIO_FORMAT_F32LE;
+ else if (format->Format.wBitsPerSample == 64)
+ fmt = GST_AUDIO_FORMAT_F64LE;
+ } else if (format->Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
+ if (IsEqualGUID (format->SubFormat, KSDATAFORMAT_SUBTYPE_PCM)) {
+ fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
+ format->Format.wBitsPerSample, format->Samples.wValidBitsPerSample);
+ } else if (IsEqualGUID (format->SubFormat,
+ KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)) {
+ if (format->Format.wBitsPerSample == 32
+ && format->Samples.wValidBitsPerSample == 32)
+ fmt = GST_AUDIO_FORMAT_F32LE;
+ else if (format->Format.wBitsPerSample == 64 &&
+ format->Samples.wValidBitsPerSample == 64)
+ fmt = GST_AUDIO_FORMAT_F64LE;
+ }
+ }
+
+ if (fmt != GST_AUDIO_FORMAT_UNKNOWN)
+ fmt_str = gst_audio_format_to_string (fmt);
+
+ return fmt_str;
+}
+
+static void
+gst_wasapi_util_channel_position_all_none (guint channels,
+ GstAudioChannelPosition * position)
+{
+ int ii;
+ for (ii = 0; ii < channels; ii++)
+ position[ii] = GST_AUDIO_CHANNEL_POSITION_NONE;
+}
+
/* Lookup table mapping WASAPI SPEAKER_* channel-mask bits to the
 * equivalent GStreamer channel positions. Scanned in order when
 * converting a dwChannelMask, so entries are kept in WASAPI bit order. */
static struct
{
  guint64 wasapi_pos;
  GstAudioChannelPosition gst_pos;
} wasapi_to_gst_pos[] = {
  {SPEAKER_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT},
  {SPEAKER_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT},
  {SPEAKER_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER},
  {SPEAKER_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE1},
  {SPEAKER_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT},
  {SPEAKER_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT},
  {SPEAKER_FRONT_LEFT_OF_CENTER,
      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER},
  {SPEAKER_FRONT_RIGHT_OF_CENTER,
      GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
  {SPEAKER_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_REAR_CENTER},
  /* Enum values diverge from this point onwards */
  {SPEAKER_SIDE_LEFT, GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT},
  {SPEAKER_SIDE_RIGHT, GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT},
  {SPEAKER_TOP_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_CENTER},
  {SPEAKER_TOP_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT},
  {SPEAKER_TOP_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER},
  {SPEAKER_TOP_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT},
  {SPEAKER_TOP_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT},
  {SPEAKER_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER},
  {SPEAKER_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT}
};
+
/* Parse WAVEFORMATEX to get the gstreamer channel mask, and the wasapi channel
 * positions so GstAudioRingbuffer can reorder the audio data to match the
 * gstreamer channel order. */
static guint64
gst_wasapi_util_waveformatex_to_channel_mask (WAVEFORMATEXTENSIBLE * format,
    GstAudioChannelPosition ** out_position)
{
  int ii, ch;
  guint64 mask = 0;
  WORD nChannels = format->Format.nChannels;
  DWORD dwChannelMask = format->dwChannelMask;
  GstAudioChannelPosition *pos = NULL;

  /* Start from all-NONE; the bail-out paths below leave everything NONE,
   * which GStreamer interprets as non-positional audio */
  pos = g_new (GstAudioChannelPosition, nChannels);
  gst_wasapi_util_channel_position_all_none (nChannels, pos);

  /* Too many channels, have to assume that they are all non-positional */
  if (nChannels > G_N_ELEMENTS (wasapi_to_gst_pos)) {
    GST_INFO ("Got too many (%i) channels, assuming non-positional", nChannels);
    goto out;
  }

  /* Too many bits in the channel mask, and the bits don't match nChannels */
  /* NOTE(review): the shift count is G_N_ELEMENTS() + 1, which also lets
   * the first unmapped bit pass this check — looks like an off-by-one;
   * confirm the intended valid-bit range before changing */
  if (dwChannelMask >> (G_N_ELEMENTS (wasapi_to_gst_pos) + 1) != 0) {
    GST_WARNING ("Too many bits in channel mask (%lu), assuming "
        "non-positional", dwChannelMask);
    goto out;
  }

  /* Map WASAPI's channel mask to Gstreamer's channel mask and positions.
   * If the no. of bits in the mask > nChannels, we will ignore the extra. */
  for (ii = 0, ch = 0; ii < G_N_ELEMENTS (wasapi_to_gst_pos) && ch < nChannels;
      ii++) {
    if (!(dwChannelMask & wasapi_to_gst_pos[ii].wasapi_pos))
      /* no match, try next */
      continue;
    mask |= G_GUINT64_CONSTANT (1) << wasapi_to_gst_pos[ii].gst_pos;
    pos[ch++] = wasapi_to_gst_pos[ii].gst_pos;
  }

  /* XXX: Warn if some channel masks couldn't be mapped? */

  GST_DEBUG ("Converted WASAPI mask 0x%" G_GINT64_MODIFIER "x -> 0x%"
      G_GINT64_MODIFIER "x", (guint64) dwChannelMask, (guint64) mask);

out:
  /* Caller owns the returned positions array (g_free) */
  if (out_position)
    *out_position = pos;
  return mask;
}
+
+static gboolean
+gst_wasapi2_util_parse_waveformatex (WAVEFORMATEXTENSIBLE * format,
+ GstCaps * template_caps, GstCaps ** out_caps,
+ GstAudioChannelPosition ** out_positions)
+{
+ int ii;
+ const gchar *afmt;
+ guint64 channel_mask;
+
+ *out_caps = NULL;
+
+ /* TODO: handle SPDIF and other encoded formats */
+
+ /* 1 or 2 channels <= 16 bits sample size OR
+ * 1 or 2 channels > 16 bits sample size or >2 channels */
+ if (format->Format.wFormatTag != WAVE_FORMAT_PCM &&
+ format->Format.wFormatTag != WAVE_FORMAT_IEEE_FLOAT &&
+ format->Format.wFormatTag != WAVE_FORMAT_EXTENSIBLE)
+ /* Unhandled format tag */
+ return FALSE;
+
+ /* WASAPI can only tell us one canonical mix format that it will accept. The
+ * alternative is calling IsFormatSupported on all combinations of formats.
+ * Instead, it's simpler and faster to require conversion inside gstreamer */
+ afmt = gst_waveformatex_to_audio_format (format);
+ if (afmt == NULL)
+ return FALSE;
+
+ *out_caps = gst_caps_copy (template_caps);
+
+ /* This will always return something that might be usable */
+ channel_mask =
+ gst_wasapi_util_waveformatex_to_channel_mask (format, out_positions);
+
+ for (ii = 0; ii < gst_caps_get_size (*out_caps); ii++) {
+ GstStructure *s = gst_caps_get_structure (*out_caps, ii);
+
+ gst_structure_set (s,
+ "format", G_TYPE_STRING, afmt,
+ "channels", G_TYPE_INT, format->Format.nChannels,
+ "rate", G_TYPE_INT, format->Format.nSamplesPerSec, NULL);
+
+ if (channel_mask) {
+ gst_structure_set (s,
+ "channel-mask", GST_TYPE_BITMASK, channel_mask, NULL);
+ }
+ }
+
+ return TRUE;
+}
+
/* Query (and cache) the caps corresponding to the device's shared-mode
 * mix format. Returns a new caps reference, or NULL if the audio client
 * isn't configured or the mix format can't be mapped to caps. Also
 * refreshes client->mix_format and client->positions as a side effect. */
GstCaps *
gst_wasapi2_client_get_caps (GstWasapi2Client * client)
{
  WAVEFORMATEX *format = NULL;
  static GstStaticCaps static_caps = GST_STATIC_CAPS (GST_WASAPI2_STATIC_CAPS);
  GstCaps *scaps;
  HRESULT hr;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), NULL);

  /* Cached from a previous call; caps can't change without re-activation */
  if (client->supported_caps)
    return gst_caps_ref (client->supported_caps);

  if (!client->audio_client) {
    GST_WARNING_OBJECT (client, "IAudioClient3 wasn't configured");
    return NULL;
  }

  /* Drop any stale mix format / positions before querying again */
  CoTaskMemFree (client->mix_format);
  client->mix_format = nullptr;

  g_clear_pointer (&client->positions, g_free);

  hr = client->audio_client->GetMixFormat (&format);
  if (!gst_wasapi2_result (hr))
    return NULL;

  scaps = gst_static_caps_get (&static_caps);
  gst_wasapi2_util_parse_waveformatex ((WAVEFORMATEXTENSIBLE *) format,
      scaps, &client->supported_caps, &client->positions);
  gst_caps_unref (scaps);

  /* client takes ownership of the CoTaskMem-allocated format */
  client->mix_format = format;

  if (!client->supported_caps) {
    GST_ERROR_OBJECT (client, "No caps from subclass");
    return NULL;
  }

  return gst_caps_ref (client->supported_caps);
}
+
/* Initialize the stream via the IAudioClient3 low-latency path, using the
 * minimum shared-mode engine period. On success self->device_period is set
 * to the period actually in use (in frames). Returns FALSE on any failure
 * so the caller can fall back or error out. */
static gboolean
gst_wasapi2_client_initialize_audio_client3 (GstWasapi2Client * self)
{
  HRESULT hr;
  UINT32 default_period, fundamental_period, min_period, max_period;
  DWORD stream_flags = AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
  WAVEFORMATEX *format = NULL;
  UINT32 period;
  gboolean ret = FALSE;
  IAudioClient3 *audio_client = self->audio_client;

  hr = audio_client->GetSharedModeEnginePeriod (self->mix_format,
      &default_period, &fundamental_period, &min_period, &max_period);
  if (!gst_wasapi2_result (hr))
    goto done;

  GST_INFO_OBJECT (self, "Using IAudioClient3, default period %d frames, "
      "fundamental period %d frames, minimum period %d frames, maximum period "
      "%d frames", default_period, fundamental_period, min_period, max_period);

  /* Request the smallest period the engine supports for lowest latency */
  hr = audio_client->InitializeSharedAudioStream (stream_flags, min_period,
      self->mix_format, nullptr);

  if (!gst_wasapi2_result (hr)) {
    GST_WARNING_OBJECT (self, "Failed to initialize IAudioClient3");
    goto done;
  }

  /* query period again to be ensured */
  hr = audio_client->GetCurrentSharedModeEnginePeriod (&format, &period);
  if (!gst_wasapi2_result (hr)) {
    GST_WARNING_OBJECT (self, "Failed to get current period");
    goto done;
  }

  self->device_period = period;
  ret = TRUE;

done:
  /* format was CoTaskMem-allocated by GetCurrentSharedModeEnginePeriod;
   * CoTaskMemFree (NULL) is a no-op on the failure paths */
  CoTaskMemFree (format);

  return ret;
}
+
+static void
+gst_wasapi2_util_get_best_buffer_sizes (GstAudioRingBufferSpec * spec,
+ REFERENCE_TIME default_period, REFERENCE_TIME min_period,
+ REFERENCE_TIME * ret_period, REFERENCE_TIME * ret_buffer_duration)
+{
+ REFERENCE_TIME use_period, use_buffer;
+
+ /* Shared mode always runs at the default period, so if we want a larger
+ * period (for lower CPU usage), we do it as a multiple of that */
+ use_period = default_period;
+
+ /* Ensure that the period (latency_time) used is an integral multiple of
+ * either the default period or the minimum period */
+ use_period = use_period * MAX ((spec->latency_time * 10) / use_period, 1);
+
+ /* Ask WASAPI to create a software ringbuffer of at least this size; it may
+ * be larger so the actual buffer time may be different, which is why after
+ * initialization we read the buffer duration actually in-use and set
+ * segsize/segtotal from that. */
+ use_buffer = spec->buffer_time * 10;
+ /* Has to be at least twice the period */
+ if (use_buffer < 2 * use_period)
+ use_buffer = 2 * use_period;
+
+ *ret_period = use_period;
+ *ret_buffer_duration = use_buffer;
+}
+
/* Initialize the stream via the regular IAudioClient::Initialize path
 * (shared mode, event driven), sizing the buffer from @spec's
 * latency-time/buffer-time. Sets self->device_period in frames on
 * success. Used when low-latency mode is not requested. */
static gboolean
gst_wasapi2_client_initialize_audio_client (GstWasapi2Client * self,
    GstAudioRingBufferSpec * spec)
{
  REFERENCE_TIME default_period, min_period;
  REFERENCE_TIME device_period, device_buffer_duration;
  guint rate;
  DWORD stream_flags = AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
  HRESULT hr;
  IAudioClient3 *audio_client = self->audio_client;

  hr = audio_client->GetDevicePeriod (&default_period, &min_period);
  if (!gst_wasapi2_result (hr)) {
    GST_WARNING_OBJECT (self, "Couldn't get device period info");
    return FALSE;
  }

  GST_INFO_OBJECT (self, "wasapi2 default period: %" G_GINT64_FORMAT
      ", min period: %" G_GINT64_FORMAT, default_period, min_period);

  rate = GST_AUDIO_INFO_RATE (&spec->info);

  /* Clamp values to integral multiples of an appropriate period */
  gst_wasapi2_util_get_best_buffer_sizes (spec,
      default_period, min_period, &device_period, &device_buffer_duration);

  hr = audio_client->Initialize (AUDCLNT_SHAREMODE_SHARED, stream_flags,
      device_buffer_duration, 0, self->mix_format, nullptr);
  if (!gst_wasapi2_result (hr)) {
    GST_WARNING_OBJECT (self, "Couldn't initialize audioclient");
    return FALSE;
  }

  /* device_period can be a non-power-of-10 value so round while converting */
  /* 100ns units -> frames: period * rate / 1s, with rate * 100 because a
   * REFERENCE_TIME tick is 100ns */
  self->device_period =
      gst_util_uint64_scale_round (device_period, rate * 100, GST_SECOND);

  return TRUE;
}
+
/* Open (initialize) the audio client for streaming with the given
 * ringbuffer spec: initializes the stream (IAudioClient3 low-latency path
 * or the regular path), computes segsize/segtotal back into @spec from the
 * real buffer size, hooks up the event handle, and acquires the
 * render/capture and volume service interfaces. Idempotent once opened. */
gboolean
gst_wasapi2_client_open (GstWasapi2Client * client, GstAudioRingBufferSpec * spec,
    GstAudioRingBuffer * buf)
{
  HRESULT hr;
  REFERENCE_TIME latency_rt;
  guint bpf, rate;
  IAudioClient3 *audio_client;
  ComPtr<ISimpleAudioVolume> audio_volume;
  gboolean initialized = FALSE;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);

  /* FIXME: Once IAudioClient3 was initialized, we may need to re-open
   * IAudioClient3 in order to handle audio format change */
  if (client->opened) {
    GST_INFO_OBJECT (client, "IAudioClient3 object is initialized already");
    return TRUE;
  }

  audio_client = client->audio_client;

  if (!audio_client) {
    GST_ERROR_OBJECT (client, "IAudioClient3 object wasn't configured");
    return FALSE;
  }

  if (!client->mix_format) {
    GST_ERROR_OBJECT (client, "Unknown mix format");
    return FALSE;
  }

  /* Only use audioclient3 when low-latency is requested because otherwise
   * very slow machines and VMs with 1 CPU allocated will get glitches:
   * https://bugzilla.gnome.org/show_bug.cgi?id=794497 */
  if (client->low_latency)
    initialized = gst_wasapi2_client_initialize_audio_client3 (client);
  else
    initialized = gst_wasapi2_client_initialize_audio_client (client, spec);

  if (!initialized) {
    GST_ERROR_OBJECT (client, "Failed to initialize audioclient");
    return FALSE;
  }

  bpf = GST_AUDIO_INFO_BPF (&spec->info);
  rate = GST_AUDIO_INFO_RATE (&spec->info);

  /* Total size in frames of the allocated buffer that we will read from */
  hr = audio_client->GetBufferSize (&client->buffer_frame_count);
  if (!gst_wasapi2_result (hr)) {
    return FALSE;
  }

  GST_INFO_OBJECT (client, "buffer size is %i frames, device period is %i "
      "frames, bpf is %i bytes, rate is %i Hz", client->buffer_frame_count,
      client->device_period, bpf, rate);

  /* Actual latency-time/buffer-time will be different now */
  spec->segsize = client->device_period * bpf;

  /* We need a minimum of 2 segments to ensure glitch-free playback */
  spec->segtotal = MAX (client->buffer_frame_count * bpf / spec->segsize, 2);

  GST_INFO_OBJECT (client, "segsize is %i, segtotal is %i", spec->segsize,
      spec->segtotal);

  /* Get WASAPI latency for logging */
  hr = audio_client->GetStreamLatency (&latency_rt);
  if (!gst_wasapi2_result (hr)) {
    return FALSE;
  }

  GST_INFO_OBJECT (client, "wasapi2 stream latency: %" G_GINT64_FORMAT " (%"
      G_GINT64_FORMAT " ms)", latency_rt, latency_rt / 10000);

  /* Set the event handler which will trigger read/write */
  hr = audio_client->SetEventHandle (client->event_handle);
  if (!gst_wasapi2_result (hr))
    return FALSE;

  /* Acquire the direction-specific service interface; client keeps the
   * raw (Detach'd) reference and releases it when shutting down */
  if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
    ComPtr<IAudioRenderClient> render_client;

    hr = audio_client->GetService (IID_PPV_ARGS (&render_client));
    if (!gst_wasapi2_result (hr))
      return FALSE;

    client->audio_render_client = render_client.Detach ();
  } else {
    ComPtr<IAudioCaptureClient> capture_client;

    hr = audio_client->GetService (IID_PPV_ARGS (&capture_client));
    if (!gst_wasapi2_result (hr))
      return FALSE;

    client->audio_capture_client = capture_client.Detach ();
  }

  hr = audio_client->GetService (IID_PPV_ARGS (&audio_volume));
  if (!gst_wasapi2_result (hr))
    return FALSE;

  client->audio_volume = audio_volume.Detach ();

  gst_audio_ring_buffer_set_channel_positions (buf, client->positions);

  client->opened = TRUE;

  return TRUE;
}
+
+/* Get the empty space in the buffer that we have to write to */
+static gint
+gst_wasapi2_client_get_can_frames (GstWasapi2Client * self)
+{
+ HRESULT hr;
+ UINT32 n_frames_padding;
+ IAudioClient3 *audio_client = self->audio_client;
+
+ if (!audio_client) {
+ GST_WARNING_OBJECT (self, "IAudioClient3 wasn't configured");
+ return -1;
+ }
+
+ /* Frames the card hasn't rendered yet */
+ hr = audio_client->GetCurrentPadding (&n_frames_padding);
+ if (!gst_wasapi2_result (hr))
+ return -1;
+
+ GST_LOG_OBJECT (self, "%d unread frames (padding)", n_frames_padding);
+
+ /* We can write out these many frames */
+ return self->buffer_frame_count - n_frames_padding;
+}
+
/* Start streaming: validates that the direction-specific service interface
 * is configured, rearms the cancellable event, pre-fills the render buffer
 * with silence (render direction only) and calls IAudioClient::Start.
 * Returns TRUE when the client ends up in the running state. */
gboolean
gst_wasapi2_client_start (GstWasapi2Client * client)
{
  HRESULT hr;
  IAudioClient3 *audio_client;
  WAVEFORMATEX *mix_format;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);

  audio_client = client->audio_client;
  mix_format = client->mix_format;

  if (!audio_client) {
    GST_ERROR_OBJECT (client, "IAudioClient3 object wasn't configured");
    return FALSE;
  }

  if (!mix_format) {
    GST_ERROR_OBJECT (client, "Unknown MixFormat");
    return FALSE;
  }

  if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE &&
      !client->audio_capture_client) {
    GST_ERROR_OBJECT (client, "IAudioCaptureClient wasn't configured");
    return FALSE;
  }

  if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER &&
      !client->audio_render_client) {
    GST_ERROR_OBJECT (client, "IAudioRenderClient wasn't configured");
    return FALSE;
  }

  /* Rearm the cancel event so read/write waits block again after a
   * previous stop signalled it */
  ResetEvent (client->cancellable);

  if (client->running) {
    GST_WARNING_OBJECT (client, "IAudioClient3 is running already");
    return TRUE;
  }

  /* To avoid start-up glitches, before starting the streaming, we fill the
   * buffer with silence as recommended by the documentation:
   * https://msdn.microsoft.com/en-us/library/windows/desktop/dd370879%28v=vs.85%29.aspx */
  if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
    IAudioRenderClient *render_client = client->audio_render_client;
    gint n_frames, len;
    BYTE *dst = NULL;

    n_frames = gst_wasapi2_client_get_can_frames (client);
    if (n_frames < 1) {
      GST_ERROR_OBJECT (client,
          "should have more than %i frames to write", n_frames);
      return FALSE;
    }

    len = n_frames * mix_format->nBlockAlign;

    hr = render_client->GetBuffer (n_frames, &dst);
    if (!gst_wasapi2_result (hr)) {
      GST_ERROR_OBJECT (client, "Couldn't get buffer");
      return FALSE;
    }

    GST_DEBUG_OBJECT (client, "pre-wrote %i bytes of silence", len);

    /* AUDCLNT_BUFFERFLAGS_SILENT tells WASAPI to treat the buffer as
     * silence without us having to zero it */
    hr = render_client->ReleaseBuffer (n_frames, AUDCLNT_BUFFERFLAGS_SILENT);
    if (!gst_wasapi2_result (hr)) {
      GST_ERROR_OBJECT (client, "Couldn't release buffer");
      return FALSE;
    }
  }

  hr = audio_client->Start ();
  client->running = gst_wasapi2_result (hr);
  /* Drop any leftover capture data from a previous run */
  gst_adapter_clear (client->adapter);

  return client->running;
}
+
+gboolean
+gst_wasapi2_client_stop (GstWasapi2Client * client)
+{
+ HRESULT hr;
+ IAudioClient3 *audio_client;
+
+ g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
+
+ audio_client = client->audio_client;
+
+ if (!client->running) {
+ GST_DEBUG_OBJECT (client, "We are not running now");
+ return TRUE;
+ }
+
+ if (!client->audio_client) {
+ GST_ERROR_OBJECT (client, "IAudioClient3 object wasn't configured");
+ return FALSE;
+ }
+
+ client->running = FALSE;
+ SetEvent (client->cancellable);
+
+ hr = audio_client->Stop ();
+ if (!gst_wasapi2_result (hr))
+ return FALSE;
+
+ /* reset state for reuse case */
+ hr = audio_client->Reset ();
+ return gst_wasapi2_result (hr);
+}
+
/* Read exactly @length bytes of captured audio into @data, buffering any
 * excess from IAudioCaptureClient::GetBuffer in client->adapter for the
 * next call. Returns @length on success, 0 when the device momentarily
 * had nothing (caller should retry), or -1 on error/cancellation. */
gint
gst_wasapi2_client_read (GstWasapi2Client * client, gpointer data, guint length)
{
  IAudioCaptureClient *capture_client;
  WAVEFORMATEX *mix_format;
  HRESULT hr;
  BYTE *from = NULL;
  guint wanted = length;
  guint bpf;
  DWORD flags;

  g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
  g_return_val_if_fail (client->audio_capture_client != NULL, -1);
  g_return_val_if_fail (client->mix_format != NULL, -1);

  capture_client = client->audio_capture_client;
  mix_format = client->mix_format;

  if (!client->running) {
    GST_ERROR_OBJECT (client, "client is not running now");
    return -1;
  }

  /* If we've accumulated enough data, return it immediately */
  if (gst_adapter_available (client->adapter) >= wanted) {
    memcpy (data, gst_adapter_map (client->adapter, wanted), wanted);
    gst_adapter_flush (client->adapter, wanted);
    GST_DEBUG_OBJECT (client, "Adapter has enough data, returning %i", wanted);
    return wanted;
  }

  /* bytes per frame */
  bpf = mix_format->nBlockAlign;

  while (wanted > 0) {
    DWORD dwWaitResult;
    guint got_frames, avail_frames, n_frames, want_frames, read_len;
    HANDLE event_handle[2];

    event_handle[0] = client->event_handle;
    event_handle[1] = client->cancellable;

    /* Wait for data to become available */
    /* Either the engine signals new data or stop() signals cancellation */
    dwWaitResult = WaitForMultipleObjects (2, event_handle, FALSE, INFINITE);
    if (dwWaitResult != WAIT_OBJECT_0 && dwWaitResult != WAIT_OBJECT_0 + 1) {
      GST_ERROR_OBJECT (client, "Error waiting for event handle: %x",
          (guint) dwWaitResult);
      return -1;
    }

    if (!client->running) {
      GST_DEBUG_OBJECT (client, "Cancelled");
      return -1;
    }

    hr = capture_client->GetBuffer (&from, &got_frames, &flags, nullptr,
        nullptr);
    if (!gst_wasapi2_result (hr)) {
      if (hr == AUDCLNT_S_BUFFER_EMPTY) {
        GST_INFO_OBJECT (client, "Client buffer is empty, retry");
        return 0;
      }

      GST_ERROR_OBJECT (client, "Couldn't get buffer from capture client");
      return -1;
    }

    if (got_frames == 0) {
      GST_DEBUG_OBJECT (client, "No buffer to read");
      capture_client->ReleaseBuffer (got_frames);
      return 0;
    }

    if (G_UNLIKELY (flags != 0)) {
      /* https://docs.microsoft.com/en-us/windows/win32/api/audioclient/ne-audioclient-_audclnt_bufferflags */
      if (flags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY)
        GST_DEBUG_OBJECT (client, "WASAPI reported discontinuity (glitch?)");
      if (flags & AUDCLNT_BUFFERFLAGS_TIMESTAMP_ERROR)
        GST_DEBUG_OBJECT (client, "WASAPI reported a timestamp error");
    }

    /* Copy all the frames we got into the adapter, and then extract at most
     * @wanted size of frames from it. This helps when ::GetBuffer returns more
     * data than we can handle right now. */
    {
      GstBuffer *tmp = gst_buffer_new_allocate (NULL, got_frames * bpf, NULL);
      /* If flags has AUDCLNT_BUFFERFLAGS_SILENT, we will ignore the actual
       * data and write out silence, see:
       * https://docs.microsoft.com/en-us/windows/win32/api/audioclient/ne-audioclient-_audclnt_bufferflags */
      if (flags & AUDCLNT_BUFFERFLAGS_SILENT)
        memset (from, 0, got_frames * bpf);
      gst_buffer_fill (tmp, 0, from, got_frames * bpf);
      gst_adapter_push (client->adapter, tmp);
    }

    /* Release all captured buffers; we copied them above */
    hr = capture_client->ReleaseBuffer (got_frames);
    from = NULL;
    if (!gst_wasapi2_result (hr)) {
      GST_ERROR_OBJECT (client, "Failed to release buffer");
      return -1;
    }

    want_frames = wanted / bpf;
    avail_frames = gst_adapter_available (client->adapter) / bpf;

    /* Only copy data that will fit into the allocated buffer of size @length */
    n_frames = MIN (avail_frames, want_frames);
    read_len = n_frames * bpf;

    if (read_len == 0) {
      GST_WARNING_OBJECT (client, "No data to read");
      return 0;
    }

    GST_LOG_OBJECT (client, "frames captured: %d (%d bytes), "
        "can read: %d (%d bytes), will read: %d (%d bytes), "
        "adapter has: %d (%d bytes)", got_frames, got_frames * bpf, want_frames,
        wanted, n_frames, read_len, avail_frames, avail_frames * bpf);

    /* NOTE(review): each iteration writes to the start of @data rather
     * than an advancing offset — looks like later chunks overwrite
     * earlier ones when the loop runs more than once; confirm intended */
    memcpy (data, gst_adapter_map (client->adapter, read_len), read_len);
    gst_adapter_flush (client->adapter, read_len);
    wanted -= read_len;
  }

  return length;
}
+
+gint
+gst_wasapi2_client_write (GstWasapi2Client * client, gpointer data,
+ guint length)
+{
+ IAudioRenderClient *render_client;
+ WAVEFORMATEX *mix_format;
+ HRESULT hr;
+ BYTE *dst = nullptr;
+ DWORD dwWaitResult;
+ guint can_frames, have_frames, n_frames, write_len = 0;
+
+ g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), -1);
+ g_return_val_if_fail (client->audio_render_client != NULL, -1);
+ g_return_val_if_fail (client->mix_format != NULL, -1);
+
+ if (!client->running) {
+ GST_WARNING_OBJECT (client, "client is not running now");
+ return -1;
+ }
+
+ render_client = client->audio_render_client;
+ mix_format = client->mix_format;
+
+ /* We have N frames to be written out */
+ have_frames = length / (mix_format->nBlockAlign);
+
+ /* In shared mode we can write parts of the buffer, so only wait
+ * in case we can't write anything */
+ can_frames = gst_wasapi2_client_get_can_frames (client);
+ if (can_frames < 0) {
+ GST_ERROR_OBJECT (client, "Error getting frames to write to");
+ return -1;
+ }
+
+ if (can_frames == 0) {
+ HANDLE event_handle[2];
+
+ event_handle[0] = client->event_handle;
+ event_handle[1] = client->cancellable;
+
+ dwWaitResult = WaitForMultipleObjects (2, event_handle, FALSE, INFINITE);
+ if (dwWaitResult != WAIT_OBJECT_0 && dwWaitResult != WAIT_OBJECT_0 + 1) {
+ GST_ERROR_OBJECT (client, "Error waiting for event handle: %x",
+ (guint) dwWaitResult);
+ return -1;
+ }
+
+ if (!client->running) {
+ GST_DEBUG_OBJECT (client, "Cancelled");
+ return -1;
+ }
+
+ can_frames = gst_wasapi2_client_get_can_frames (client);
+ if (can_frames < 0) {
+ GST_ERROR_OBJECT (client, "Error getting frames to write to");
+ return -1;
+ }
+ }
+
+ /* We will write out these many frames, and this much length */
+ n_frames = MIN (can_frames, have_frames);
+ write_len = n_frames * mix_format->nBlockAlign;
+
+ GST_LOG_OBJECT (client, "total: %d, have_frames: %d (%d bytes), "
+ "can_frames: %d, will write: %d (%d bytes)", client->buffer_frame_count,
+ have_frames, length, can_frames, n_frames, write_len);
+
+ hr = render_client->GetBuffer (n_frames, &dst);
+ if (!gst_wasapi2_result (hr)) {
+ GST_ERROR_OBJECT (client, "Couldn't get buffer from client");
+ return -1;
+ }
+
+ memcpy (dst, data, write_len);
+ hr = render_client->ReleaseBuffer (n_frames, 0);
+
+ return write_len;
+}
+
+guint
+gst_wasapi2_client_delay (GstWasapi2Client * client)
+{
+ HRESULT hr;
+ UINT32 delay;
+ IAudioClient3 *audio_client;
+
+ g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), 0);
+
+ audio_client = client->audio_client;
+
+ if (!audio_client) {
+ GST_WARNING_OBJECT (client, "IAudioClient3 wasn't configured");
+ return 0;
+ }
+
+ hr = audio_client->GetCurrentPadding (&delay);
+ if (!gst_wasapi2_result (hr))
+ return 0;
+
+ return delay;
+}
+
+gboolean
+gst_wasapi2_client_set_mute (GstWasapi2Client * client, gboolean mute)
+{
+ HRESULT hr;
+ ISimpleAudioVolume *audio_volume;
+
+ g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
+
+ audio_volume = client->audio_volume;
+
+ if (!audio_volume) {
+ GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
+ return FALSE;
+ }
+
+ hr = audio_volume->SetMute (mute, nullptr);
+ GST_DEBUG_OBJECT (client, "Set mute %s, hr: 0x%x",
+ mute ? "enabled" : "disabled", (gint) hr);
+
+ return gst_wasapi2_result (hr);
+}
+
+gboolean
+gst_wasapi2_client_get_mute (GstWasapi2Client * client, gboolean * mute)
+{
+ HRESULT hr;
+ ISimpleAudioVolume *audio_volume;
+ BOOL current_mute = FALSE;
+
+ g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
+ g_return_val_if_fail (mute != NULL, FALSE);
+
+ audio_volume = client->audio_volume;
+
+ if (!audio_volume) {
+ GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
+ return FALSE;
+ }
+
+ hr = audio_volume->GetMute (&current_mute);
+ if (!gst_wasapi2_result (hr))
+ return FALSE;
+
+ *mute = (gboolean) current_mute;
+
+ return TRUE;
+}
+
+gboolean
+gst_wasapi2_client_set_volume (GstWasapi2Client * client, gfloat volume)
+{
+ HRESULT hr;
+ ISimpleAudioVolume *audio_volume;
+
+ g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
+ g_return_val_if_fail (volume >= 0 && volume <= 1.0, FALSE);
+
+ audio_volume = client->audio_volume;
+
+ if (!audio_volume) {
+ GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
+ return FALSE;
+ }
+
+ hr = audio_volume->SetMasterVolume (volume, nullptr);
+ GST_DEBUG_OBJECT (client, "Set volume %.2f hr: 0x%x", volume, (gint) hr);
+
+ return gst_wasapi2_result (hr);
+}
+
+gboolean
+gst_wasapi2_client_get_volume (GstWasapi2Client * client, gfloat * volume)
+{
+ HRESULT hr;
+ ISimpleAudioVolume *audio_volume;
+ float current_volume = FALSE;
+
+ g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), FALSE);
+ g_return_val_if_fail (volume != NULL, FALSE);
+
+ audio_volume = client->audio_volume;
+
+ if (!audio_volume) {
+ GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
+ return FALSE;
+ }
+
+ hr = audio_volume->GetMasterVolume (&current_volume);
+ if (!gst_wasapi2_result (hr))
+ return FALSE;
+
+ *volume = current_volume;
+
+ return TRUE;
+}
+
+GstWasapi2Client *
+gst_wasapi2_client_new (GstWasapi2ClientDeviceClass device_class,
+ gboolean low_latency, gint device_index, const gchar * device_id)
+{
+ GstWasapi2Client *self;
+
+ self = (GstWasapi2Client *) g_object_new (GST_TYPE_WASAPI2_CLIENT,
+ "device-class", device_class, "low-latency", low_latency,
+ "device-index", device_index, "device", device_id, NULL);
+
+ if (!self->audio_client) {
+ gst_object_unref (self);
+ return NULL;
+ }
+
+ gst_object_ref_sink (self);
+
+ return self;
+}
diff --git a/sys/wasapi2/gstwasapi2client.h b/sys/wasapi2/gstwasapi2client.h
new file mode 100644
index 000000000..adc9125df
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2client.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_WASAPI2_CLIENT_H__
+#define __GST_WASAPI2_CLIENT_H__
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
+G_BEGIN_DECLS
+
+typedef enum
+{
+ GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE = 0,
+ GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
+} GstWasapi2ClientDeviceClass;
+
+#define GST_TYPE_WASAPI2_CLIENT_DEVICE_CLASS (gst_wasapi2_client_device_class_get_type())
+GType gst_wasapi2_client_device_class_get_type (void);
+
+#define GST_TYPE_WASAPI2_CLIENT (gst_wasapi2_client_get_type())
+G_DECLARE_FINAL_TYPE (GstWasapi2Client,
+ gst_wasapi2_client, GST, WASAPI2_CLIENT, GstObject);
+
+GstCaps * gst_wasapi2_client_get_caps (GstWasapi2Client * client);
+
+gboolean gst_wasapi2_client_open (GstWasapi2Client * client,
+ GstAudioRingBufferSpec * spec,
+ GstAudioRingBuffer * buf);
+
+gboolean gst_wasapi2_client_start (GstWasapi2Client * client);
+
+gboolean gst_wasapi2_client_stop (GstWasapi2Client * client);
+
+gint gst_wasapi2_client_read (GstWasapi2Client * client,
+ gpointer data,
+ guint length);
+
+gint gst_wasapi2_client_write (GstWasapi2Client * client,
+ gpointer data,
+ guint length);
+
+guint gst_wasapi2_client_delay (GstWasapi2Client * client);
+
+gboolean gst_wasapi2_client_set_mute (GstWasapi2Client * client,
+ gboolean mute);
+
+gboolean gst_wasapi2_client_get_mute (GstWasapi2Client * client,
+ gboolean * mute);
+
+gboolean gst_wasapi2_client_set_volume (GstWasapi2Client * client,
+ gfloat volume);
+
+gboolean gst_wasapi2_client_get_volume (GstWasapi2Client * client,
+ gfloat * volume);
+
+GstWasapi2Client * gst_wasapi2_client_new (GstWasapi2ClientDeviceClass device_class,
+ gboolean low_latency,
+ gint device_index,
+ const gchar * device_id);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC (GstWasapi2Client, gst_object_unref)
+
+G_END_DECLS
+
+#endif /* __GST_WASAPI2_CLIENT_H__ */
diff --git a/sys/wasapi2/gstwasapi2sink.c b/sys/wasapi2/gstwasapi2sink.c
new file mode 100644
index 000000000..a55767c7a
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2sink.c
@@ -0,0 +1,548 @@
+/*
+ * Copyright (C) 2008 Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
+ * Copyright (C) 2013 Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) 2018 Centricular Ltd.
+ * Author: Nirbheek Chauhan <nirbheek@centricular.com>
+ * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-wasapi2sink
+ * @title: wasapi2sink
+ *
+ * Provides audio playback using the Windows Audio Session API available with
+ * Windows 10.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink
+ * ]| Generate 20 ms buffers and render to the default audio device.
+ *
+ * |[
+ * gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink low-latency=true
+ * ]| Same as above, but with the minimum possible latency
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include "gstwasapi2sink.h"
+#include "gstwasapi2util.h"
+#include "gstwasapi2client.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_wasapi2_sink_debug);
+#define GST_CAT_DEFAULT gst_wasapi2_sink_debug
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_WASAPI2_STATIC_CAPS));
+
+#define DEFAULT_LOW_LATENCY FALSE
+#define DEFAULT_MUTE FALSE
+#define DEFAULT_VOLUME 1.0
+
+#define GST_WASAPI2_SINK_LOCK(s) g_mutex_lock(&(s)->lock)
+#define GST_WASAPI2_SINK_UNLOCK(s) g_mutex_unlock(&(s)->lock)
+
+enum
+{
+ PROP_0,
+ PROP_DEVICE,
+ PROP_LOW_LATENCY,
+ PROP_MUTE,
+ PROP_VOLUME,
+};
+
+struct _GstWasapi2Sink
+{
+  GstAudioSink parent;
+
+  GstWasapi2Client *client;     /* owned; NULL until open(), cleared on close() */
+  GstCaps *cached_caps;         /* last device caps, kept after unprepare() */
+  gboolean started;             /* whether IAudioClient was started */
+
+  /* properties */
+  gchar *device_id;             /* "device" property (GUID string or NULL) */
+  gboolean low_latency;         /* "low-latency" property */
+  gboolean mute;                /* cached "mute" property value */
+  gdouble volume;               /* cached "volume" property value */
+
+  /* set when mute/volume changed while no client was configured,
+   * so prepare() can re-apply the pending value */
+  gboolean mute_changed;
+  gboolean volume_changed;
+
+  /* to protect audioclient from set/get property */
+  GMutex lock;
+};
+
+static void gst_wasapi2_sink_dispose (GObject * object);
+static void gst_wasapi2_sink_finalize (GObject * object);
+static void gst_wasapi2_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_wasapi2_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstCaps *gst_wasapi2_sink_get_caps (GstBaseSink * bsink,
+ GstCaps * filter);
+
+static gboolean gst_wasapi2_sink_prepare (GstAudioSink * asink,
+ GstAudioRingBufferSpec * spec);
+static gboolean gst_wasapi2_sink_unprepare (GstAudioSink * asink);
+static gboolean gst_wasapi2_sink_open (GstAudioSink * asink);
+static gboolean gst_wasapi2_sink_close (GstAudioSink * asink);
+static gint gst_wasapi2_sink_write (GstAudioSink * asink,
+ gpointer data, guint length);
+static guint gst_wasapi2_sink_delay (GstAudioSink * asink);
+static void gst_wasapi2_sink_reset (GstAudioSink * asink);
+
+static void gst_wasapi2_sink_set_mute (GstWasapi2Sink * self, gboolean mute);
+static gboolean gst_wasapi2_sink_get_mute (GstWasapi2Sink * self);
+static void gst_wasapi2_sink_set_volume (GstWasapi2Sink * self, gdouble volume);
+static gdouble gst_wasapi2_sink_get_volume (GstWasapi2Sink * self);
+
+#define gst_wasapi2_sink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstWasapi2Sink, gst_wasapi2_sink, GST_TYPE_AUDIO_SINK,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
+
+/* Installs properties, the pad template, element metadata and the
+ * GstAudioSink vfunc table */
+static void
+gst_wasapi2_sink_class_init (GstWasapi2SinkClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
+  GstAudioSinkClass *audiosink_class = GST_AUDIO_SINK_CLASS (klass);
+
+  gobject_class->dispose = gst_wasapi2_sink_dispose;
+  gobject_class->finalize = gst_wasapi2_sink_finalize;
+  gobject_class->set_property = gst_wasapi2_sink_set_property;
+  gobject_class->get_property = gst_wasapi2_sink_get_property;
+
+  g_object_class_install_property (gobject_class, PROP_DEVICE,
+      g_param_spec_string ("device", "Device",
+          "WASAPI playback device as a GUID string",
+          NULL, GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_LOW_LATENCY,
+      g_param_spec_boolean ("low-latency", "Low latency",
+          "Optimize all settings for lowest latency. Always safe to enable.",
+          DEFAULT_LOW_LATENCY, GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MUTE,
+      g_param_spec_boolean ("mute", "Mute", "Mute state of this stream",
+          DEFAULT_MUTE, GST_PARAM_MUTABLE_PLAYING | G_PARAM_READWRITE |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_VOLUME,
+      g_param_spec_double ("volume", "Volume", "Volume of this stream",
+          0.0, 1.0, DEFAULT_VOLUME,
+          GST_PARAM_MUTABLE_PLAYING | G_PARAM_READWRITE |
+          G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (element_class, &sink_template);
+  /* this is a sink: it streams to a render device, not a capture device
+   * (the previous wording was copied from the source element) */
+  gst_element_class_set_static_metadata (element_class, "Wasapi2Sink",
+      "Sink/Audio/Hardware",
+      "Stream audio to an audio render device through WASAPI",
+      "Nirbheek Chauhan <nirbheek@centricular.com>, "
+      "Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>, "
+      "Seungha Yang <seungha@centricular.com>");
+
+  basesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_get_caps);
+
+  audiosink_class->prepare = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_prepare);
+  audiosink_class->unprepare = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_unprepare);
+  audiosink_class->open = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_open);
+  audiosink_class->close = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_close);
+  audiosink_class->write = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_write);
+  audiosink_class->delay = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_delay);
+  audiosink_class->reset = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_reset);
+
+  GST_DEBUG_CATEGORY_INIT (gst_wasapi2_sink_debug, "wasapi2sink",
+      0, "Windows audio session API sink");
+}
+
+/* Instance init: set property defaults and create the client lock */
+static void
+gst_wasapi2_sink_init (GstWasapi2Sink * self)
+{
+  self->low_latency = DEFAULT_LOW_LATENCY;
+  self->mute = DEFAULT_MUTE;
+  self->volume = DEFAULT_VOLUME;
+
+  g_mutex_init (&self->lock);
+}
+
+/* GObject::dispose; may run more than once, so only clearable
+ * references are dropped here (the mutex is released in finalize) */
+static void
+gst_wasapi2_sink_dispose (GObject * object)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (object);
+
+  /* clear under lock since set/get property also touch the client */
+  GST_WASAPI2_SINK_LOCK (self);
+  gst_clear_object (&self->client);
+  gst_clear_caps (&self->cached_caps);
+  GST_WASAPI2_SINK_UNLOCK (self);
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GObject::finalize; frees plain-data members and the lock */
+static void
+gst_wasapi2_sink_finalize (GObject * object)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (object);
+
+  g_free (self->device_id);
+  g_mutex_clear (&self->lock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject::set_property; "mute" and "volume" are forwarded to the
+ * device (under the element lock) via the helpers below */
+static void
+gst_wasapi2_sink_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (object);
+
+  switch (prop_id) {
+    case PROP_DEVICE:
+      g_free (self->device_id);
+      self->device_id = g_value_dup_string (value);
+      break;
+    case PROP_LOW_LATENCY:
+      self->low_latency = g_value_get_boolean (value);
+      break;
+    case PROP_MUTE:
+      gst_wasapi2_sink_set_mute (self, g_value_get_boolean (value));
+      break;
+    case PROP_VOLUME:
+      gst_wasapi2_sink_set_volume (self, g_value_get_double (value));
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject::get_property; "mute" and "volume" query the device when
+ * available, otherwise return the cached property value */
+static void
+gst_wasapi2_sink_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (object);
+
+  switch (prop_id) {
+    case PROP_DEVICE:
+      g_value_set_string (value, self->device_id);
+      break;
+    case PROP_LOW_LATENCY:
+      g_value_set_boolean (value, self->low_latency);
+      break;
+    case PROP_MUTE:
+      g_value_set_boolean (value, gst_wasapi2_sink_get_mute (self));
+      break;
+    case PROP_VOLUME:
+      g_value_set_double (value, gst_wasapi2_sink_get_volume (self));
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstBaseSink::get_caps; returns device caps while the client exists,
+ * falls back to cached caps after unprepare(), then to template caps */
+static GstCaps *
+gst_wasapi2_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (bsink);
+  GstCaps *caps = NULL;
+
+  if (self->client)
+    caps = gst_wasapi2_client_get_caps (self->client);
+
+  /* store one caps here so that we can return device caps even if
+   * audioclient was closed due to unprepare(); previously the device
+   * was queried twice (once for the cache, once for the return value) */
+  if (caps && !self->cached_caps)
+    self->cached_caps = gst_caps_ref (caps);
+
+  if (!caps && self->cached_caps)
+    caps = gst_caps_ref (self->cached_caps);
+
+  if (!caps)
+    caps = gst_pad_get_pad_template_caps (bsink->sinkpad);
+
+  if (filter) {
+    GstCaps *filtered =
+        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (caps);
+    caps = filtered;
+  }
+
+  GST_DEBUG_OBJECT (self, "returning caps %" GST_PTR_FORMAT, caps);
+
+  return caps;
+}
+
+/* Creates the render-class client; caller must hold the element lock */
+static gboolean
+gst_wasapi2_sink_open_unlocked (GstAudioSink * asink)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
+
+  /* -1: no device index, selection is by device_id (or default device) */
+  self->client =
+      gst_wasapi2_client_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
+      self->low_latency, -1, self->device_id);
+
+  return ! !self->client;
+}
+
+/* GstAudioSink::open; creates the client and posts an element error
+ * on failure */
+static gboolean
+gst_wasapi2_sink_open (GstAudioSink * asink)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
+  gboolean ret;
+
+  GST_DEBUG_OBJECT (self, "Opening device");
+
+  GST_WASAPI2_SINK_LOCK (self);
+  ret = gst_wasapi2_sink_open_unlocked (asink);
+  GST_WASAPI2_SINK_UNLOCK (self);
+
+  if (!ret) {
+    GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
+        ("Failed to open device"));
+    return FALSE;
+  }
+
+  return TRUE;
+}
+
+/* GstAudioSink::close; drops the client and the cached caps */
+static gboolean
+gst_wasapi2_sink_close (GstAudioSink * asink)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
+
+  GST_WASAPI2_SINK_LOCK (self);
+
+  gst_clear_object (&self->client);
+  gst_clear_caps (&self->cached_caps);
+  self->started = FALSE;
+
+  GST_WASAPI2_SINK_UNLOCK (self);
+
+  return TRUE;
+}
+
+/* GstAudioSink::prepare; opens the client with the negotiated spec,
+ * re-applies any pending mute/volume, and defers the actual
+ * IAudioClient start to the first write() */
+static gboolean
+gst_wasapi2_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
+  GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK (asink);
+  gboolean ret = FALSE;
+
+  GST_WASAPI2_SINK_LOCK (self);
+  /* client may be gone after unprepare(); reopen it if needed */
+  if (!self->client && !gst_wasapi2_sink_open_unlocked (asink)) {
+    GST_ERROR_OBJECT (self, "No audio client was configured");
+    goto done;
+  }
+
+  if (!gst_wasapi2_client_open (self->client, spec, bsink->ringbuffer)) {
+    GST_ERROR_OBJECT (self, "Couldn't open audio client");
+    goto done;
+  }
+
+  /* Set mute and volume here again, maybe when "mute" property was set, audioclient
+   * might not be configured at that moment */
+  if (self->mute_changed) {
+    gst_wasapi2_client_set_mute (self->client, self->mute);
+    self->mute_changed = FALSE;
+  }
+
+  if (self->volume_changed) {
+    gst_wasapi2_client_set_volume (self->client, self->volume);
+    self->volume_changed = FALSE;
+  }
+
+  /* Will start IAudioClient on the first write request */
+  self->started = FALSE;
+  ret = TRUE;
+
+done:
+  GST_WASAPI2_SINK_UNLOCK (self);
+
+  return ret;
+}
+
+/* GstAudioSink::unprepare; stops and releases the client */
+static gboolean
+gst_wasapi2_sink_unprepare (GstAudioSink * asink)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
+
+  self->started = FALSE;
+
+  /* Will reopen the device in a later prepare() */
+  GST_WASAPI2_SINK_LOCK (self);
+  if (self->client) {
+    gst_wasapi2_client_stop (self->client);
+    gst_clear_object (&self->client);
+  }
+  GST_WASAPI2_SINK_UNLOCK (self);
+
+  return TRUE;
+}
+
+/* GstAudioSink::write; lazily (re)starts the client on the first
+ * write after prepare()/reset(), then renders @length bytes.
+ * Returns the number of bytes written or -1 on error. */
+static gint
+gst_wasapi2_sink_write (GstAudioSink * asink, gpointer data, guint length)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
+
+  if (!self->client) {
+    GST_ERROR_OBJECT (self, "No audio client was configured");
+    return -1;
+  }
+
+  if (!self->started) {
+    if (!gst_wasapi2_client_start (self->client)) {
+      GST_ERROR_OBJECT (self, "Failed to re-start client");
+      return -1;
+    }
+
+    self->started = TRUE;
+  }
+
+  return gst_wasapi2_client_write (self->client, data, length);
+}
+
+/* GstAudioSink::delay; number of samples queued in the device,
+ * 0 when no client is configured */
+static guint
+gst_wasapi2_sink_delay (GstAudioSink * asink)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
+
+  if (!self->client)
+    return 0;
+
+  return gst_wasapi2_client_delay (self->client);
+}
+
+/* GstAudioSink::reset; stops the stream so the next write() restarts it.
+ * NOTE(review): runs without taking the element lock — presumably safe
+ * against close()/unprepare() ordering in the base class; confirm */
+static void
+gst_wasapi2_sink_reset (GstAudioSink * asink)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
+
+  GST_INFO_OBJECT (self, "reset called");
+
+  self->started = FALSE;
+
+  if (!self->client)
+    return;
+
+  gst_wasapi2_client_stop (self->client);
+}
+
+/* Applies "mute" to the device if configured; otherwise keeps
+ * mute_changed set so prepare() re-applies it later */
+static void
+gst_wasapi2_sink_set_mute (GstWasapi2Sink * self, gboolean mute)
+{
+  GST_WASAPI2_SINK_LOCK (self);
+
+  self->mute = mute;
+  self->mute_changed = TRUE;
+
+  if (self->client) {
+    if (!gst_wasapi2_client_set_mute (self->client, mute)) {
+      GST_INFO_OBJECT (self, "Couldn't set mute");
+    } else {
+      self->mute_changed = FALSE;
+    }
+  } else {
+    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
+  }
+
+  GST_WASAPI2_SINK_UNLOCK (self);
+}
+
+/* Returns the device mute state when available (also refreshing the
+ * cached property value), otherwise the cached value */
+static gboolean
+gst_wasapi2_sink_get_mute (GstWasapi2Sink * self)
+{
+  gboolean mute;
+
+  GST_WASAPI2_SINK_LOCK (self);
+
+  mute = self->mute;
+
+  if (self->client) {
+    if (!gst_wasapi2_client_get_mute (self->client, &mute)) {
+      GST_INFO_OBJECT (self, "Couldn't get mute state");
+    } else {
+      self->mute = mute;
+    }
+  } else {
+    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
+  }
+
+  GST_WASAPI2_SINK_UNLOCK (self);
+
+  return mute;
+}
+
+/* Clamps and applies "volume" to the device if configured; otherwise
+ * keeps volume_changed set so prepare() re-applies it later */
+static void
+gst_wasapi2_sink_set_volume (GstWasapi2Sink * self, gdouble volume)
+{
+  GST_WASAPI2_SINK_LOCK (self);
+
+  self->volume = volume;
+  /* clip volume value */
+  self->volume = MAX (0.0, self->volume);
+  self->volume = MIN (1.0, self->volume);
+  self->volume_changed = TRUE;
+
+  if (self->client) {
+    if (!gst_wasapi2_client_set_volume (self->client, (gfloat) self->volume)) {
+      GST_INFO_OBJECT (self, "Couldn't set volume");
+    } else {
+      self->volume_changed = FALSE;
+    }
+  } else {
+    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
+  }
+
+  GST_WASAPI2_SINK_UNLOCK (self);
+}
+
+/* Returns the device volume when available (also refreshing the
+ * cached property value), otherwise the cached value; the result is
+ * clamped to [0.0, 1.0] */
+static gdouble
+gst_wasapi2_sink_get_volume (GstWasapi2Sink * self)
+{
+  gfloat volume;
+
+  GST_WASAPI2_SINK_LOCK (self);
+
+  volume = (gfloat) self->volume;
+
+  if (self->client) {
+    if (!gst_wasapi2_client_get_volume (self->client, &volume)) {
+      GST_INFO_OBJECT (self, "Couldn't get volume");
+    } else {
+      self->volume = volume;
+    }
+  } else {
+    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
+  }
+
+  GST_WASAPI2_SINK_UNLOCK (self);
+
+  volume = MAX (0.0, volume);
+  volume = MIN (1.0, volume);
+
+  return volume;
+}
diff --git a/sys/wasapi2/gstwasapi2sink.h b/sys/wasapi2/gstwasapi2sink.h
new file mode 100644
index 000000000..433dcb656
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2sink.h
@@ -0,0 +1,34 @@
+/* GStreamer
+ * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_WASAPI2_SINK_H__
+#define __GST_WASAPI2_SINK_H__
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_WASAPI2_SINK (gst_wasapi2_sink_get_type ())
+G_DECLARE_FINAL_TYPE (GstWasapi2Sink,
+ gst_wasapi2_sink, GST, WASAPI2_SINK, GstAudioSink);
+
+G_END_DECLS
+
+#endif /* __GST_WASAPI2_SINK_H__ */
diff --git a/sys/wasapi2/gstwasapi2src.c b/sys/wasapi2/gstwasapi2src.c
new file mode 100644
index 000000000..7f3bbee8a
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2src.c
@@ -0,0 +1,546 @@
+/*
+ * Copyright (C) 2008 Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
+ * Copyright (C) 2018 Centricular Ltd.
+ * Author: Nirbheek Chauhan <nirbheek@centricular.com>
+ * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-wasapi2src
+ * @title: wasapi2src
+ *
+ * Provides audio capture from the Windows Audio Session API available with
+ * Windows 10.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 -v wasapi2src ! fakesink
+ * ]| Capture from the default audio device and render to fakesink.
+ *
+ * |[
+ * gst-launch-1.0 -v wasapi2src low-latency=true ! fakesink
+ * ]| Capture from the default audio device with the minimum possible latency and render to fakesink.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include "gstwasapi2src.h"
+#include "gstwasapi2util.h"
+#include "gstwasapi2client.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_wasapi2_src_debug);
+#define GST_CAT_DEFAULT gst_wasapi2_src_debug
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_WASAPI2_STATIC_CAPS));
+
+#define DEFAULT_LOW_LATENCY FALSE
+#define DEFAULT_MUTE FALSE
+#define DEFAULT_VOLUME 1.0
+
+#define GST_WASAPI2_SRC_LOCK(s) g_mutex_lock(&(s)->lock)
+#define GST_WASAPI2_SRC_UNLOCK(s) g_mutex_unlock(&(s)->lock)
+
+enum
+{
+ PROP_0,
+ PROP_DEVICE,
+ PROP_LOW_LATENCY,
+ PROP_MUTE,
+ PROP_VOLUME,
+};
+
+struct _GstWasapi2Src
+{
+  GstAudioSrc parent;
+
+  GstWasapi2Client *client;     /* owned; NULL until open(), cleared on close() */
+  GstCaps *cached_caps;         /* last device caps, kept after unprepare() */
+  gboolean started;             /* whether IAudioClient was started */
+
+  /* properties */
+  gchar *device_id;             /* "device" property (GUID string or NULL) */
+  gboolean low_latency;         /* "low-latency" property */
+  gboolean mute;                /* cached "mute" property value */
+  gdouble volume;               /* cached "volume" property value */
+
+  /* set when mute/volume changed while no client was configured,
+   * so prepare() can re-apply the pending value */
+  gboolean mute_changed;
+  gboolean volume_changed;
+
+  /* to protect audioclient from set/get property */
+  GMutex lock;
+};
+
+static void gst_wasapi2_src_dispose (GObject * object);
+static void gst_wasapi2_src_finalize (GObject * object);
+static void gst_wasapi2_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_wasapi2_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstCaps *gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter);
+
+static gboolean gst_wasapi2_src_open (GstAudioSrc * asrc);
+static gboolean gst_wasapi2_src_close (GstAudioSrc * asrc);
+static gboolean gst_wasapi2_src_prepare (GstAudioSrc * asrc,
+ GstAudioRingBufferSpec * spec);
+static gboolean gst_wasapi2_src_unprepare (GstAudioSrc * asrc);
+static guint gst_wasapi2_src_read (GstAudioSrc * asrc, gpointer data,
+ guint length, GstClockTime * timestamp);
+static guint gst_wasapi2_src_delay (GstAudioSrc * asrc);
+static void gst_wasapi2_src_reset (GstAudioSrc * asrc);
+
+static void gst_wasapi2_src_set_mute (GstWasapi2Src * self, gboolean mute);
+static gboolean gst_wasapi2_src_get_mute (GstWasapi2Src * self);
+static void gst_wasapi2_src_set_volume (GstWasapi2Src * self, gdouble volume);
+static gdouble gst_wasapi2_src_get_volume (GstWasapi2Src * self);
+
+#define gst_wasapi2_src_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstWasapi2Src, gst_wasapi2_src, GST_TYPE_AUDIO_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
+
+/* Installs properties, the pad template, element metadata and the
+ * GstAudioSrc vfunc table */
+static void
+gst_wasapi2_src_class_init (GstWasapi2SrcClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
+  GstAudioSrcClass *audiosrc_class = GST_AUDIO_SRC_CLASS (klass);
+
+  gobject_class->dispose = gst_wasapi2_src_dispose;
+  gobject_class->finalize = gst_wasapi2_src_finalize;
+  gobject_class->set_property = gst_wasapi2_src_set_property;
+  gobject_class->get_property = gst_wasapi2_src_get_property;
+
+  /* this is a source: the device is a capture device, not a playback
+   * device (the previous blurb was copied from the sink element) */
+  g_object_class_install_property (gobject_class, PROP_DEVICE,
+      g_param_spec_string ("device", "Device",
+          "WASAPI capture device as a GUID string",
+          NULL, GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_LOW_LATENCY,
+      g_param_spec_boolean ("low-latency", "Low latency",
+          "Optimize all settings for lowest latency. Always safe to enable.",
+          DEFAULT_LOW_LATENCY, GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MUTE,
+      g_param_spec_boolean ("mute", "Mute", "Mute state of this stream",
+          DEFAULT_MUTE, GST_PARAM_MUTABLE_PLAYING | G_PARAM_READWRITE |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_VOLUME,
+      g_param_spec_double ("volume", "Volume", "Volume of this stream",
+          0.0, 1.0, DEFAULT_VOLUME,
+          GST_PARAM_MUTABLE_PLAYING | G_PARAM_READWRITE |
+          G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (element_class, &src_template);
+  gst_element_class_set_static_metadata (element_class, "Wasapi2Src",
+      "Source/Audio/Hardware",
+      "Stream audio from an audio capture device through WASAPI",
+      "Nirbheek Chauhan <nirbheek@centricular.com>, "
+      "Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>, "
+      "Seungha Yang <seungha@centricular.com>");
+
+  basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_wasapi2_src_get_caps);
+
+  audiosrc_class->open = GST_DEBUG_FUNCPTR (gst_wasapi2_src_open);
+  audiosrc_class->close = GST_DEBUG_FUNCPTR (gst_wasapi2_src_close);
+  audiosrc_class->read = GST_DEBUG_FUNCPTR (gst_wasapi2_src_read);
+  audiosrc_class->prepare = GST_DEBUG_FUNCPTR (gst_wasapi2_src_prepare);
+  audiosrc_class->unprepare = GST_DEBUG_FUNCPTR (gst_wasapi2_src_unprepare);
+  audiosrc_class->delay = GST_DEBUG_FUNCPTR (gst_wasapi2_src_delay);
+  audiosrc_class->reset = GST_DEBUG_FUNCPTR (gst_wasapi2_src_reset);
+
+  GST_DEBUG_CATEGORY_INIT (gst_wasapi2_src_debug, "wasapi2src",
+      0, "Windows audio session API source");
+}
+
+/* Instance init: set property defaults and create the client lock */
+static void
+gst_wasapi2_src_init (GstWasapi2Src * self)
+{
+  self->mute = DEFAULT_MUTE;
+  self->volume = DEFAULT_VOLUME;
+  self->low_latency = DEFAULT_LOW_LATENCY;
+
+  g_mutex_init (&self->lock);
+}
+
+/* GObject::dispose; may run more than once, so only clearable
+ * references are dropped here (the mutex is released in finalize) */
+static void
+gst_wasapi2_src_dispose (GObject * object)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (object);
+
+  /* clear under lock since set/get property also touch the client */
+  GST_WASAPI2_SRC_LOCK (self);
+  gst_clear_object (&self->client);
+  gst_clear_caps (&self->cached_caps);
+  GST_WASAPI2_SRC_UNLOCK (self);
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GObject::finalize; frees plain-data members and the lock */
+static void
+gst_wasapi2_src_finalize (GObject * object)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (object);
+
+  g_free (self->device_id);
+  g_mutex_clear (&self->lock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject::set_property; "mute" and "volume" are forwarded to the
+ * device (under the element lock) via the helpers below */
+static void
+gst_wasapi2_src_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (object);
+
+  switch (prop_id) {
+    case PROP_DEVICE:
+      g_free (self->device_id);
+      self->device_id = g_value_dup_string (value);
+      break;
+    case PROP_LOW_LATENCY:
+      self->low_latency = g_value_get_boolean (value);
+      break;
+    case PROP_MUTE:
+      gst_wasapi2_src_set_mute (self, g_value_get_boolean (value));
+      break;
+    case PROP_VOLUME:
+      gst_wasapi2_src_set_volume (self, g_value_get_double (value));
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject::get_property; "mute" and "volume" query the device when
+ * available, otherwise return the cached property value */
+static void
+gst_wasapi2_src_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (object);
+
+  switch (prop_id) {
+    case PROP_DEVICE:
+      g_value_set_string (value, self->device_id);
+      break;
+    case PROP_LOW_LATENCY:
+      g_value_set_boolean (value, self->low_latency);
+      break;
+    case PROP_MUTE:
+      g_value_set_boolean (value, gst_wasapi2_src_get_mute (self));
+      break;
+    case PROP_VOLUME:
+      g_value_set_double (value, gst_wasapi2_src_get_volume (self));
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstBaseSrc::get_caps; returns device caps while the client exists,
+ * falls back to cached caps after unprepare(), then to template caps */
+static GstCaps *
+gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (bsrc);
+  GstCaps *caps = NULL;
+
+  if (self->client)
+    caps = gst_wasapi2_client_get_caps (self->client);
+
+  /* store one caps here so that we can return device caps even if
+   * audioclient was closed due to unprepare(); previously the device
+   * was queried twice (once for the cache, once for the return value) */
+  if (caps && !self->cached_caps)
+    self->cached_caps = gst_caps_ref (caps);
+
+  if (!caps && self->cached_caps)
+    caps = gst_caps_ref (self->cached_caps);
+
+  if (!caps)
+    caps = gst_pad_get_pad_template_caps (bsrc->srcpad);
+
+  if (filter) {
+    GstCaps *filtered =
+        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (caps);
+    caps = filtered;
+  }
+
+  GST_DEBUG_OBJECT (self, "returning caps %" GST_PTR_FORMAT, caps);
+
+  return caps;
+}
+
+/* Creates the capture-class client; caller must hold the element lock */
+static gboolean
+gst_wasapi2_src_open_unlocked (GstAudioSrc * asrc)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
+
+  /* -1: no device index, selection is by device_id (or default device) */
+  self->client =
+      gst_wasapi2_client_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE,
+      self->low_latency, -1, self->device_id);
+
+  return ! !self->client;
+}
+
+/* GstAudioSrc::open; creates the client and posts an element error
+ * on failure */
+static gboolean
+gst_wasapi2_src_open (GstAudioSrc * asrc)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
+  gboolean ret;
+
+  GST_DEBUG_OBJECT (self, "Opening device");
+
+  GST_WASAPI2_SRC_LOCK (self);
+  ret = gst_wasapi2_src_open_unlocked (asrc);
+  GST_WASAPI2_SRC_UNLOCK (self);
+
+  if (!ret) {
+    GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ, (NULL),
+        ("Failed to open device"));
+    return FALSE;
+  }
+
+  return TRUE;
+}
+
+/* GstAudioSrc::close; drops the client and the cached caps */
+static gboolean
+gst_wasapi2_src_close (GstAudioSrc * asrc)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
+
+  GST_WASAPI2_SRC_LOCK (self);
+
+  gst_clear_object (&self->client);
+  gst_clear_caps (&self->cached_caps);
+  self->started = FALSE;
+
+  GST_WASAPI2_SRC_UNLOCK (self);
+
+  return TRUE;
+}
+
+/* GstAudioSrc::prepare; opens the client with the negotiated spec,
+ * re-applies any pending mute/volume, and defers the actual
+ * IAudioClient start to the first read() */
+static gboolean
+gst_wasapi2_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
+  GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC (asrc);
+  gboolean ret = FALSE;
+
+  GST_WASAPI2_SRC_LOCK (self);
+  /* client may be gone after unprepare(); reopen it if needed */
+  if (!self->client && !gst_wasapi2_src_open_unlocked (asrc)) {
+    GST_ERROR_OBJECT (self, "No audio client was configured");
+    goto done;
+  }
+
+  if (!gst_wasapi2_client_open (self->client, spec, bsrc->ringbuffer)) {
+    GST_ERROR_OBJECT (self, "Couldn't open audio client");
+    goto done;
+  }
+
+  /* Set mute and volume here again, maybe when "mute" property was set, audioclient
+   * might not be configured at that moment */
+  if (self->mute_changed) {
+    gst_wasapi2_client_set_mute (self->client, self->mute);
+    self->mute_changed = FALSE;
+  }
+
+  if (self->volume_changed) {
+    gst_wasapi2_client_set_volume (self->client, self->volume);
+    self->volume_changed = FALSE;
+  }
+
+  /* Will start IAudioClient on the first read request */
+  self->started = FALSE;
+  ret = TRUE;
+
+done:
+  GST_WASAPI2_SRC_UNLOCK (self);
+
+  return ret;
+}
+
+/* GstAudioSrc::unprepare; stops and releases the client */
+static gboolean
+gst_wasapi2_src_unprepare (GstAudioSrc * asrc)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
+
+  self->started = FALSE;
+
+  /* Will reopen the device in a later prepare() */
+  GST_WASAPI2_SRC_LOCK (self);
+  if (self->client) {
+    gst_wasapi2_client_stop (self->client);
+    gst_clear_object (&self->client);
+  }
+  GST_WASAPI2_SRC_UNLOCK (self);
+
+  return TRUE;
+}
+
+/* GstAudioSrc::read; lazily (re)starts the client on the first read
+ * after prepare()/reset(), then captures @length bytes.
+ * Returns the number of bytes read; -1 (as guint, i.e. G_MAXUINT)
+ * presumably signals an error to the audiosrc base class */
+static guint
+gst_wasapi2_src_read (GstAudioSrc * asrc, gpointer data, guint length,
+    GstClockTime * timestamp)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
+
+  if (!self->client) {
+    GST_ERROR_OBJECT (self, "No audio client was configured");
+    return -1;
+  }
+
+  if (!self->started) {
+    if (!gst_wasapi2_client_start (self->client)) {
+      GST_ERROR_OBJECT (self, "Failed to re-start client");
+      return -1;
+    }
+
+    self->started = TRUE;
+  }
+
+  return gst_wasapi2_client_read (self->client, data, length);
+}
+
+/* GstAudioSrc::delay; number of samples pending in the device,
+ * 0 when no client is configured */
+static guint
+gst_wasapi2_src_delay (GstAudioSrc * asrc)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
+
+  if (!self->client)
+    return 0;
+
+  return gst_wasapi2_client_delay (self->client);
+}
+
+/* GstAudioSrc::reset; stops the stream so the next read() restarts it.
+ * NOTE(review): runs without taking the element lock — presumably safe
+ * against close()/unprepare() ordering in the base class; confirm */
+static void
+gst_wasapi2_src_reset (GstAudioSrc * asrc)
+{
+  GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
+
+  GST_DEBUG_OBJECT (self, "reset called");
+
+  self->started = FALSE;
+
+  if (!self->client)
+    return;
+
+  gst_wasapi2_client_stop (self->client);
+}
+
+/* Applies "mute" to the device if configured; otherwise keeps
+ * mute_changed set so prepare() re-applies it later */
+static void
+gst_wasapi2_src_set_mute (GstWasapi2Src * self, gboolean mute)
+{
+  GST_WASAPI2_SRC_LOCK (self);
+
+  self->mute = mute;
+  self->mute_changed = TRUE;
+
+  if (self->client) {
+    if (!gst_wasapi2_client_set_mute (self->client, mute)) {
+      GST_INFO_OBJECT (self, "Couldn't set mute");
+    } else {
+      self->mute_changed = FALSE;
+    }
+  } else {
+    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
+  }
+
+  GST_WASAPI2_SRC_UNLOCK (self);
+}
+
+/* Returns the device mute state when available (also refreshing the
+ * cached property value), otherwise the cached value */
+static gboolean
+gst_wasapi2_src_get_mute (GstWasapi2Src * self)
+{
+  gboolean mute;
+
+  GST_WASAPI2_SRC_LOCK (self);
+
+  mute = self->mute;
+
+  if (self->client) {
+    if (!gst_wasapi2_client_get_mute (self->client, &mute)) {
+      GST_INFO_OBJECT (self, "Couldn't get mute state");
+    } else {
+      self->mute = mute;
+    }
+  } else {
+    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
+  }
+
+  GST_WASAPI2_SRC_UNLOCK (self);
+
+  return mute;
+}
+
+/* Clamps and applies "volume" to the device if configured; otherwise
+ * keeps volume_changed set so prepare() re-applies it later */
+static void
+gst_wasapi2_src_set_volume (GstWasapi2Src * self, gdouble volume)
+{
+  GST_WASAPI2_SRC_LOCK (self);
+
+  self->volume = volume;
+  /* clip volume value */
+  self->volume = MAX (0.0, self->volume);
+  self->volume = MIN (1.0, self->volume);
+  self->volume_changed = TRUE;
+
+  if (self->client) {
+    if (!gst_wasapi2_client_set_volume (self->client, (gfloat) self->volume)) {
+      GST_INFO_OBJECT (self, "Couldn't set volume");
+    } else {
+      self->volume_changed = FALSE;
+    }
+  } else {
+    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
+  }
+
+  GST_WASAPI2_SRC_UNLOCK (self);
+}
+
+/* Returns the device volume when available (also refreshing the
+ * cached property value), otherwise the cached value; the result is
+ * clamped to [0.0, 1.0] */
+static gdouble
+gst_wasapi2_src_get_volume (GstWasapi2Src * self)
+{
+  gfloat volume;
+
+  GST_WASAPI2_SRC_LOCK (self);
+
+  volume = (gfloat) self->volume;
+
+  if (self->client) {
+    if (!gst_wasapi2_client_get_volume (self->client, &volume)) {
+      GST_INFO_OBJECT (self, "Couldn't get volume");
+    } else {
+      self->volume = volume;
+    }
+  } else {
+    GST_DEBUG_OBJECT (self, "audio client is not configured yet");
+  }
+
+  GST_WASAPI2_SRC_UNLOCK (self);
+
+  volume = MAX (0.0, volume);
+  volume = MIN (1.0, volume);
+
+  return volume;
+}
diff --git a/sys/wasapi2/gstwasapi2src.h b/sys/wasapi2/gstwasapi2src.h
new file mode 100644
index 000000000..c832db5e8
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2src.h
@@ -0,0 +1,34 @@
+/* GStreamer
+ * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_WASAPI2_SRC_H__
+#define __GST_WASAPI2_SRC_H__
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_WASAPI2_SRC (gst_wasapi2_src_get_type ())
+G_DECLARE_FINAL_TYPE (GstWasapi2Src,
+ gst_wasapi2_src, GST, WASAPI2_SRC, GstAudioSrc);
+
+G_END_DECLS
+
+#endif /* __GST_WASAPI2_SRC_H__ */
diff --git a/sys/wasapi2/gstwasapi2util.c b/sys/wasapi2/gstwasapi2util.c
new file mode 100644
index 000000000..c618b83cc
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2util.c
@@ -0,0 +1,209 @@
+/*
+ * Copyright (C) 2008 Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
+ * Copyright (C) 2018 Centricular Ltd.
+ * Author: Nirbheek Chauhan <nirbheek@centricular.com>
+ * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include "gstwasapi2util.h"
+#include <audioclient.h>
+
+/* Must reference the category this plugin defines in plugin.c
+ * (gst_wasapi2_debug); the old wasapi plugin's gst_wasapi_debug lives in
+ * a different plugin and is not linked here, so referencing it would fail
+ * to link */
+GST_DEBUG_CATEGORY_EXTERN (gst_wasapi2_debug);
+#define GST_CAT_DEFAULT gst_wasapi2_debug
+
+/* *INDENT-OFF* */
+/* Mapping between WASAPI speaker-position bits (WAVEFORMATEXTENSIBLE
+ * dwChannelMask flags from audioclient.h/ksmedia.h) and GStreamer channel
+ * positions, listed in WASAPI bit order.
+ * NOTE(review): nothing in this file references the table yet --
+ * presumably channel-mask conversion helpers will use it (TODO confirm);
+ * until then the unused static may trigger a compiler warning. */
+static struct
+{
+  guint64 wasapi_pos;
+  GstAudioChannelPosition gst_pos;
+} wasapi_to_gst_pos[] = {
+  {SPEAKER_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT},
+  {SPEAKER_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT},
+  {SPEAKER_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER},
+  {SPEAKER_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE1},
+  {SPEAKER_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT},
+  {SPEAKER_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT},
+  {SPEAKER_FRONT_LEFT_OF_CENTER,
+      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER},
+  {SPEAKER_FRONT_RIGHT_OF_CENTER,
+      GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
+  {SPEAKER_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_REAR_CENTER},
+  /* Enum values diverge from this point onwards */
+  {SPEAKER_SIDE_LEFT, GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT},
+  {SPEAKER_SIDE_RIGHT, GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT},
+  {SPEAKER_TOP_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_CENTER},
+  {SPEAKER_TOP_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT},
+  {SPEAKER_TOP_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER},
+  {SPEAKER_TOP_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT},
+  {SPEAKER_TOP_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT},
+  {SPEAKER_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER},
+  {SPEAKER_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT}
+};
+/* *INDENT-ON* */
+
+/* Map well-known audio-client HRESULT codes to their symbolic names, for
+ * when FormatMessage() has no message text for them. Returns a static
+ * string; never NULL. */
+static const gchar *
+hresult_to_string_fallback (HRESULT hr)
+{
+  /* *INDENT-OFF* */
+  static const struct
+  {
+    HRESULT code;
+    const gchar *name;
+  } code_names[] = {
+    {AUDCLNT_E_NOT_INITIALIZED, "AUDCLNT_E_NOT_INITIALIZED"},
+    {AUDCLNT_E_ALREADY_INITIALIZED, "AUDCLNT_E_ALREADY_INITIALIZED"},
+    {AUDCLNT_E_WRONG_ENDPOINT_TYPE, "AUDCLNT_E_WRONG_ENDPOINT_TYPE"},
+    {AUDCLNT_E_DEVICE_INVALIDATED, "AUDCLNT_E_DEVICE_INVALIDATED"},
+    {AUDCLNT_E_NOT_STOPPED, "AUDCLNT_E_NOT_STOPPED"},
+    {AUDCLNT_E_BUFFER_TOO_LARGE, "AUDCLNT_E_BUFFER_TOO_LARGE"},
+    {AUDCLNT_E_OUT_OF_ORDER, "AUDCLNT_E_OUT_OF_ORDER"},
+    {AUDCLNT_E_UNSUPPORTED_FORMAT, "AUDCLNT_E_UNSUPPORTED_FORMAT"},
+    {AUDCLNT_E_INVALID_DEVICE_PERIOD, "AUDCLNT_E_INVALID_DEVICE_PERIOD"},
+    {AUDCLNT_E_INVALID_SIZE, "AUDCLNT_E_INVALID_SIZE"},
+    {AUDCLNT_E_DEVICE_IN_USE, "AUDCLNT_E_DEVICE_IN_USE"},
+    {AUDCLNT_E_BUFFER_OPERATION_PENDING, "AUDCLNT_E_BUFFER_OPERATION_PENDING"},
+    {AUDCLNT_E_BUFFER_SIZE_ERROR, "AUDCLNT_E_BUFFER_SIZE_ERROR"},
+    {AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED, "AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED"},
+    {AUDCLNT_E_THREAD_NOT_REGISTERED, "AUDCLNT_E_THREAD_NOT_REGISTERED"},
+    {AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED,
+        "AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED"},
+    {AUDCLNT_E_ENDPOINT_CREATE_FAILED, "AUDCLNT_E_ENDPOINT_CREATE_FAILED"},
+    {AUDCLNT_E_SERVICE_NOT_RUNNING, "AUDCLNT_E_SERVICE_NOT_RUNNING"},
+    {AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED,
+        "AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED"},
+    {AUDCLNT_E_EXCLUSIVE_MODE_ONLY, "AUDCLNT_E_EXCLUSIVE_MODE_ONLY"},
+    {AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL,
+        "AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL"},
+    {AUDCLNT_E_EVENTHANDLE_NOT_SET, "AUDCLNT_E_EVENTHANDLE_NOT_SET"},
+    {AUDCLNT_E_INCORRECT_BUFFER_SIZE, "AUDCLNT_E_INCORRECT_BUFFER_SIZE"},
+    {AUDCLNT_E_CPUUSAGE_EXCEEDED, "AUDCLNT_E_CPUUSAGE_EXCEEDED"},
+    {AUDCLNT_S_BUFFER_EMPTY, "AUDCLNT_S_BUFFER_EMPTY"},
+    {AUDCLNT_S_THREAD_ALREADY_REGISTERED,
+        "AUDCLNT_S_THREAD_ALREADY_REGISTERED"},
+    {AUDCLNT_S_POSITION_STALLED, "AUDCLNT_S_POSITION_STALLED"},
+    {E_POINTER, "E_POINTER"},
+    {E_INVALIDARG, "E_INVALIDARG"},
+  };
+  /* *INDENT-ON* */
+  guint i;
+
+  for (i = 0; i < G_N_ELEMENTS (code_names); i++) {
+    if (code_names[i].code == hr)
+      return code_names[i].name;
+  }
+
+  return "unknown error";
+}
+
+/* Converts a HRESULT into a human-readable UTF-8 string via
+ * FormatMessage(), falling back to a symbolic-name table for codes the
+ * system has no text for. Returns a newly-allocated string; free with
+ * g_free() */
+static gchar *
+gst_wasapi2_util_hresult_to_string (HRESULT hr)
+{
+  DWORD flags;
+  gchar *ret_text;
+  LPTSTR error_text = NULL;
+
+  /* ALLOCATE_BUFFER makes FormatMessage allocate error_text via
+   * LocalAlloc(); it must be released with LocalFree() below */
+  flags = FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER
+      | FORMAT_MESSAGE_IGNORE_INSERTS;
+  FormatMessage (flags, NULL, hr, MAKELANGID (LANG_NEUTRAL, SUBLANG_DEFAULT),
+      (LPTSTR) & error_text, 0, NULL);
+
+  /* If we couldn't get the error msg, try the fallback switch statement */
+  if (error_text == NULL)
+    return g_strdup (hresult_to_string_fallback (hr));
+
+#ifdef UNICODE
+  /* If UNICODE is defined, LPTSTR is LPWSTR which is UTF-16.
+   * Pass -1 so the whole nul-terminated message is converted; a length of
+   * 0 would convert zero characters and always yield an empty string */
+  ret_text = g_utf16_to_utf8 (error_text, -1, NULL, NULL, NULL);
+#else
+  ret_text = g_strdup (error_text);
+#endif
+
+  LocalFree (error_text);
+  return ret_text;
+}
+
+/* Checks a WASAPI HRESULT, logging a warning (code plus human-readable
+ * text) against the given debug category on failure. Returns TRUE when
+ * the call succeeded, FALSE otherwise. Normally invoked through the
+ * gst_wasapi2_result() macro, which fills in cat/file/function/line. */
+gboolean
+_gst_wasapi2_result (HRESULT hr, GstDebugCategory * cat, const gchar * file,
+    const gchar * function, gint line)
+{
+  if (SUCCEEDED (hr))
+    return TRUE;
+
+#ifndef GST_DISABLE_GST_DEBUG
+  {
+    /* Only pay for the string conversion when debugging is compiled in */
+    gchar *msg = gst_wasapi2_util_hresult_to_string (hr);
+
+    gst_debug_log (cat, GST_LEVEL_WARNING, file, function, line,
+        NULL, "WASAPI call failed: 0x%x, %s", (guint) hr, msg);
+    g_free (msg);
+  }
+#endif
+
+  return FALSE;
+}
diff --git a/sys/wasapi2/gstwasapi2util.h b/sys/wasapi2/gstwasapi2util.h
new file mode 100644
index 000000000..a5a51ae2c
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2util.h
@@ -0,0 +1,47 @@
+/* GStreamer
+ * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_WASAPI2_UTIL_H__
+#define __GST_WASAPI2_UTIL_H__
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+#include <windows.h>
+
+G_BEGIN_DECLS
+
+/* Static Caps shared between source, sink, and device provider */
+#define GST_WASAPI2_STATIC_CAPS "audio/x-raw, " \
+ "format = (string) " GST_AUDIO_FORMATS_ALL ", " \
+ "layout = (string) interleaved, " \
+ "rate = " GST_AUDIO_RATE_RANGE ", " \
+ "channels = " GST_AUDIO_CHANNELS_RANGE
+
+gboolean _gst_wasapi2_result (HRESULT hr,
+ GstDebugCategory * cat,
+ const gchar * file,
+ const gchar * function,
+ gint line);
+
+#define gst_wasapi2_result(result) \
+ _gst_wasapi2_result (result, GST_CAT_DEFAULT, __FILE__, GST_FUNCTION, __LINE__)
+
+G_END_DECLS
+
+#endif /* __GST_WASAPI2_UTIL_H__ */
diff --git a/sys/wasapi2/meson.build b/sys/wasapi2/meson.build
new file mode 100644
index 000000000..742c4846d
--- /dev/null
+++ b/sys/wasapi2/meson.build
@@ -0,0 +1,92 @@
+wasapi2_sources = [
+  'gstwasapi2src.c',
+  'gstwasapi2sink.c',
+  'gstwasapi2util.c',
+  'gstwasapi2client.cpp',
+  'plugin.c',
+]
+
+# Symbols that mmdeviceapi.h must expose for the WinRT-based device
+# activation path to build
+mmdeviceapi_symbols = [
+  'ActivateAudioInterfaceAsync',
+  'DEVINTERFACE_AUDIO_RENDER',
+  'DEVINTERFACE_AUDIO_CAPTURE',
+]
+
+wasapi2_option = get_option('wasapi2')
+# Windows-only plugin: bail out quietly when disabled, loudly when the
+# user explicitly enabled it on another OS. With option 'auto' on a
+# non-Windows host we fall through and rely on the find_library() checks
+# below failing, which also ends in subdir_done().
+if host_system != 'windows'
+  if wasapi2_option.disabled()
+    subdir_done()
+  elif wasapi2_option.enabled()
+    error('Cannot build wasapi2 plugin when not building for Windows')
+  endif
+endif
+
+ole32_dep = cc.find_library('ole32', required : get_option('wasapi2'))
+ksuser_dep = cc.find_library('ksuser', required : get_option('wasapi2'))
+runtimeobject_dep = cc.find_library('runtimeobject', required : get_option('wasapi2'))
+mmdeviceapi_dep = cc.find_library('mmdevapi', required : get_option('wasapi2'))
+wasapi2_dep = [ole32_dep, ksuser_dep, runtimeobject_dep, mmdeviceapi_dep]
+# NOTE(review): have_symbols is assigned here but never read afterwards
+# in this file -- presumably leftover scaffolding; confirm before removal
+have_symbols = false
+
+foreach dep: wasapi2_dep
+  if not dep.found()
+    if wasapi2_option.enabled()
+      error('wasapi2 plugin was enabled explicitly, but required dependencies were not found')
+    else
+      subdir_done()
+    endif
+  endif
+endforeach
+
+# IAudioClient3 is only available on Windows 10 SDKs; the plugin cannot
+# build against older audioclient.h headers
+if not cxx.has_header_symbol ('audioclient.h', 'IAudioClient3', dependencies : wasapi2_dep)
+  if wasapi2_option.enabled()
+    error('wasapi2 plugin was enabled explicitly, but IAudioClient3 is unavailable')
+  else
+    subdir_done()
+  endif
+endif
+
+# Verify each required mmdeviceapi.h symbol is present in the SDK headers
+foreach symbol: mmdeviceapi_symbols
+  if not cxx.has_header_symbol ('mmdeviceapi.h', symbol, dependencies : wasapi2_dep)
+    if wasapi2_option.enabled()
+      # Meson's format() placeholders are zero-based, so the first (and
+      # only) argument is @0@ -- @1@ would be an out-of-range reference
+      error('wasapi2 plugin was enabled explicitly, but @0@ is unavailable'.format(symbol))
+    else
+      subdir_done()
+    endif
+  endif
+endforeach
+
+# Check that the build target includes WINAPI_PARTITION_APP and that the
+# WinRT/WRL/WASAPI headers are all usable together -- gstwasapi2client.cpp
+# requires this combination
+winapi_app = cxx.compiles('''#include <winapifamily.h>
+    #include <windows.applicationmodel.core.h>
+    #include <wrl.h>
+    #include <wrl/wrappers/corewrappers.h>
+    #include <audioclient.h>
+    #include <mmdeviceapi.h>
+    #if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP)
+    #error "not winrt"
+    #endif
+    int main (int argc, char ** argv) {
+      IAudioClient3 *client = NULL;
+      return 0;
+    } ''',
+    dependencies: wasapi2_dep,
+    name: 'checking if building for WINAPI_PARTITION_APP')
+
+if not winapi_app
+  if wasapi2_option.enabled()
+    error('wasapi2 plugin was enabled explicitly, but build target does not include WINAPI_PARTITION_APP')
+  else
+    subdir_done()
+  endif
+endif
+
+# Mixed C/C++ target: gstwasapi2client.cpp needs cpp_args, the plain C
+# sources get -DCOBJMACROS for the C-style COM interface macros
+gstwasapi2 = library('gstwasapi2',
+  wasapi2_sources,
+  c_args : gst_plugins_bad_args + ['-DCOBJMACROS'],
+  cpp_args : gst_plugins_bad_args,
+  include_directories : [configinc],
+  dependencies : [gstaudio_dep] + wasapi2_dep,
+  install : true,
+  install_dir : plugins_install_dir)
+pkgconfig.generate(gstwasapi2, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstwasapi2]
diff --git a/sys/wasapi2/plugin.c b/sys/wasapi2/plugin.c
new file mode 100644
index 000000000..fa88cb008
--- /dev/null
+++ b/sys/wasapi2/plugin.c
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <winapifamily.h>
+
+#include "gstwasapi2sink.h"
+#include "gstwasapi2src.h"
+
+GST_DEBUG_CATEGORY (gst_wasapi2_debug);
+GST_DEBUG_CATEGORY (gst_wasapi2_client_debug);
+
+/* Plugin entry point: registers the wasapi2 source and sink elements and
+ * sets up the debug categories. Returns FALSE if registration fails so
+ * GStreamer can report the plugin as broken instead of silently loading
+ * an empty plugin. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean ret;
+  GstRank rank = GST_RANK_SECONDARY;
+
+#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP) && !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
+  /* If we are building for UWP, wasapi2 plugin should have the highest rank */
+  rank = GST_RANK_PRIMARY + 1;
+#endif
+
+  GST_DEBUG_CATEGORY_INIT (gst_wasapi2_debug, "wasapi2", 0, "wasapi2");
+  GST_DEBUG_CATEGORY_INIT (gst_wasapi2_client_debug, "wasapi2client",
+      0, "wasapi2client");
+
+  /* Propagate registration failures instead of ignoring the results */
+  ret = gst_element_register (plugin, "wasapi2sink", rank,
+      GST_TYPE_WASAPI2_SINK);
+  ret &= gst_element_register (plugin, "wasapi2src", rank,
+      GST_TYPE_WASAPI2_SRC);
+
+  return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    wasapi2,
+    "Windows audio session API plugin",
+    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)