summaryrefslogtreecommitdiff
path: root/sys
diff options
context:
space:
mode:
authorSeungha Yang <seungha@centricular.com>2021-05-10 20:45:28 +0900
committerSeungha Yang <seungha@centricular.com>2021-06-08 19:39:27 +0900
commita8ec40c850b678272c279ea4914390ac7abc8408 (patch)
treeec6fe6075e6ab927876046f3c633c8a25a11c262 /sys
parent4b42671c9924674e77b8b35f3b22abab651e8bb3 (diff)
downloadgstreamer-plugins-bad-a8ec40c850b678272c279ea4914390ac7abc8408.tar.gz
wasapi2: Rewrite plugin and implement audioringbuffer subclass
... based on MediaFoundation work queue API. By this commit, wasapi2 plugin will make use of pull mode scheduling with audioringbuffer subclass. There are several drawbacks of audiosrc/audiosink subclassing (not audiobasesrc/audiobasesink) for WASAPI API, which are: * audiosrc/audiosink classes try to set high priority to read/write thread via MMCSS (Multimedia Class Scheduler Service) but it's not allowed in case of UWP application. In order to use MMCSS in UWP, application should use MediaFoundation work queue indirectly. Since audiosrc/audiosink scheduling model is not compatible with MediaFoundation's work queue model, audioringbuffer subclassing is required. * WASAPI capture device might report larger packet size than expected (i.e., larger frames we can read than expected frame size per period). Meanwhile, in any case, application should drain all packets at that moment. In order to handle the case, wasapi/wasapi2 plugins were making use of GstAdapter which is obviously sub-optimal because it requires additional memory allocation and copy. By implementing audioringbuffer subclassing, we can avoid such inefficiency. In this commit, all the device read/write operations will be moved to newly implemented wasapi2ringbuffer class and existing wasapi2client class will take care of device enumeration and activation parts only. Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/2306>
Diffstat (limited to 'sys')
-rw-r--r--sys/wasapi2/gstwasapi2client.cpp1137
-rw-r--r--sys/wasapi2/gstwasapi2client.h44
-rw-r--r--sys/wasapi2/gstwasapi2device.c2
-rw-r--r--sys/wasapi2/gstwasapi2ringbuffer.cpp1015
-rw-r--r--sys/wasapi2/gstwasapi2ringbuffer.h55
-rw-r--r--sys/wasapi2/gstwasapi2sink.c391
-rw-r--r--sys/wasapi2/gstwasapi2sink.h2
-rw-r--r--sys/wasapi2/gstwasapi2src.c388
-rw-r--r--sys/wasapi2/gstwasapi2src.h2
-rw-r--r--sys/wasapi2/gstwasapi2util.c237
-rw-r--r--sys/wasapi2/gstwasapi2util.h18
-rw-r--r--sys/wasapi2/meson.build4
-rw-r--r--sys/wasapi2/plugin.c18
13 files changed, 1629 insertions, 1684 deletions
diff --git a/sys/wasapi2/gstwasapi2client.cpp b/sys/wasapi2/gstwasapi2client.cpp
index b7555e4d5..c27d6f8ff 100644
--- a/sys/wasapi2/gstwasapi2client.cpp
+++ b/sys/wasapi2/gstwasapi2client.cpp
@@ -41,51 +41,6 @@
#include <locale>
#include <codecvt>
-/* Desktop only defines */
-#ifndef KSAUDIO_SPEAKER_MONO
-#define KSAUDIO_SPEAKER_MONO (SPEAKER_FRONT_CENTER)
-#endif
-#ifndef KSAUDIO_SPEAKER_1POINT1
-#define KSAUDIO_SPEAKER_1POINT1 (SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY)
-#endif
-#ifndef KSAUDIO_SPEAKER_STEREO
-#define KSAUDIO_SPEAKER_STEREO (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT)
-#endif
-#ifndef KSAUDIO_SPEAKER_2POINT1
-#define KSAUDIO_SPEAKER_2POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_LOW_FREQUENCY)
-#endif
-#ifndef KSAUDIO_SPEAKER_3POINT0
-#define KSAUDIO_SPEAKER_3POINT0 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER)
-#endif
-#ifndef KSAUDIO_SPEAKER_3POINT1
-#define KSAUDIO_SPEAKER_3POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
- SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY)
-#endif
-#ifndef KSAUDIO_SPEAKER_QUAD
-#define KSAUDIO_SPEAKER_QUAD (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
- SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT)
-#endif
-#define KSAUDIO_SPEAKER_SURROUND (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
- SPEAKER_FRONT_CENTER | SPEAKER_BACK_CENTER)
-#ifndef KSAUDIO_SPEAKER_5POINT0
-#define KSAUDIO_SPEAKER_5POINT0 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | \
- SPEAKER_SIDE_LEFT | SPEAKER_SIDE_RIGHT)
-#endif
-#define KSAUDIO_SPEAKER_5POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
- SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | \
- SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT)
-#ifndef KSAUDIO_SPEAKER_7POINT0
-#define KSAUDIO_SPEAKER_7POINT0 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | \
- SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | \
- SPEAKER_SIDE_LEFT | SPEAKER_SIDE_RIGHT)
-#endif
-#ifndef KSAUDIO_SPEAKER_7POINT1
-#define KSAUDIO_SPEAKER_7POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
- SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | \
- SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | \
- SPEAKER_FRONT_LEFT_OF_CENTER | SPEAKER_FRONT_RIGHT_OF_CENTER)
-#endif
-
/* *INDENT-OFF* */
using namespace ABI::Windows::ApplicationModel::Core;
using namespace ABI::Windows::Foundation;
@@ -107,8 +62,7 @@ G_END_DECLS
static void
gst_wasapi2_client_on_device_activated (GstWasapi2Client * client,
- IAudioClient3 * audio_client);
-
+ IAudioClient * audio_client);
/* *INDENT-OFF* */
class GstWasapiDeviceActivator
@@ -150,7 +104,7 @@ public:
STDMETHOD(ActivateCompleted)
(IActivateAudioInterfaceAsyncOperation *async_op)
{
- ComPtr<IAudioClient3> audio_client;
+ ComPtr<IAudioClient> audio_client;
HRESULT hr = S_OK;
HRESULT hr_async_op = S_OK;
ComPtr<IUnknown> audio_interface;
@@ -255,6 +209,7 @@ private:
ComPtr<ICoreDispatcher> dispatcher_;
};
/* *INDENT-ON* */
+
typedef enum
{
GST_WASAPI2_CLIENT_ACTIVATE_FAILED = -1,
@@ -270,47 +225,27 @@ enum
PROP_DEVICE_NAME,
PROP_DEVICE_INDEX,
PROP_DEVICE_CLASS,
- PROP_LOW_LATENCY,
PROP_DISPATCHER,
};
#define DEFAULT_DEVICE_INDEX -1
#define DEFAULT_DEVICE_CLASS GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE
-#define DEFAULT_LOW_LATENCY FALSE
struct _GstWasapi2Client
{
GstObject parent;
GstWasapi2ClientDeviceClass device_class;
- gboolean low_latency;
gchar *device_id;
gchar *device_name;
gint device_index;
gpointer dispatcher;
- IAudioClient3 *audio_client;
- IAudioCaptureClient *audio_capture_client;
- IAudioRenderClient *audio_render_client;
- ISimpleAudioVolume *audio_volume;
+ IAudioClient *audio_client;
GstWasapiDeviceActivator *activator;
- WAVEFORMATEX *mix_format;
GstCaps *supported_caps;
- HANDLE event_handle;
- HANDLE cancellable;
- gboolean opened;
- gboolean running;
-
- guint32 device_period;
- guint32 buffer_frame_count;
-
- GstAudioChannelPosition *positions;
-
- /* Used for capture mode */
- GstAdapter *adapter;
-
GThread *thread;
GMutex lock;
GCond cond;
@@ -330,7 +265,7 @@ gst_wasapi2_client_device_class_get_type (void)
static const GEnumValue types[] = {
{GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE, "Capture", "capture"},
{GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER, "Render", "render"},
- {0, NULL, NULL}
+ {0, nullptr, nullptr}
};
if (g_once_init_enter (&class_type)) {
@@ -342,7 +277,6 @@ gst_wasapi2_client_device_class_get_type (void)
}
static void gst_wasapi2_client_constructed (GObject * object);
-static void gst_wasapi2_client_dispose (GObject * object);
static void gst_wasapi2_client_finalize (GObject * object);
static void gst_wasapi2_client_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
@@ -365,17 +299,16 @@ gst_wasapi2_client_class_init (GstWasapi2ClientClass * klass)
G_PARAM_STATIC_STRINGS);
gobject_class->constructed = gst_wasapi2_client_constructed;
- gobject_class->dispose = gst_wasapi2_client_dispose;
gobject_class->finalize = gst_wasapi2_client_finalize;
gobject_class->get_property = gst_wasapi2_client_get_property;
gobject_class->set_property = gst_wasapi2_client_set_property;
g_object_class_install_property (gobject_class, PROP_DEVICE,
g_param_spec_string ("device", "Device",
- "WASAPI playback device as a GUID string", NULL, param_flags));
+ "WASAPI playback device as a GUID string", nullptr, param_flags));
g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
g_param_spec_string ("device-name", "Device Name",
- "The human-readable device name", NULL, param_flags));
+ "The human-readable device name", nullptr, param_flags));
g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
g_param_spec_int ("device-index", "Device Index",
"The zero-based device index", -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
@@ -384,10 +317,6 @@ gst_wasapi2_client_class_init (GstWasapi2ClientClass * klass)
g_param_spec_enum ("device-class", "Device Class",
"Device class", GST_TYPE_WASAPI2_CLIENT_DEVICE_CLASS,
DEFAULT_DEVICE_CLASS, param_flags));
- g_object_class_install_property (gobject_class, PROP_LOW_LATENCY,
- g_param_spec_boolean ("low-latency", "Low latency",
- "Optimize all settings for lowest latency. Always safe to enable.",
- DEFAULT_LOW_LATENCY, param_flags));
g_object_class_install_property (gobject_class, PROP_DISPATCHER,
g_param_spec_pointer ("dispatcher", "Dispatcher",
"ICoreDispatcher COM object to use", param_flags));
@@ -398,11 +327,6 @@ gst_wasapi2_client_init (GstWasapi2Client * self)
{
self->device_index = DEFAULT_DEVICE_INDEX;
self->device_class = DEFAULT_DEVICE_CLASS;
- self->low_latency = DEFAULT_LOW_LATENCY;
-
- self->adapter = gst_adapter_new ();
- self->event_handle = CreateEvent (NULL, FALSE, FALSE, NULL);
- self->cancellable = CreateEvent (NULL, TRUE, FALSE, NULL);
g_mutex_init (&self->lock);
g_cond_init (&self->cond);
@@ -438,44 +362,26 @@ gst_wasapi2_client_constructed (GObject * object)
}
static void
-gst_wasapi2_client_dispose (GObject * object)
+gst_wasapi2_client_finalize (GObject * object)
{
GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
- GST_DEBUG_OBJECT (self, "dispose");
-
- gst_clear_caps (&self->supported_caps);
-
if (self->loop) {
g_main_loop_quit (self->loop);
g_thread_join (self->thread);
g_main_context_unref (self->context);
g_main_loop_unref (self->loop);
- self->thread = NULL;
- self->context = NULL;
- self->loop = NULL;
+ self->thread = nullptr;
+ self->context = nullptr;
+ self->loop = nullptr;
}
- g_clear_object (&self->adapter);
-
- G_OBJECT_CLASS (parent_class)->dispose (object);
-}
-
-static void
-gst_wasapi2_client_finalize (GObject * object)
-{
- GstWasapi2Client *self = GST_WASAPI2_CLIENT (object);
+ gst_clear_caps (&self->supported_caps);
g_free (self->device_id);
g_free (self->device_name);
- g_free (self->positions);
-
- CoTaskMemFree (self->mix_format);
- CloseHandle (self->event_handle);
- CloseHandle (self->cancellable);
-
g_mutex_clear (&self->lock);
g_cond_clear (&self->cond);
@@ -504,9 +410,6 @@ gst_wasapi2_client_get_property (GObject * object, guint prop_id,
case PROP_DEVICE_CLASS:
g_value_set_enum (value, self->device_class);
break;
- case PROP_LOW_LATENCY:
- g_value_set_boolean (value, self->low_latency);
- break;
case PROP_DISPATCHER:
g_value_set_pointer (value, self->dispatcher);
break;
@@ -538,9 +441,6 @@ gst_wasapi2_client_set_property (GObject * object, guint prop_id,
self->device_class =
(GstWasapi2ClientDeviceClass) g_value_get_enum (value);
break;
- case PROP_LOW_LATENCY:
- self->low_latency = g_value_get_boolean (value);
- break;
case PROP_DISPATCHER:
self->dispatcher = g_value_get_pointer (value);
break;
@@ -564,7 +464,7 @@ gst_wasapi2_client_main_loop_running_cb (GstWasapi2Client * self)
static void
gst_wasapi2_client_on_device_activated (GstWasapi2Client * self,
- IAudioClient3 * audio_client)
+ IAudioClient * audio_client)
{
GST_INFO_OBJECT (self, "Device activated");
@@ -912,7 +812,7 @@ run_loop:
source = g_idle_source_new ();
g_source_set_callback (source,
- (GSourceFunc) gst_wasapi2_client_main_loop_running_cb, self, NULL);
+ (GSourceFunc) gst_wasapi2_client_main_loop_running_cb, self, nullptr);
g_source_attach (source, self->context);
g_source_unref (source);
@@ -922,32 +822,7 @@ run_loop:
g_main_context_pop_thread_default (self->context);
- gst_wasapi2_client_stop (self);
-
- if (self->audio_volume) {
- /* this mute state seems to be global setting for this device
- * Explicitly disable mute for later use of this audio device
- * by other application. Otherwise users would blame GStreamer
- * if we close audio device with muted state */
- self->audio_volume->SetMute (FALSE, nullptr);
- self->audio_volume->Release ();
- self->audio_volume = NULL;
- }
-
- if (self->audio_render_client) {
- self->audio_render_client->Release ();
- self->audio_render_client = NULL;
- }
-
- if (self->audio_capture_client) {
- self->audio_capture_client->Release ();
- self->audio_capture_client = NULL;
- }
-
- if (self->audio_client) {
- self->audio_client->Release ();
- self->audio_client = NULL;
- }
+ GST_WASAPI2_CLEAR_COM (self->audio_client);
/* Reset explicitly to ensure that it happens before
* RoInitializeWrapper dtor is called */
@@ -955,986 +830,48 @@ run_loop:
GST_DEBUG_OBJECT (self, "Exit thread function");
- return NULL;
-}
-
-static const gchar *
-gst_waveformatex_to_audio_format (WAVEFORMATEXTENSIBLE * format)
-{
- const gchar *fmt_str = NULL;
- GstAudioFormat fmt = GST_AUDIO_FORMAT_UNKNOWN;
-
- if (format->Format.wFormatTag == WAVE_FORMAT_PCM) {
- fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
- format->Format.wBitsPerSample, format->Format.wBitsPerSample);
- } else if (format->Format.wFormatTag == WAVE_FORMAT_IEEE_FLOAT) {
- if (format->Format.wBitsPerSample == 32)
- fmt = GST_AUDIO_FORMAT_F32LE;
- else if (format->Format.wBitsPerSample == 64)
- fmt = GST_AUDIO_FORMAT_F64LE;
- } else if (format->Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
- if (IsEqualGUID (format->SubFormat, KSDATAFORMAT_SUBTYPE_PCM)) {
- fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
- format->Format.wBitsPerSample, format->Samples.wValidBitsPerSample);
- } else if (IsEqualGUID (format->SubFormat, KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)) {
- if (format->Format.wBitsPerSample == 32
- && format->Samples.wValidBitsPerSample == 32)
- fmt = GST_AUDIO_FORMAT_F32LE;
- else if (format->Format.wBitsPerSample == 64 &&
- format->Samples.wValidBitsPerSample == 64)
- fmt = GST_AUDIO_FORMAT_F64LE;
- }
- }
-
- if (fmt != GST_AUDIO_FORMAT_UNKNOWN)
- fmt_str = gst_audio_format_to_string (fmt);
-
- return fmt_str;
-}
-
-static void
-gst_wasapi_util_channel_position_all_none (guint channels,
- GstAudioChannelPosition * position)
-{
- int ii;
- for (ii = 0; ii < channels; ii++)
- position[ii] = GST_AUDIO_CHANNEL_POSITION_NONE;
-}
-
-static struct
-{
- guint64 wasapi_pos;
- GstAudioChannelPosition gst_pos;
-} wasapi_to_gst_pos[] = {
- {SPEAKER_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT},
- {SPEAKER_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT},
- {SPEAKER_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER},
- {SPEAKER_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE1},
- {SPEAKER_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT},
- {SPEAKER_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT},
- {SPEAKER_FRONT_LEFT_OF_CENTER,
- GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER},
- {SPEAKER_FRONT_RIGHT_OF_CENTER,
- GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
- {SPEAKER_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_REAR_CENTER},
- /* Enum values diverge from this point onwards */
- {SPEAKER_SIDE_LEFT, GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT},
- {SPEAKER_SIDE_RIGHT, GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT},
- {SPEAKER_TOP_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_CENTER},
- {SPEAKER_TOP_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT},
- {SPEAKER_TOP_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER},
- {SPEAKER_TOP_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT},
- {SPEAKER_TOP_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT},
- {SPEAKER_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER},
- {SPEAKER_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT}
-};
-
-static DWORD default_ch_masks[] = {
- 0,
- KSAUDIO_SPEAKER_MONO,
- /* 2ch */
- KSAUDIO_SPEAKER_STEREO,
- /* 2.1ch */
- /* KSAUDIO_SPEAKER_3POINT0 ? */
- KSAUDIO_SPEAKER_2POINT1,
- /* 4ch */
- /* KSAUDIO_SPEAKER_3POINT1 or KSAUDIO_SPEAKER_SURROUND ? */
- KSAUDIO_SPEAKER_QUAD,
- /* 5ch */
- KSAUDIO_SPEAKER_5POINT0,
- /* 5.1ch */
- KSAUDIO_SPEAKER_5POINT1,
- /* 7ch */
- KSAUDIO_SPEAKER_7POINT0,
- /* 7.1ch */
- KSAUDIO_SPEAKER_7POINT1,
-};
-
-/* Parse WAVEFORMATEX to get the gstreamer channel mask, and the wasapi channel
- * positions so GstAudioRingbuffer can reorder the audio data to match the
- * gstreamer channel order. */
-static guint64
-gst_wasapi2_util_waveformatex_to_channel_mask (WORD nChannels,
- DWORD dwChannelMask, GstAudioChannelPosition ** out_position)
-{
- int ii, ch;
- guint64 mask = 0;
- GstAudioChannelPosition *pos = NULL;
-
- pos = g_new (GstAudioChannelPosition, nChannels);
- gst_wasapi_util_channel_position_all_none (nChannels, pos);
-
- /* Too many channels, have to assume that they are all non-positional */
- if (nChannels > G_N_ELEMENTS (wasapi_to_gst_pos)) {
- GST_INFO ("Got too many (%i) channels, assuming non-positional", nChannels);
- goto out;
- }
-
- /* Too many bits in the channel mask, and the bits don't match nChannels */
- if (dwChannelMask >> (G_N_ELEMENTS (wasapi_to_gst_pos) + 1) != 0) {
- GST_WARNING ("Too many bits in channel mask (%lu), assuming "
- "non-positional", dwChannelMask);
- goto out;
- }
-
- /* Map WASAPI's channel mask to Gstreamer's channel mask and positions.
- * If the no. of bits in the mask > nChannels, we will ignore the extra. */
- for (ii = 0, ch = 0; ii < G_N_ELEMENTS (wasapi_to_gst_pos) && ch < nChannels;
- ii++) {
- if (!(dwChannelMask & wasapi_to_gst_pos[ii].wasapi_pos))
- /* no match, try next */
- continue;
- mask |= G_GUINT64_CONSTANT (1) << wasapi_to_gst_pos[ii].gst_pos;
- pos[ch++] = wasapi_to_gst_pos[ii].gst_pos;
- }
-
- /* XXX: Warn if some channel masks couldn't be mapped? */
-
- GST_DEBUG ("Converted WASAPI mask 0x%" G_GINT64_MODIFIER "x -> 0x%"
- G_GINT64_MODIFIER "x", (guint64) dwChannelMask, (guint64) mask);
-
-out:
- if (out_position)
- *out_position = pos;
- return mask;
-}
-
-static gboolean
-gst_wasapi2_util_parse_waveformatex (WAVEFORMATEXTENSIBLE * format,
- GstCaps * template_caps, GstCaps ** out_caps,
- GstAudioChannelPosition ** out_positions)
-{
- const gchar *afmt;
- guint64 channel_mask = 0;
- DWORD dwChannelMask = 0;
- WORD nChannels;
-
- *out_caps = NULL;
-
- /* TODO: handle SPDIF and other encoded formats */
-
- /* 1 or 2 channels <= 16 bits sample size OR
- * 1 or 2 channels > 16 bits sample size or >2 channels */
- if (format->Format.wFormatTag != WAVE_FORMAT_PCM &&
- format->Format.wFormatTag != WAVE_FORMAT_IEEE_FLOAT &&
- format->Format.wFormatTag != WAVE_FORMAT_EXTENSIBLE)
- /* Unhandled format tag */
- return FALSE;
-
- /* WASAPI can only tell us one canonical mix format that it will accept. The
- * alternative is calling IsFormatSupported on all combinations of formats.
- * Instead, it's simpler and faster to require conversion inside gstreamer */
- afmt = gst_waveformatex_to_audio_format (format);
- if (afmt == NULL)
- return FALSE;
-
- *out_caps = gst_caps_copy (template_caps);
-
- nChannels = format->Format.nChannels;
- if (format->Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
- dwChannelMask = format->dwChannelMask;
- }
-
- if (nChannels > 2 && !dwChannelMask) {
- GST_WARNING ("Unknown channel mask value for %d channel stream",
- format->Format.nChannels);
- if (nChannels >= G_N_ELEMENTS (default_ch_masks)) {
- GST_ERROR ("To may channels %d", nChannels);
- return FALSE;
- }
-
- dwChannelMask = default_ch_masks[nChannels];
- }
-
- channel_mask =
- gst_wasapi2_util_waveformatex_to_channel_mask (nChannels,
- dwChannelMask, out_positions);
-
- gst_caps_set_simple (*out_caps,
- "format", G_TYPE_STRING, afmt,
- "channels", G_TYPE_INT, format->Format.nChannels,
- "rate", G_TYPE_INT, format->Format.nSamplesPerSec, NULL);
-
- if (channel_mask) {
- gst_caps_set_simple (*out_caps,
- "channel-mask", GST_TYPE_BITMASK, channel_mask, NULL);
- }
-
- return TRUE;
+ return nullptr;
}
GstCaps *
gst_wasapi2_client_get_caps (GstWasapi2Client * client)
{
- WAVEFORMATEX *format = NULL;
+ WAVEFORMATEX *mix_format = nullptr;
static GstStaticCaps static_caps = GST_STATIC_CAPS (GST_WASAPI2_STATIC_CAPS);
GstCaps *scaps;
HRESULT hr;
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), NULL);
+ g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), nullptr);
if (client->supported_caps)
return gst_caps_ref (client->supported_caps);
if (!client->audio_client) {
GST_WARNING_OBJECT (client, "IAudioClient3 wasn't configured");
- return NULL;
+ return nullptr;
}
- CoTaskMemFree (client->mix_format);
- client->mix_format = nullptr;
-
- g_clear_pointer (&client->positions, g_free);
-
- hr = client->audio_client->GetMixFormat (&format);
- if (!gst_wasapi2_result (hr))
- return NULL;
+ hr = client->audio_client->GetMixFormat (&mix_format);
+ if (!gst_wasapi2_result (hr)) {
+ GST_WARNING_OBJECT (client, "Failed to get mix format");
+ return nullptr;
+ }
scaps = gst_static_caps_get (&static_caps);
- gst_wasapi2_util_parse_waveformatex ((WAVEFORMATEXTENSIBLE *) format,
- scaps, &client->supported_caps, &client->positions);
+ gst_wasapi2_util_parse_waveformatex (mix_format,
+ scaps, &client->supported_caps, nullptr);
gst_caps_unref (scaps);
- client->mix_format = format;
+ CoTaskMemFree (mix_format);
if (!client->supported_caps) {
GST_ERROR_OBJECT (client, "No caps from subclass");
- return NULL;
+ return nullptr;
}
return gst_caps_ref (client->supported_caps);
}
-static HRESULT
-gst_wasapi2_client_initialize_audio_client3 (GstWasapi2Client * self)
-{
- HRESULT hr = S_OK;
- UINT32 default_period, fundamental_period, min_period, max_period;
- /* AUDCLNT_STREAMFLAGS_NOPERSIST is not allowed for
- * InitializeSharedAudioStream */
- DWORD stream_flags = AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
- WAVEFORMATEX *format = NULL;
- UINT32 period;
- IAudioClient3 *audio_client = self->audio_client;
-
- hr = audio_client->GetSharedModeEnginePeriod (self->mix_format,
- &default_period, &fundamental_period, &min_period, &max_period);
- if (!gst_wasapi2_result (hr))
- goto done;
-
- GST_INFO_OBJECT (self, "Using IAudioClient3, default period %d frames, "
- "fundamental period %d frames, minimum period %d frames, maximum period "
- "%d frames", default_period, fundamental_period, min_period, max_period);
-
- hr = audio_client->InitializeSharedAudioStream (stream_flags, min_period,
- self->mix_format, nullptr);
-
- if (!gst_wasapi2_result (hr)) {
- GST_WARNING_OBJECT (self, "Failed to initialize IAudioClient3");
- goto done;
- }
-
- /* query period again to be ensured */
- hr = audio_client->GetCurrentSharedModeEnginePeriod (&format, &period);
- if (!gst_wasapi2_result (hr)) {
- GST_WARNING_OBJECT (self, "Failed to get current period");
- goto done;
- }
-
- self->device_period = period;
-
-done:
- CoTaskMemFree (format);
-
- return hr;
-}
-
-static void
-gst_wasapi2_util_get_best_buffer_sizes (GstAudioRingBufferSpec * spec,
- REFERENCE_TIME default_period, REFERENCE_TIME min_period,
- REFERENCE_TIME * ret_period, REFERENCE_TIME * ret_buffer_duration)
-{
- REFERENCE_TIME use_period, use_buffer;
-
- /* Shared mode always runs at the default period, so if we want a larger
- * period (for lower CPU usage), we do it as a multiple of that */
- use_period = default_period;
-
- /* Ensure that the period (latency_time) used is an integral multiple of
- * either the default period or the minimum period */
- use_period = use_period * MAX ((spec->latency_time * 10) / use_period, 1);
-
- /* Ask WASAPI to create a software ringbuffer of at least this size; it may
- * be larger so the actual buffer time may be different, which is why after
- * initialization we read the buffer duration actually in-use and set
- * segsize/segtotal from that. */
- use_buffer = spec->buffer_time * 10;
- /* Has to be at least twice the period */
- if (use_buffer < 2 * use_period)
- use_buffer = 2 * use_period;
-
- *ret_period = use_period;
- *ret_buffer_duration = use_buffer;
-}
-
-static HRESULT
-gst_wasapi2_client_initialize_audio_client (GstWasapi2Client * self,
- GstAudioRingBufferSpec * spec)
-{
- REFERENCE_TIME default_period, min_period;
- REFERENCE_TIME device_period, device_buffer_duration;
- guint rate;
- DWORD stream_flags =
- AUDCLNT_STREAMFLAGS_EVENTCALLBACK | AUDCLNT_STREAMFLAGS_NOPERSIST;
- HRESULT hr;
- IAudioClient3 *audio_client = self->audio_client;
-
- hr = audio_client->GetDevicePeriod (&default_period, &min_period);
- if (!gst_wasapi2_result (hr)) {
- GST_WARNING_OBJECT (self, "Couldn't get device period info");
- return hr;
- }
-
- GST_INFO_OBJECT (self, "wasapi2 default period: %" G_GINT64_FORMAT
- ", min period: %" G_GINT64_FORMAT, default_period, min_period);
-
- rate = GST_AUDIO_INFO_RATE (&spec->info);
-
- if (self->low_latency) {
- device_period = default_period;
- /* this should be same as hnsPeriodicity
- * when AUDCLNT_STREAMFLAGS_EVENTCALLBACK is used
- * And in case of shared mode, hnsPeriodicity should be zero, so
- * this value should be zero as well */
- device_buffer_duration = 0;
- } else {
- /* Clamp values to integral multiples of an appropriate period */
- gst_wasapi2_util_get_best_buffer_sizes (spec,
- default_period, min_period, &device_period, &device_buffer_duration);
- }
-
- hr = audio_client->Initialize (AUDCLNT_SHAREMODE_SHARED, stream_flags,
- device_buffer_duration,
- /* This must always be 0 in shared mode */
- 0, self->mix_format, nullptr);
- if (!gst_wasapi2_result (hr)) {
- GST_WARNING_OBJECT (self, "Couldn't initialize audioclient");
- return hr;
- }
-
- /* device_period can be a non-power-of-10 value so round while converting */
- self->device_period =
- gst_util_uint64_scale_round (device_period, rate * 100, GST_SECOND);
-
- return S_OK;
-}
-
-HRESULT
-gst_wasapi2_client_open (GstWasapi2Client * client,
- GstAudioRingBufferSpec * spec, GstAudioRingBuffer * buf)
-{
- HRESULT hr = E_FAIL;
- REFERENCE_TIME latency_rt;
- guint bpf, rate;
- IAudioClient3 *audio_client;
- /* *INDENT-OFF* */
- ComPtr<ISimpleAudioVolume> audio_volume;
- /* *INDENT-ON* */
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
-
- /* FIXME: Once IAudioClient3 was initialized, we may need to re-open
- * IAudioClient3 in order to handle audio format change */
- if (client->opened) {
- GST_INFO_OBJECT (client, "IAudioClient3 object is initialized already");
- return S_OK;
- }
-
- audio_client = client->audio_client;
-
- if (!audio_client) {
- GST_ERROR_OBJECT (client, "IAudioClient3 object wasn't configured");
- return E_FAIL;
- }
-
- if (!client->mix_format) {
- GST_ERROR_OBJECT (client, "Unknown mix format");
- return E_FAIL;
- }
-
- /* Only use audioclient3 when low-latency is requested because otherwise
- * very slow machines and VMs with 1 CPU allocated will get glitches:
- * https://bugzilla.gnome.org/show_bug.cgi?id=794497 */
- if (client->low_latency)
- hr = gst_wasapi2_client_initialize_audio_client3 (client);
-
- /* Try again if IAudioClinet3 API is unavailable.
- * NOTE: IAudioClinet3:: methods might not be available for default device
- * NOTE: The default device is a special device which is needed for supporting
- * automatic stream routing
- * https://docs.microsoft.com/en-us/windows/win32/coreaudio/automatic-stream-routing
- */
- if (FAILED (hr))
- hr = gst_wasapi2_client_initialize_audio_client (client, spec);
-
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client, "Failed to initialize audioclient");
- return hr;
- }
-
- bpf = GST_AUDIO_INFO_BPF (&spec->info);
- rate = GST_AUDIO_INFO_RATE (&spec->info);
-
- /* Total size in frames of the allocated buffer that we will read from */
- hr = audio_client->GetBufferSize (&client->buffer_frame_count);
- if (!gst_wasapi2_result (hr))
- return hr;
-
- GST_INFO_OBJECT (client, "buffer size is %i frames, device period is %i "
- "frames, bpf is %i bytes, rate is %i Hz", client->buffer_frame_count,
- client->device_period, bpf, rate);
-
- /* Actual latency-time/buffer-time will be different now */
- spec->segsize = client->device_period * bpf;
-
- /* We need a minimum of 2 segments to ensure glitch-free playback */
- spec->segtotal = MAX (client->buffer_frame_count * bpf / spec->segsize, 2);
-
- GST_INFO_OBJECT (client, "segsize is %i, segtotal is %i", spec->segsize,
- spec->segtotal);
-
- /* Get WASAPI latency for logging */
- hr = audio_client->GetStreamLatency (&latency_rt);
- if (!gst_wasapi2_result (hr))
- return hr;
-
- GST_INFO_OBJECT (client, "wasapi2 stream latency: %" G_GINT64_FORMAT " (%"
- G_GINT64_FORMAT " ms)", latency_rt, latency_rt / 10000);
-
- /* Set the event handler which will trigger read/write */
- hr = audio_client->SetEventHandle (client->event_handle);
- if (!gst_wasapi2_result (hr))
- return hr;
-
- if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
- /* *INDENT-OFF* */
- ComPtr<IAudioRenderClient> render_client;
- /* *INDENT-ON* */
-
- hr = audio_client->GetService (IID_PPV_ARGS (&render_client));
- if (!gst_wasapi2_result (hr))
- return hr;
-
- client->audio_render_client = render_client.Detach ();
- } else {
- /* *INDENT-OFF* */
- ComPtr<IAudioCaptureClient> capture_client;
- /* *INDENT-ON* */
-
- hr = audio_client->GetService (IID_PPV_ARGS (&capture_client));
- if (!gst_wasapi2_result (hr))
- return hr;
-
- client->audio_capture_client = capture_client.Detach ();
- }
-
- hr = audio_client->GetService (IID_PPV_ARGS (&audio_volume));
- if (!gst_wasapi2_result (hr))
- return hr;
-
- client->audio_volume = audio_volume.Detach ();
- client->audio_volume->SetMute (FALSE, nullptr);
-
- gst_audio_ring_buffer_set_channel_positions (buf, client->positions);
-
- client->opened = TRUE;
-
- return S_OK;
-}
-
-/* Get the empty space in the buffer that we have to write to */
-static HRESULT
-gst_wasapi2_client_get_can_frames (GstWasapi2Client * self, guint32 * n_frames)
-{
- HRESULT hr;
- UINT32 n_frames_padding;
- IAudioClient3 *audio_client = self->audio_client;
-
- *n_frames = 0;
-
- if (!audio_client) {
- GST_WARNING_OBJECT (self, "IAudioClient3 wasn't configured");
- return E_FAIL;
- }
-
- /* Frames the card hasn't rendered yet */
- hr = audio_client->GetCurrentPadding (&n_frames_padding);
- if (!gst_wasapi2_result (hr))
- return hr;
-
- GST_LOG_OBJECT (self, "%d unread frames (padding)", n_frames_padding);
-
- /* We can write out these many frames */
- *n_frames = self->buffer_frame_count - n_frames_padding;
-
- return S_OK;
-}
-
-HRESULT
-gst_wasapi2_client_start (GstWasapi2Client * client)
-{
- HRESULT hr;
- IAudioClient3 *audio_client;
- WAVEFORMATEX *mix_format;
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
-
- if (client->running) {
- GST_WARNING_OBJECT (client, "IAudioClient3 is running already");
- return S_OK;
- }
-
- audio_client = client->audio_client;
- mix_format = client->mix_format;
-
- if (!audio_client) {
- GST_ERROR_OBJECT (client, "IAudioClient object wasn't configured");
- return E_FAIL;
- }
-
- if (!mix_format) {
- GST_ERROR_OBJECT (client, "Unknown MixFormat");
- return E_FAIL;
- }
-
- if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE &&
- !client->audio_capture_client) {
- GST_ERROR_OBJECT (client, "IAudioCaptureClient wasn't configured");
- return E_FAIL;
- }
-
- if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER &&
- !client->audio_render_client) {
- GST_ERROR_OBJECT (client, "IAudioRenderClient wasn't configured");
- return E_FAIL;
- }
-
- ResetEvent (client->cancellable);
-
- /* To avoid start-up glitches, before starting the streaming, we fill the
- * buffer with silence as recommended by the documentation:
- * https://msdn.microsoft.com/en-us/library/windows/desktop/dd370879%28v=vs.85%29.aspx */
- if (client->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
- IAudioRenderClient *render_client = client->audio_render_client;
- guint32 n_frames, len;
- BYTE *dst = NULL;
-
- hr = gst_wasapi2_client_get_can_frames (client, &n_frames);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client,
- "should have more than %i frames to write", n_frames);
- return hr;
- }
-
- len = n_frames * mix_format->nBlockAlign;
-
- hr = render_client->GetBuffer (n_frames, &dst);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client, "Couldn't get buffer");
- return hr;
- }
-
- GST_DEBUG_OBJECT (client, "pre-wrote %i bytes of silence", len);
-
- hr = render_client->ReleaseBuffer (n_frames, AUDCLNT_BUFFERFLAGS_SILENT);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client, "Couldn't release buffer");
- return hr;
- }
- }
-
- hr = audio_client->Start ();
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client, "Couldn't start audio client");
- return hr;
- }
-
- client->running = TRUE;
- gst_adapter_clear (client->adapter);
-
- return S_OK;
-}
-
-HRESULT
-gst_wasapi2_client_stop (GstWasapi2Client * client)
-{
- HRESULT hr;
- IAudioClient3 *audio_client;
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
-
- audio_client = client->audio_client;
-
- if (!client->running) {
- GST_DEBUG_OBJECT (client, "We are not running now");
- return S_OK;
- }
-
- if (!client->audio_client) {
- GST_ERROR_OBJECT (client, "IAudioClient3 object wasn't configured");
- return E_FAIL;
- }
-
- client->running = FALSE;
- SetEvent (client->cancellable);
-
- hr = audio_client->Stop ();
- if (!gst_wasapi2_result (hr))
- return hr;
-
- /* reset state for reuse case */
- return audio_client->Reset ();
-}
-
-HRESULT
-gst_wasapi2_client_read (GstWasapi2Client * client, gpointer data, guint length,
- guint * read_length)
-{
- IAudioCaptureClient *capture_client;
- WAVEFORMATEX *mix_format;
- HRESULT hr;
- BYTE *from = NULL;
- guint wanted = length;
- guint bpf;
- DWORD flags;
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
- g_return_val_if_fail (client->audio_capture_client != NULL, E_INVALIDARG);
- g_return_val_if_fail (client->mix_format != NULL, E_INVALIDARG);
- g_return_val_if_fail (read_length != NULL, E_INVALIDARG);
-
- *read_length = 0;
-
- capture_client = client->audio_capture_client;
- mix_format = client->mix_format;
-
- if (!client->running) {
- GST_ERROR_OBJECT (client, "client is not running now");
- return E_FAIL;
- }
-
- /* If we've accumulated enough data, return it immediately */
- if (gst_adapter_available (client->adapter) >= wanted) {
- memcpy (data, gst_adapter_map (client->adapter, wanted), wanted);
- gst_adapter_flush (client->adapter, wanted);
- GST_DEBUG_OBJECT (client, "Adapter has enough data, returning %i", wanted);
-
- *read_length = wanted;
-
- return S_OK;
- }
-
- bpf = mix_format->nBlockAlign;
-
- while (wanted > 0) {
- DWORD dwWaitResult;
- guint got_frames, avail_frames, n_frames, want_frames, read_len;
- HANDLE event_handle[2];
-
- event_handle[0] = client->event_handle;
- event_handle[1] = client->cancellable;
-
- /* Wait for data to become available */
- dwWaitResult = WaitForMultipleObjects (2, event_handle, FALSE, INFINITE);
- if (dwWaitResult != WAIT_OBJECT_0 && dwWaitResult != WAIT_OBJECT_0 + 1) {
- GST_ERROR_OBJECT (client, "Error waiting for event handle: %x",
- (guint) dwWaitResult);
- return E_FAIL;
- }
-
- if (!client->running) {
- GST_DEBUG_OBJECT (client, "Cancelled");
- return S_OK;
- }
-
- hr = capture_client->GetBuffer (&from, &got_frames, &flags, nullptr,
- nullptr);
- if (!gst_wasapi2_result (hr)) {
- if (hr == AUDCLNT_S_BUFFER_EMPTY) {
- GST_INFO_OBJECT (client, "Client buffer is empty, retry");
- return S_OK;
- }
-
- GST_ERROR_OBJECT (client, "Couldn't get buffer from capture client");
- return hr;
- }
-
- if (got_frames == 0) {
- GST_DEBUG_OBJECT (client, "No buffer to read");
- capture_client->ReleaseBuffer (got_frames);
- return S_OK;
- }
-
- if (G_UNLIKELY (flags != 0)) {
- /* https://docs.microsoft.com/en-us/windows/win32/api/audioclient/ne-audioclient-_audclnt_bufferflags */
- if (flags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY)
- GST_DEBUG_OBJECT (client, "WASAPI reported discontinuity (glitch?)");
- if (flags & AUDCLNT_BUFFERFLAGS_TIMESTAMP_ERROR)
- GST_DEBUG_OBJECT (client, "WASAPI reported a timestamp error");
- }
-
- /* Copy all the frames we got into the adapter, and then extract at most
- * @wanted size of frames from it. This helps when ::GetBuffer returns more
- * data than we can handle right now. */
- {
- GstBuffer *tmp = gst_buffer_new_allocate (NULL, got_frames * bpf, NULL);
- /* If flags has AUDCLNT_BUFFERFLAGS_SILENT, we will ignore the actual
- * data and write out silence, see:
- * https://docs.microsoft.com/en-us/windows/win32/api/audioclient/ne-audioclient-_audclnt_bufferflags */
- if (flags & AUDCLNT_BUFFERFLAGS_SILENT)
- memset (from, 0, got_frames * bpf);
- gst_buffer_fill (tmp, 0, from, got_frames * bpf);
- gst_adapter_push (client->adapter, tmp);
- }
-
- /* Release all captured buffers; we copied them above */
- hr = capture_client->ReleaseBuffer (got_frames);
- from = NULL;
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client, "Failed to release buffer");
- return hr;
- }
-
- want_frames = wanted / bpf;
- avail_frames = gst_adapter_available (client->adapter) / bpf;
-
- /* Only copy data that will fit into the allocated buffer of size @length */
- n_frames = MIN (avail_frames, want_frames);
- read_len = n_frames * bpf;
-
- if (read_len == 0) {
- GST_WARNING_OBJECT (client, "No data to read");
- return S_OK;
- }
-
- GST_LOG_OBJECT (client, "frames captured: %d (%d bytes), "
- "can read: %d (%d bytes), will read: %d (%d bytes), "
- "adapter has: %d (%d bytes)", got_frames, got_frames * bpf, want_frames,
- wanted, n_frames, read_len, avail_frames, avail_frames * bpf);
-
- memcpy (data, gst_adapter_map (client->adapter, read_len), read_len);
- gst_adapter_flush (client->adapter, read_len);
- wanted -= read_len;
- }
-
- *read_length = length;
-
- return S_OK;
-}
-
-HRESULT
-gst_wasapi2_client_write (GstWasapi2Client * client, gpointer data,
- guint length, guint * write_length)
-{
- IAudioRenderClient *render_client;
- WAVEFORMATEX *mix_format;
- HRESULT hr;
- BYTE *dst = nullptr;
- DWORD dwWaitResult;
- guint can_frames, have_frames, n_frames, write_len = 0;
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
- g_return_val_if_fail (client->audio_render_client != NULL, E_INVALIDARG);
- g_return_val_if_fail (client->mix_format != NULL, E_INVALIDARG);
- g_return_val_if_fail (write_length != NULL, E_INVALIDARG);
-
- *write_length = 0;
-
- if (!client->running) {
- GST_WARNING_OBJECT (client, "client is not running now");
- return -1;
- }
-
- render_client = client->audio_render_client;
- mix_format = client->mix_format;
-
- /* We have N frames to be written out */
- have_frames = length / (mix_format->nBlockAlign);
-
- /* In shared mode we can write parts of the buffer, so only wait
- * in case we can't write anything */
- hr = gst_wasapi2_client_get_can_frames (client, &can_frames);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client, "Error getting frames to write to");
- return hr;
- }
-
- if (can_frames == 0) {
- HANDLE event_handle[2];
-
- event_handle[0] = client->event_handle;
- event_handle[1] = client->cancellable;
-
- dwWaitResult = WaitForMultipleObjects (2, event_handle, FALSE, INFINITE);
- if (dwWaitResult != WAIT_OBJECT_0 && dwWaitResult != WAIT_OBJECT_0 + 1) {
- GST_ERROR_OBJECT (client, "Error waiting for event handle: %x",
- (guint) dwWaitResult);
- return E_FAIL;
- }
-
- if (!client->running) {
- GST_DEBUG_OBJECT (client, "Cancelled");
- return S_OK;
- }
-
- hr = gst_wasapi2_client_get_can_frames (client, &can_frames);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client, "Error getting frames to write to");
- return hr;
- }
- }
-
- /* We will write out these many frames, and this much length */
- n_frames = MIN (can_frames, have_frames);
- write_len = n_frames * mix_format->nBlockAlign;
-
- GST_LOG_OBJECT (client, "total: %d, have_frames: %d (%d bytes), "
- "can_frames: %d, will write: %d (%d bytes)", client->buffer_frame_count,
- have_frames, length, can_frames, n_frames, write_len);
-
- hr = render_client->GetBuffer (n_frames, &dst);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client, "Couldn't get buffer from client");
- return hr;
- }
-
- memcpy (dst, data, write_len);
- hr = render_client->ReleaseBuffer (n_frames, 0);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (client, "Failed to release buffer");
- return hr;
- }
-
- *write_length = write_len;
-
- return S_OK;
-}
-
-HRESULT
-gst_wasapi2_client_delay (GstWasapi2Client * client, guint32 * delay)
-{
- IAudioClient3 *audio_client;
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
- g_return_val_if_fail (delay != nullptr, E_INVALIDARG);
-
- *delay = 0;
-
- audio_client = client->audio_client;
- if (!audio_client) {
- GST_WARNING_OBJECT (client, "IAudioClient3 wasn't configured");
- return E_FAIL;
- }
-
- return audio_client->GetCurrentPadding (delay);
-}
-
-HRESULT
-gst_wasapi2_client_set_mute (GstWasapi2Client * client, gboolean mute)
-{
- HRESULT hr;
- ISimpleAudioVolume *audio_volume;
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
-
- audio_volume = client->audio_volume;
-
- if (!audio_volume) {
- GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
- return E_FAIL;
- }
-
- hr = audio_volume->SetMute (mute, nullptr);
- GST_DEBUG_OBJECT (client, "Set mute %s, hr: 0x%x",
- mute ? "enabled" : "disabled", (gint) hr);
-
- return hr;
-}
-
-HRESULT
-gst_wasapi2_client_get_mute (GstWasapi2Client * client, gboolean * mute)
-{
- HRESULT hr;
- ISimpleAudioVolume *audio_volume;
- BOOL current_mute = FALSE;
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
- g_return_val_if_fail (mute != NULL, E_INVALIDARG);
-
- audio_volume = client->audio_volume;
-
- if (!audio_volume) {
- GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
- return E_FAIL;
- }
-
- hr = audio_volume->GetMute (&current_mute);
- if (!gst_wasapi2_result (hr))
- return hr;
-
- *mute = (gboolean) current_mute;
-
- return S_OK;
-}
-
-HRESULT
-gst_wasapi2_client_set_volume (GstWasapi2Client * client, gfloat volume)
-{
- HRESULT hr;
- ISimpleAudioVolume *audio_volume;
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
- g_return_val_if_fail (volume >= 0 && volume <= 1.0, E_INVALIDARG);
-
- audio_volume = client->audio_volume;
-
- if (!audio_volume) {
- GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
- return E_FAIL;
- }
-
- hr = audio_volume->SetMasterVolume (volume, nullptr);
- GST_DEBUG_OBJECT (client, "Set volume %.2f hr: 0x%x", volume, (gint) hr);
-
- return hr;
-}
-
-HRESULT
-gst_wasapi2_client_get_volume (GstWasapi2Client * client, gfloat * volume)
-{
- HRESULT hr;
- ISimpleAudioVolume *audio_volume;
- float current_volume = FALSE;
-
- g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), E_INVALIDARG);
- g_return_val_if_fail (volume != NULL, E_INVALIDARG);
-
- audio_volume = client->audio_volume;
-
- if (!audio_volume) {
- GST_WARNING_OBJECT (client, "ISimpleAudioVolume object wasn't configured");
- return E_FAIL;
- }
-
- hr = audio_volume->GetMasterVolume (&current_volume);
- if (!gst_wasapi2_result (hr))
- return hr;
-
- *volume = current_volume;
-
- return S_OK;
-}
-
gboolean
gst_wasapi2_client_ensure_activation (GstWasapi2Client * client)
{
@@ -1980,8 +917,7 @@ find_dispatcher (ICoreDispatcher ** dispatcher)
GstWasapi2Client *
gst_wasapi2_client_new (GstWasapi2ClientDeviceClass device_class,
- gboolean low_latency, gint device_index, const gchar * device_id,
- gpointer dispatcher)
+ gint device_index, const gchar * device_id, gpointer dispatcher)
{
GstWasapi2Client *self;
/* *INDENT-OFF* */
@@ -2007,9 +943,8 @@ gst_wasapi2_client_new (GstWasapi2ClientDeviceClass device_class,
}
self = (GstWasapi2Client *) g_object_new (GST_TYPE_WASAPI2_CLIENT,
- "device-class", device_class, "low-latency", low_latency,
- "device-index", device_index, "device", device_id,
- "dispatcher", dispatcher, NULL);
+ "device-class", device_class, "device-index", device_index,
+ "device", device_id, "dispatcher", dispatcher, nullptr);
/* Reset explicitly to ensure that it happens before
* RoInitializeWrapper dtor is called */
@@ -2017,10 +952,18 @@ gst_wasapi2_client_new (GstWasapi2ClientDeviceClass device_class,
if (self->activate_state == GST_WASAPI2_CLIENT_ACTIVATE_FAILED) {
gst_object_unref (self);
- return NULL;
+ return nullptr;
}
gst_object_ref_sink (self);
return self;
}
+
+IAudioClient *
+gst_wasapi2_client_get_handle (GstWasapi2Client * client)
+{
+ g_return_val_if_fail (GST_IS_WASAPI2_CLIENT (client), nullptr);
+
+ return client->audio_client;
+}
diff --git a/sys/wasapi2/gstwasapi2client.h b/sys/wasapi2/gstwasapi2client.h
index 57b281101..6c8c78e21 100644
--- a/sys/wasapi2/gstwasapi2client.h
+++ b/sys/wasapi2/gstwasapi2client.h
@@ -39,50 +39,16 @@ GType gst_wasapi2_client_device_class_get_type (void);
G_DECLARE_FINAL_TYPE (GstWasapi2Client,
gst_wasapi2_client, GST, WASAPI2_CLIENT, GstObject);
-GstCaps * gst_wasapi2_client_get_caps (GstWasapi2Client * client);
-
-HRESULT gst_wasapi2_client_open (GstWasapi2Client * client,
- GstAudioRingBufferSpec * spec,
- GstAudioRingBuffer * buf);
-
-HRESULT gst_wasapi2_client_start (GstWasapi2Client * client);
-
-HRESULT gst_wasapi2_client_stop (GstWasapi2Client * client);
-
-HRESULT gst_wasapi2_client_read (GstWasapi2Client * client,
- gpointer data,
- guint length,
- guint * read_length);
-
-HRESULT gst_wasapi2_client_write (GstWasapi2Client * client,
- gpointer data,
- guint length,
- guint * write_length);
-
-HRESULT gst_wasapi2_client_delay (GstWasapi2Client * client,
- guint32 * delay);
-
-HRESULT gst_wasapi2_client_set_mute (GstWasapi2Client * client,
- gboolean mute);
-
-HRESULT gst_wasapi2_client_get_mute (GstWasapi2Client * client,
- gboolean * mute);
-
-HRESULT gst_wasapi2_client_set_volume (GstWasapi2Client * client,
- gfloat volume);
-
-HRESULT gst_wasapi2_client_get_volume (GstWasapi2Client * client,
- gfloat * volume);
-
-gboolean gst_wasapi2_client_ensure_activation (GstWasapi2Client * client);
-
GstWasapi2Client * gst_wasapi2_client_new (GstWasapi2ClientDeviceClass device_class,
- gboolean low_latency,
gint device_index,
const gchar * device_id,
gpointer dispatcher);
-G_DEFINE_AUTOPTR_CLEANUP_FUNC (GstWasapi2Client, gst_object_unref)
+gboolean gst_wasapi2_client_ensure_activation (GstWasapi2Client * client);
+
+IAudioClient * gst_wasapi2_client_get_handle (GstWasapi2Client * client);
+
+GstCaps * gst_wasapi2_client_get_caps (GstWasapi2Client * client);
G_END_DECLS
diff --git a/sys/wasapi2/gstwasapi2device.c b/sys/wasapi2/gstwasapi2device.c
index 6a36fcecd..4fd9abfaf 100644
--- a/sys/wasapi2/gstwasapi2device.c
+++ b/sys/wasapi2/gstwasapi2device.c
@@ -183,7 +183,7 @@ gst_wasapi2_device_provider_probe_internal (GstWasapi2DeviceProvider * self,
gchar *device_id = NULL;
gchar *device_name = NULL;
- client = gst_wasapi2_client_new (client_class, FALSE, i, NULL, NULL);
+ client = gst_wasapi2_client_new (client_class, i, NULL, NULL);
if (!client)
return;
diff --git a/sys/wasapi2/gstwasapi2ringbuffer.cpp b/sys/wasapi2/gstwasapi2ringbuffer.cpp
new file mode 100644
index 000000000..5a85458b3
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2ringbuffer.cpp
@@ -0,0 +1,1015 @@
+/* GStreamer
+ * Copyright (C) 2021 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "gstwasapi2ringbuffer.h"
+#include <string.h>
+#include <mfapi.h>
+#include <wrl.h>
+
+GST_DEBUG_CATEGORY_STATIC (gst_wasapi2_ring_buffer_debug);
+#define GST_CAT_DEFAULT gst_wasapi2_ring_buffer_debug
+
+static HRESULT gst_wasapi2_ring_buffer_io_callback (GstWasapi2RingBuffer * buf);
+
+/* *INDENT-OFF* */
+using namespace Microsoft::WRL;
+
+class GstWasapiAsyncCallback : public IMFAsyncCallback
+{
+public:
+ GstWasapiAsyncCallback(GstWasapi2RingBuffer *listener, DWORD queue_id)
+ : ref_count_(1)
+ , queue_id_(queue_id)
+ {
+ g_weak_ref_init (&listener_, listener);
+ }
+
+ virtual ~GstWasapiAsyncCallback ()
+ {
+ g_weak_ref_set (&listener_, nullptr);
+ }
+
+ /* IUnknown */
+ STDMETHODIMP_ (ULONG)
+ AddRef (void)
+ {
+ GST_TRACE ("%p, %d", this, ref_count_);
+ return InterlockedIncrement (&ref_count_);
+ }
+ STDMETHODIMP_ (ULONG)
+ Release (void)
+ {
+ ULONG ref_count;
+
+ GST_TRACE ("%p, %d", this, ref_count_);
+ ref_count = InterlockedDecrement (&ref_count_);
+
+ if (ref_count == 0) {
+ GST_TRACE ("Delete instance %p", this);
+ delete this;
+ }
+
+ return ref_count;
+ }
+
+ STDMETHODIMP
+ QueryInterface (REFIID riid, void ** object)
+ {
+ if (!object)
+ return E_POINTER;
+
+ if (riid == IID_IUnknown) {
+ GST_TRACE ("query IUnknown interface %p", this);
+ *object = static_cast<IUnknown *> (static_cast<GstWasapiAsyncCallback *> (this));
+ } else if (riid == __uuidof (IMFAsyncCallback)) {
+ GST_TRACE ("query IUnknown interface %p", this);
+ *object = static_cast<IUnknown *> (static_cast<GstWasapiAsyncCallback *> (this));
+ } else {
+ *object = nullptr;
+ return E_NOINTERFACE;
+ }
+
+ AddRef ();
+
+ return S_OK;
+ }
+
+ /* IMFAsyncCallback */
+ STDMETHODIMP
+ GetParameters(DWORD * pdwFlags, DWORD * pdwQueue)
+ {
+ *pdwFlags = 0;
+ *pdwQueue = queue_id_;
+
+ return S_OK;
+ }
+
+ STDMETHODIMP
+ Invoke(IMFAsyncResult * pAsyncResult)
+ {
+ GstWasapi2RingBuffer *ringbuffer;
+ HRESULT hr;
+
+ ringbuffer = (GstWasapi2RingBuffer *) g_weak_ref_get (&listener_);
+ if (!ringbuffer) {
+ GST_WARNING ("Listener was removed");
+ return S_OK;
+ }
+
+ hr = gst_wasapi2_ring_buffer_io_callback (ringbuffer);
+ gst_object_unref (ringbuffer);
+
+ return hr;
+ }
+
+private:
+ ULONG ref_count_;
+ DWORD queue_id_;
+ GWeakRef listener_;
+};
+/* *INDENT-ON* */
+
+struct _GstWasapi2RingBuffer
+{
+ GstAudioRingBuffer parent;
+
+ GstWasapi2ClientDeviceClass device_class;
+ gchar *device_id;
+ gboolean low_latency;
+ gboolean mute;
+ gdouble volume;
+ gpointer dispatcher;
+
+ GstWasapi2Client *client;
+ IAudioCaptureClient *capture_client;
+ IAudioRenderClient *render_client;
+ ISimpleAudioVolume *volume_object;
+
+ GstWasapiAsyncCallback *callback_object;
+ IMFAsyncResult *callback_result;
+ MFWORKITEM_KEY callback_key;
+ HANDLE event_handle;
+
+ guint64 expected_position;
+ gboolean is_first;
+ gboolean running;
+ UINT32 buffer_size;
+
+ gint segoffset;
+ guint64 write_frame_offset;
+
+ GMutex volume_lock;
+ gboolean mute_changed;
+ gboolean volume_changed;
+};
+
+static void gst_wasapi2_ring_buffer_constructed (GObject * object);
+static void gst_wasapi2_ring_buffer_dispose (GObject * object);
+static void gst_wasapi2_ring_buffer_finalize (GObject * object);
+
+static gboolean gst_wasapi2_ring_buffer_open_device (GstAudioRingBuffer * buf);
+static gboolean gst_wasapi2_ring_buffer_close_device (GstAudioRingBuffer * buf);
+static gboolean gst_wasapi2_ring_buffer_acquire (GstAudioRingBuffer * buf,
+ GstAudioRingBufferSpec * spec);
+static gboolean gst_wasapi2_ring_buffer_release (GstAudioRingBuffer * buf);
+static gboolean gst_wasapi2_ring_buffer_start (GstAudioRingBuffer * buf);
+static gboolean gst_wasapi2_ring_buffer_stop (GstAudioRingBuffer * buf);
+static guint gst_wasapi2_ring_buffer_delay (GstAudioRingBuffer * buf);
+
+#define gst_wasapi2_ring_buffer_parent_class parent_class
+G_DEFINE_TYPE (GstWasapi2RingBuffer, gst_wasapi2_ring_buffer,
+ GST_TYPE_AUDIO_RING_BUFFER);
+
+static void
+gst_wasapi2_ring_buffer_class_init (GstWasapi2RingBufferClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstAudioRingBufferClass *ring_buffer_class =
+ GST_AUDIO_RING_BUFFER_CLASS (klass);
+
+ gobject_class->constructed = gst_wasapi2_ring_buffer_constructed;
+ gobject_class->dispose = gst_wasapi2_ring_buffer_dispose;
+ gobject_class->finalize = gst_wasapi2_ring_buffer_finalize;
+
+ ring_buffer_class->open_device =
+ GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_open_device);
+ ring_buffer_class->close_device =
+ GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_close_device);
+ ring_buffer_class->acquire =
+ GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_acquire);
+ ring_buffer_class->release =
+ GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_release);
+ ring_buffer_class->start = GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_start);
+ ring_buffer_class->resume = GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_start);
+ ring_buffer_class->stop = GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_stop);
+ ring_buffer_class->delay = GST_DEBUG_FUNCPTR (gst_wasapi2_ring_buffer_delay);
+
+ GST_DEBUG_CATEGORY_INIT (gst_wasapi2_ring_buffer_debug,
+ "wasapi2ringbuffer", 0, "wasapi2ringbuffer");
+}
+
+static void
+gst_wasapi2_ring_buffer_init (GstWasapi2RingBuffer * self)
+{
+ self->volume = 1.0f;
+ self->mute = FALSE;
+
+ self->event_handle = CreateEvent (nullptr, FALSE, FALSE, nullptr);
+ g_mutex_init (&self->volume_lock);
+}
+
+static void
+gst_wasapi2_ring_buffer_constructed (GObject * object)
+{
+ GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (object);
+ HRESULT hr;
+ DWORD task_id = 0;
+ DWORD queue_id = 0;
+
+ hr = MFLockSharedWorkQueue (L"Pro Audio", 0, &task_id, &queue_id);
+ if (!gst_wasapi2_result (hr)) {
+ GST_WARNING_OBJECT (self, "Failed to get work queue id");
+ goto out;
+ }
+
+ self->callback_object = new GstWasapiAsyncCallback (self, queue_id);
+ hr = MFCreateAsyncResult (nullptr, self->callback_object, nullptr,
+ &self->callback_result);
+ if (!gst_wasapi2_result (hr)) {
+ GST_WARNING_OBJECT (self, "Failed to create IAsyncResult");
+ GST_WASAPI2_CLEAR_COM (self->callback_object);
+ }
+
+out:
+ G_OBJECT_CLASS (parent_class)->constructed (object);
+}
+
+static void
+gst_wasapi2_ring_buffer_dispose (GObject * object)
+{
+ GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (object);
+
+ gst_clear_object (&self->client);
+ GST_WASAPI2_CLEAR_COM (self->callback_result);
+ GST_WASAPI2_CLEAR_COM (self->callback_object);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+static void
+gst_wasapi2_ring_buffer_finalize (GObject * object)
+{
+ GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (object);
+
+ g_free (self->device_id);
+ CloseHandle (self->event_handle);
+ g_mutex_clear (&self->volume_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_wasapi2_ring_buffer_open_device (GstAudioRingBuffer * buf)
+{
+ GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
+
+ GST_DEBUG_OBJECT (self, "Open");
+
+ self->client = gst_wasapi2_client_new (self->device_class,
+ -1, self->device_id, self->dispatcher);
+ if (!self->client) {
+ GST_ERROR_OBJECT (self, "Failed to open device");
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+static gboolean
+gst_wasapi2_ring_buffer_close_device (GstAudioRingBuffer * buf)
+{
+ GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
+
+ GST_DEBUG_OBJECT (self, "Close");
+
+ GST_WASAPI2_CLEAR_COM (self->capture_client);
+ GST_WASAPI2_CLEAR_COM (self->render_client);
+
+ g_mutex_lock (&self->volume_lock);
+ if (self->volume_object)
+ self->volume_object->SetMute (FALSE, nullptr);
+ GST_WASAPI2_CLEAR_COM (self->volume_object);
+ g_mutex_unlock (&self->volume_lock);
+
+ gst_clear_object (&self->client);
+
+ return TRUE;
+}
+
+static HRESULT
+gst_wasapi2_ring_buffer_read (GstWasapi2RingBuffer * self)
+{
+ GstAudioRingBuffer *ringbuffer = GST_AUDIO_RING_BUFFER_CAST (self);
+ BYTE *data = nullptr;
+ UINT32 to_read = 0;
+ guint32 to_read_bytes;
+ DWORD flags = 0;
+ HRESULT hr;
+ guint64 position;
+ GstAudioInfo *info = &ringbuffer->spec.info;
+ IAudioCaptureClient *capture_client = self->capture_client;
+ guint gap_size = 0;
+ guint offset = 0;
+ gint segment;
+ guint8 *readptr;
+ gint len;
+
+ if (!capture_client) {
+ GST_ERROR_OBJECT (self, "IAudioCaptureClient is not available");
+ return E_FAIL;
+ }
+
+ hr = capture_client->GetBuffer (&data, &to_read, &flags, &position, nullptr);
+ if (hr == AUDCLNT_S_BUFFER_EMPTY || to_read == 0) {
+ GST_LOG_OBJECT (self, "Empty buffer");
+ to_read = 0;
+ goto out;
+ }
+
+ to_read_bytes = to_read * GST_AUDIO_INFO_BPF (info);
+
+ GST_TRACE_OBJECT (self, "Reading at %d frames offset %" G_GUINT64_FORMAT
+ ", expected position %" G_GUINT64_FORMAT, to_read, position,
+ self->expected_position);
+
+ if (self->is_first) {
+ self->expected_position = position + to_read;
+ self->is_first = FALSE;
+ } else {
+ if (position > self->expected_position) {
+ guint gap_frames;
+
+ gap_frames = (guint) (position - self->expected_position);
+ GST_WARNING_OBJECT (self, "Found %u frames gap", gap_frames);
+ gap_size = gap_frames * GST_AUDIO_INFO_BPF (info);
+ }
+
+ self->expected_position = position + to_read;
+ }
+
+ /* Fill gap data if any */
+ while (gap_size > 0) {
+ if (!gst_audio_ring_buffer_prepare_read (ringbuffer,
+ &segment, &readptr, &len)) {
+ GST_INFO_OBJECT (self, "No segment available");
+ goto out;
+ }
+
+ g_assert (self->segoffset >= 0);
+
+ len -= self->segoffset;
+ if (len > gap_size)
+ len = gap_size;
+
+ gst_audio_format_info_fill_silence (ringbuffer->spec.info.finfo,
+ readptr + self->segoffset, len);
+
+ self->segoffset += len;
+ gap_size -= len;
+
+ if (self->segoffset == ringbuffer->spec.segsize) {
+ gst_audio_ring_buffer_advance (ringbuffer, 1);
+ self->segoffset = 0;
+ }
+ }
+
+ while (to_read_bytes) {
+ if (!gst_audio_ring_buffer_prepare_read (ringbuffer,
+ &segment, &readptr, &len)) {
+ GST_INFO_OBJECT (self, "No segment available");
+ goto out;
+ }
+
+ len -= self->segoffset;
+ if (len > to_read_bytes)
+ len = to_read_bytes;
+
+ memcpy (readptr + self->segoffset, data + offset, len);
+
+ self->segoffset += len;
+ offset += len;
+ to_read_bytes -= len;
+
+ if (self->segoffset == ringbuffer->spec.segsize) {
+ gst_audio_ring_buffer_advance (ringbuffer, 1);
+ self->segoffset = 0;
+ }
+ }
+
+out:
+ hr = capture_client->ReleaseBuffer (to_read);
+ /* For debugging */
+ gst_wasapi2_result (hr);
+
+ return S_OK;
+}
+
+static HRESULT
+gst_wasapi2_ring_buffer_write (GstWasapi2RingBuffer * self, gboolean preroll)
+{
+ GstAudioRingBuffer *ringbuffer = GST_AUDIO_RING_BUFFER_CAST (self);
+ HRESULT hr;
+ IAudioClient *client_handle;
+ IAudioRenderClient *render_client;
+ guint32 padding_frames = 0;
+ guint32 can_write;
+ guint32 can_write_bytes;
+ gint segment;
+ guint8 *readptr;
+ gint len;
+ BYTE *data = nullptr;
+
+ client_handle = gst_wasapi2_client_get_handle (self->client);
+ if (!client_handle) {
+ GST_ERROR_OBJECT (self, "IAudioClient is not available");
+ return E_FAIL;
+ }
+
+ render_client = self->render_client;
+ if (!render_client) {
+ GST_ERROR_OBJECT (self, "IAudioRenderClient is not available");
+ return E_FAIL;
+ }
+
+ hr = client_handle->GetCurrentPadding (&padding_frames);
+ if (!gst_wasapi2_result (hr))
+ return hr;
+
+ if (padding_frames >= self->buffer_size) {
+ GST_INFO_OBJECT (self,
+ "Padding size %d is larger than or equal to buffer size %d",
+ padding_frames, self->buffer_size);
+ return S_OK;
+ }
+
+ can_write = self->buffer_size - padding_frames;
+ can_write_bytes = can_write * GST_AUDIO_INFO_BPF (&ringbuffer->spec.info);
+ if (preroll) {
+ GST_INFO_OBJECT (self, "Pre-fill %d frames with silence", can_write);
+
+ hr = render_client->GetBuffer (can_write, &data);
+ if (!gst_wasapi2_result (hr))
+ return hr;
+
+ hr = render_client->ReleaseBuffer (can_write, AUDCLNT_BUFFERFLAGS_SILENT);
+ return gst_wasapi2_result (hr);
+ }
+
+ GST_TRACE_OBJECT (self, "Writing %d frames offset at %" G_GUINT64_FORMAT,
+ can_write, self->write_frame_offset);
+ self->write_frame_offset += can_write;
+
+ while (can_write_bytes > 0) {
+ if (!gst_audio_ring_buffer_prepare_read (ringbuffer,
+ &segment, &readptr, &len)) {
+ GST_INFO_OBJECT (self, "No segment available");
+ return S_OK;
+ }
+
+ len -= self->segoffset;
+
+ if (len > can_write_bytes)
+ len = can_write_bytes;
+
+ can_write = len / GST_AUDIO_INFO_BPF (&ringbuffer->spec.info);
+ if (can_write == 0)
+ break;
+
+ hr = render_client->GetBuffer (can_write, &data);
+ if (!gst_wasapi2_result (hr))
+ return hr;
+
+ memcpy (data, readptr + self->segoffset, len);
+ hr = render_client->ReleaseBuffer (can_write, 0);
+
+ self->segoffset += len;
+ can_write_bytes -= len;
+
+ if (self->segoffset == ringbuffer->spec.segsize) {
+ gst_audio_ring_buffer_clear (ringbuffer, segment);
+ gst_audio_ring_buffer_advance (ringbuffer, 1);
+ self->segoffset = 0;
+ }
+
+ if (!gst_wasapi2_result (hr)) {
+ GST_WARNING_OBJECT (self, "Failed to release buffer");
+ break;
+ }
+ }
+
+ return S_OK;
+}
+
+static HRESULT
+gst_wasapi2_ring_buffer_io_callback (GstWasapi2RingBuffer * self)
+{
+ HRESULT hr = E_FAIL;
+
+ g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (self), E_FAIL);
+
+ if (!self->running) {
+ GST_INFO_OBJECT (self, "We are not running now");
+ return S_OK;
+ }
+
+ switch (self->device_class) {
+ case GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE:
+ hr = gst_wasapi2_ring_buffer_read (self);
+ break;
+ case GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER:
+ hr = gst_wasapi2_ring_buffer_write (self, FALSE);
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+
+ if (self->running) {
+ if (gst_wasapi2_result (hr)) {
+ hr = MFPutWaitingWorkItem (self->event_handle, 0, self->callback_result,
+ &self->callback_key);
+
+ if (!gst_wasapi2_result (hr)) {
+ GstElement *parent =
+ (GstElement *) gst_object_get_parent (GST_OBJECT_CAST (self));
+
+ GST_ERROR_OBJECT (self, "Failed to put item");
+ if (parent) {
+ GST_ELEMENT_ERROR (parent, RESOURCE, FAILED,
+ (nullptr), ("Failed to schedule next I/O"));
+ gst_object_unref (parent);
+ }
+ }
+ }
+ } else {
+ GST_INFO_OBJECT (self, "We are not running now");
+ return S_OK;
+ }
+
+ return hr;
+}
+
+static HRESULT
+gst_wasapi2_ring_buffer_initialize_audio_client3 (GstWasapi2RingBuffer * self,
+ IAudioClient * client_handle, WAVEFORMATEX * mix_format, guint * period)
+{
+ HRESULT hr = S_OK;
+ UINT32 default_period, fundamental_period, min_period, max_period;
+ /* AUDCLNT_STREAMFLAGS_NOPERSIST is not allowed for
+ * InitializeSharedAudioStream */
+ DWORD stream_flags = AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
+ ComPtr < IAudioClient3 > audio_client;
+
+ hr = client_handle->QueryInterface (IID_PPV_ARGS (&audio_client));
+ if (!gst_wasapi2_result (hr)) {
+ GST_INFO_OBJECT (self, "IAudioClient3 interface is unavailable");
+ return hr;
+ }
+
+ hr = audio_client->GetSharedModeEnginePeriod (mix_format,
+ &default_period, &fundamental_period, &min_period, &max_period);
+ if (!gst_wasapi2_result (hr)) {
+ GST_INFO_OBJECT (self, "Couldn't get period");
+ return hr;
+ }
+
+ GST_INFO_OBJECT (self, "Using IAudioClient3, default period %d frames, "
+ "fundamental period %d frames, minimum period %d frames, maximum period "
+ "%d frames", default_period, fundamental_period, min_period, max_period);
+
+ *period = min_period;
+
+ hr = audio_client->InitializeSharedAudioStream (stream_flags, min_period,
+ mix_format, nullptr);
+
+ if (!gst_wasapi2_result (hr))
+ GST_WARNING_OBJECT (self, "Failed to initialize IAudioClient3");
+
+ return hr;
+}
+
+static HRESULT
+gst_wasapi2_ring_buffer_initialize_audio_client (GstWasapi2RingBuffer * self,
+    IAudioClient * client_handle, WAVEFORMATEX * mix_format, guint * period)
+{
+  GstAudioRingBuffer *ringbuffer = GST_AUDIO_RING_BUFFER_CAST (self);
+  REFERENCE_TIME default_period, min_period;
+  DWORD stream_flags =
+      AUDCLNT_STREAMFLAGS_EVENTCALLBACK | AUDCLNT_STREAMFLAGS_NOPERSIST;
+  HRESULT hr;
+
+  /* Fallback initialization path using plain IAudioClient::Initialize()
+   * (shared mode, event-driven). Used when IAudioClient3 is unavailable
+   * or low-latency mode is disabled. On success, *period is set to the
+   * device period converted to frames */
+  hr = client_handle->GetDevicePeriod (&default_period, &min_period);
+  if (!gst_wasapi2_result (hr)) {
+    GST_WARNING_OBJECT (self, "Couldn't get device period info");
+    return hr;
+  }
+
+  GST_INFO_OBJECT (self, "wasapi2 default period: %" G_GINT64_FORMAT
+      ", min period: %" G_GINT64_FORMAT, default_period, min_period);
+
+  hr = client_handle->Initialize (AUDCLNT_SHAREMODE_SHARED, stream_flags,
+      /* hnsBufferDuration should be same as hnsPeriodicity
+       * when AUDCLNT_STREAMFLAGS_EVENTCALLBACK is used.
+       * And in case of shared mode, hnsPeriodicity should be zero, so
+       * this value should be zero as well */
+      0,
+      /* This must always be 0 in shared mode */
+      0, mix_format, nullptr);
+
+  if (!gst_wasapi2_result (hr)) {
+    GST_WARNING_OBJECT (self, "Couldn't initialize audioclient");
+    return hr;
+  }
+
+  /* REFERENCE_TIME is expressed in 100-nanosecond units, so multiply by
+   * 100 to get nanoseconds, then scale by the sample rate to obtain the
+   * period in frames */
+  *period = gst_util_uint64_scale_round (default_period * 100,
+      GST_AUDIO_INFO_RATE (&ringbuffer->spec.info), GST_SECOND);
+
+  return S_OK;
+}
+
+static gboolean
+gst_wasapi2_ring_buffer_acquire (GstAudioRingBuffer * buf,
+    GstAudioRingBufferSpec * spec)
+{
+  GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
+  IAudioClient *client_handle;
+  HRESULT hr;
+  WAVEFORMATEX *mix_format = nullptr;
+  ComPtr < ISimpleAudioVolume > audio_volume;
+  GstAudioChannelPosition *position = nullptr;
+  guint period = 0;
+
+  GST_DEBUG_OBJECT (buf, "Acquire");
+
+  if (!self->client) {
+    GST_ERROR_OBJECT (self, "No configured client object");
+    return FALSE;
+  }
+
+  /* Device activation can be asynchronous (UWP); block until done */
+  if (!gst_wasapi2_client_ensure_activation (self->client)) {
+    GST_ERROR_OBJECT (self, "Failed to activate audio client");
+    return FALSE;
+  }
+
+  client_handle = gst_wasapi2_client_get_handle (self->client);
+  if (!client_handle) {
+    GST_ERROR_OBJECT (self, "IAudioClient handle is not available");
+    return FALSE;
+  }
+
+  /* TODO: convert given caps to mix format */
+  hr = client_handle->GetMixFormat (&mix_format);
+  if (!gst_wasapi2_result (hr)) {
+    GST_ERROR_OBJECT (self, "Failed to get mix format");
+    return FALSE;
+  }
+
+  /* Only use audioclient3 when low-latency is requested because otherwise
+   * very slow machines and VMs with 1 CPU allocated will get glitches:
+   * https://bugzilla.gnome.org/show_bug.cgi?id=794497 */
+  hr = E_FAIL;
+  if (self->low_latency) {
+    hr = gst_wasapi2_ring_buffer_initialize_audio_client3 (self, client_handle,
+        mix_format, &period);
+  }
+
+  /* Try again if IAudioClient3 API is unavailable.
+   * NOTE: IAudioClient3:: methods might not be available for default device
+   * NOTE: The default device is a special device which is needed for supporting
+   * automatic stream routing
+   * https://docs.microsoft.com/en-us/windows/win32/coreaudio/automatic-stream-routing
+   */
+  if (FAILED (hr)) {
+    hr = gst_wasapi2_ring_buffer_initialize_audio_client (self, client_handle,
+        mix_format, &period);
+  }
+
+  if (!gst_wasapi2_result (hr)) {
+    GST_ERROR_OBJECT (self, "Failed to initialize audio client");
+    /* GetMixFormat() output must be released with CoTaskMemFree() */
+    CoTaskMemFree (mix_format);
+    return FALSE;
+  }
+
+  hr = client_handle->SetEventHandle (self->event_handle);
+  if (!gst_wasapi2_result (hr)) {
+    GST_ERROR_OBJECT (self, "Failed to set event handle");
+    CoTaskMemFree (mix_format);
+    return FALSE;
+  }
+
+  /* Propagate the device channel layout to the ring buffer */
+  gst_wasapi2_util_waveformatex_to_channel_mask (mix_format, &position);
+  if (position)
+    gst_audio_ring_buffer_set_channel_positions (buf, position);
+  g_free (position);
+
+  CoTaskMemFree (mix_format);
+
+  hr = client_handle->GetBufferSize (&self->buffer_size);
+  if (!gst_wasapi2_result (hr)) {
+    GST_ERROR_OBJECT (self, "Failed to query buffer size");
+    return FALSE;
+  }
+
+  g_assert (period > 0);
+
+  spec->segsize = period * GST_AUDIO_INFO_BPF (&buf->spec.info);
+  spec->segtotal = 2;
+
+  GST_INFO_OBJECT (self,
+      "Buffer size: %d frames, period: %d frames, segsize: %d bytes",
+      self->buffer_size, period, spec->segsize);
+
+  if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
+    ComPtr < IAudioRenderClient > render_client;
+
+    hr = client_handle->GetService (IID_PPV_ARGS (&render_client));
+    if (!gst_wasapi2_result (hr)) {
+      GST_ERROR_OBJECT (self, "IAudioRenderClient is unavailable");
+      return FALSE;
+    }
+
+    self->render_client = render_client.Detach ();
+  } else {
+    ComPtr < IAudioCaptureClient > capture_client;
+
+    hr = client_handle->GetService (IID_PPV_ARGS (&capture_client));
+    if (!gst_wasapi2_result (hr)) {
+      GST_ERROR_OBJECT (self, "IAudioCaptureClient is unavailable");
+      return FALSE;
+    }
+
+    self->capture_client = capture_client.Detach ();
+  }
+
+  hr = client_handle->GetService (IID_PPV_ARGS (&audio_volume));
+  if (!gst_wasapi2_result (hr)) {
+    GST_ERROR_OBJECT (self, "ISimpleAudioVolume is unavailable");
+    goto error;
+  }
+
+  g_mutex_lock (&self->volume_lock);
+  self->volume_object = audio_volume.Detach ();
+
+  /* Apply mute/volume values that were set before the volume interface
+   * became available */
+  if (self->mute_changed) {
+    self->volume_object->SetMute (self->mute, nullptr);
+    self->mute_changed = FALSE;
+  } else {
+    self->volume_object->SetMute (FALSE, nullptr);
+  }
+
+  if (self->volume_changed) {
+    self->volume_object->SetMasterVolume (self->volume, nullptr);
+    self->volume_changed = FALSE;
+  }
+  g_mutex_unlock (&self->volume_lock);
+
+  buf->size = spec->segtotal * spec->segsize;
+  buf->memory = (guint8 *) g_malloc (buf->size);
+  gst_audio_format_info_fill_silence (buf->spec.info.finfo,
+      buf->memory, buf->size);
+
+  return TRUE;
+
+error:
+  GST_WASAPI2_CLEAR_COM (self->render_client);
+  GST_WASAPI2_CLEAR_COM (self->capture_client);
+  GST_WASAPI2_CLEAR_COM (self->volume_object);
+
+  return FALSE;
+}
+
+static gboolean
+gst_wasapi2_ring_buffer_release (GstAudioRingBuffer * buf)
+{
+  GST_DEBUG_OBJECT (buf, "Release");
+
+  /* Drop the data area that acquire() allocated; safe if already NULL */
+  if (buf->memory) {
+    g_free (buf->memory);
+    buf->memory = nullptr;
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_wasapi2_ring_buffer_start (GstAudioRingBuffer * buf)
+{
+  GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
+  IAudioClient *client_handle;
+  HRESULT hr;
+
+  GST_DEBUG_OBJECT (self, "Start");
+
+  client_handle = gst_wasapi2_client_get_handle (self->client);
+  /* Reset per-run bookkeeping before the device starts pulling/pushing */
+  self->is_first = TRUE;
+  self->running = TRUE;
+  self->segoffset = 0;
+  self->write_frame_offset = 0;
+
+  /* render client might read data from buffer immediately once it's prepared.
+   * Pre-fill with silence in order to avoid a start-up glitch */
+  if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
+    hr = gst_wasapi2_ring_buffer_write (self, TRUE);
+    if (!gst_wasapi2_result (hr)) {
+      GST_ERROR_OBJECT (self, "Failed to pre-fill buffer with silence");
+      return FALSE;
+    }
+  }
+
+  hr = client_handle->Start ();
+  if (!gst_wasapi2_result (hr)) {
+    GST_ERROR_OBJECT (self, "Failed to start client");
+    self->running = FALSE;
+    return FALSE;
+  }
+
+  /* Arm the MediaFoundation work queue: the I/O callback will be invoked
+   * whenever the device signals event_handle */
+  hr = MFPutWaitingWorkItem (self->event_handle, 0, self->callback_result,
+      &self->callback_key);
+  if (!gst_wasapi2_result (hr)) {
+    GST_ERROR_OBJECT (self, "Failed to put waiting item");
+    /* Undo the Start() above so the client is left in a stopped state */
+    client_handle->Stop ();
+    self->running = FALSE;
+
+    return FALSE;
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_wasapi2_ring_buffer_stop (GstAudioRingBuffer * buf)
+{
+  GstWasapi2RingBuffer *self = GST_WASAPI2_RING_BUFFER (buf);
+  IAudioClient *client_handle;
+  HRESULT hr;
+
+  GST_DEBUG_OBJECT (buf, "Stop");
+
+  if (!self->client) {
+    GST_DEBUG_OBJECT (self, "No configured client");
+    return TRUE;
+  }
+
+  if (!self->running) {
+    GST_DEBUG_OBJECT (self, "We are not running");
+    return TRUE;
+  }
+
+  client_handle = gst_wasapi2_client_get_handle (self->client);
+
+  /* Cancel the MF work item first so no I/O callback races with Stop() */
+  self->running = FALSE;
+  MFCancelWorkItem (self->callback_key);
+
+  hr = client_handle->Stop ();
+  gst_wasapi2_result (hr);
+
+  /* Call reset for later reuse case; log (but don't fail on) errors,
+   * same as for Stop() above */
+  hr = client_handle->Reset ();
+  gst_wasapi2_result (hr);
+
+  self->expected_position = 0;
+
+  return TRUE;
+}
+
+static guint
+gst_wasapi2_ring_buffer_delay (GstAudioRingBuffer * buf)
+{
+  /* Always report zero device delay; the rationale is below */
+  /* NOTE: WASAPI supports GetCurrentPadding() method for querying
+   * currently unread buffer size, but it doesn't seem to be quite useful
+   * here because:
+   *
+   * In case of capture client, GetCurrentPadding() will return the number of
+   * unread frames which will be identical to pNumFramesToRead value of
+   * IAudioCaptureClient::GetBuffer()'s return. Since we are running on
+   * event-driven mode and whenever available, WASAPI will notify signal
+   * so it's likely zero at this moment. And there is a chance to
+   * return incorrect value here because our IO callback happens from
+   * other thread.
+   *
+   * And render client's padding size will return the total size of buffer
+   * which is likely larger than twice of our period. Which doesn't represent
+   * the amount queued frame size in device correctly
+   */
+  return 0;
+}
+
+GstAudioRingBuffer *
+gst_wasapi2_ring_buffer_new (GstWasapi2ClientDeviceClass device_class,
+    gboolean low_latency, const gchar * device_id, gpointer dispatcher,
+    const gchar * name)
+{
+  GstWasapi2RingBuffer *self = (GstWasapi2RingBuffer *)
+      g_object_new (GST_TYPE_WASAPI2_RING_BUFFER, "name", name, nullptr);
+
+  /* The object is unusable without its async callback object (presumably
+   * created during instance init — not visible here); treat its absence
+   * as a construction failure */
+  if (!self->callback_object) {
+    gst_object_unref (self);
+    return nullptr;
+  }
+
+  /* Store construction parameters; the client itself is created later */
+  self->device_class = device_class;
+  self->low_latency = low_latency;
+  self->device_id = g_strdup (device_id);
+  self->dispatcher = dispatcher;
+
+  return GST_AUDIO_RING_BUFFER_CAST (self);
+}
+
+GstCaps *
+gst_wasapi2_ring_buffer_get_caps (GstWasapi2RingBuffer * buf)
+{
+  g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), nullptr);
+
+  /* Nothing to report before a client object has been configured */
+  if (buf->client == nullptr)
+    return nullptr;
+
+  /* Device activation may still be pending (e.g. UWP); wait for it */
+  if (!gst_wasapi2_client_ensure_activation (buf->client)) {
+    GST_ERROR_OBJECT (buf, "Failed to activate audio client");
+    return nullptr;
+  }
+
+  return gst_wasapi2_client_get_caps (buf->client);
+}
+
+HRESULT
+gst_wasapi2_ring_buffer_set_mute (GstWasapi2RingBuffer * buf, gboolean mute)
+{
+  HRESULT hr = S_OK;
+
+  g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), E_INVALIDARG);
+
+  g_mutex_lock (&buf->volume_lock);
+  buf->mute = mute;
+  if (buf->volume_object) {
+    hr = buf->volume_object->SetMute (mute, nullptr);
+  } else {
+    /* Volume interface not configured yet; record a pending *mute*
+     * change (not volume) so acquire() applies it later */
+    buf->mute_changed = TRUE;
+  }
+  g_mutex_unlock (&buf->volume_lock);
+
+  /* Propagate the actual SetMute() result instead of unconditional S_OK */
+  return hr;
+}
+
+HRESULT
+gst_wasapi2_ring_buffer_get_mute (GstWasapi2RingBuffer * buf, gboolean * mute)
+{
+  BOOL mute_val;
+  HRESULT hr = S_OK;
+
+  g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), E_INVALIDARG);
+  g_return_val_if_fail (mute != nullptr, E_INVALIDARG);
+
+  /* Start from the cached value; it is the answer when no volume
+   * interface exists yet */
+  mute_val = buf->mute;
+
+  g_mutex_lock (&buf->volume_lock);
+  if (buf->volume_object)
+    hr = buf->volume_object->GetMute (&mute_val);
+  g_mutex_unlock (&buf->volume_lock);
+
+  /* Normalize Windows BOOL to canonical gboolean TRUE/FALSE */
+  *mute = mute_val ? TRUE : FALSE;
+
+  return hr;
+}
+
+HRESULT
+gst_wasapi2_ring_buffer_set_volume (GstWasapi2RingBuffer * buf, gfloat volume)
+{
+  HRESULT hr = S_OK;
+
+  g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), E_INVALIDARG);
+  g_return_val_if_fail (volume >= 0 && volume <= 1.0, E_INVALIDARG);
+
+  g_mutex_lock (&buf->volume_lock);
+  buf->volume = volume;
+  if (buf->volume_object) {
+    hr = buf->volume_object->SetMasterVolume (volume, nullptr);
+  } else {
+    /* Volume interface not configured yet; record a pending *volume*
+     * change (not mute) so acquire() applies it later */
+    buf->volume_changed = TRUE;
+  }
+  g_mutex_unlock (&buf->volume_lock);
+
+  return hr;
+}
+
+HRESULT
+gst_wasapi2_ring_buffer_get_volume (GstWasapi2RingBuffer * buf, gfloat * volume)
+{
+  gfloat volume_val;
+  HRESULT hr = S_OK;
+
+  g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (buf), E_INVALIDARG);
+  g_return_val_if_fail (volume != nullptr, E_INVALIDARG);
+
+  g_mutex_lock (&buf->volume_lock);
+  /* Cached value is returned when no volume interface exists yet;
+   * otherwise query the device for the live value */
+  volume_val = buf->volume;
+  if (buf->volume_object)
+    hr = buf->volume_object->GetMasterVolume (&volume_val);
+  g_mutex_unlock (&buf->volume_lock);
+
+  *volume = volume_val;
+
+  return hr;
+}
diff --git a/sys/wasapi2/gstwasapi2ringbuffer.h b/sys/wasapi2/gstwasapi2ringbuffer.h
new file mode 100644
index 000000000..5bbb6e976
--- /dev/null
+++ b/sys/wasapi2/gstwasapi2ringbuffer.h
@@ -0,0 +1,55 @@
+/* GStreamer
+ * Copyright (C) 2021 Seungha Yang <seungha@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_WASAPI2_RING_BUFFER_H__
+#define __GST_WASAPI2_RING_BUFFER_H__
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+#include "gstwasapi2client.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_WASAPI2_RING_BUFFER (gst_wasapi2_ring_buffer_get_type())
+G_DECLARE_FINAL_TYPE (GstWasapi2RingBuffer, gst_wasapi2_ring_buffer,
+ GST, WASAPI2_RING_BUFFER, GstAudioRingBuffer);
+
+GstAudioRingBuffer * gst_wasapi2_ring_buffer_new (GstWasapi2ClientDeviceClass device_class,
+ gboolean low_latency,
+ const gchar *device_id,
+ gpointer dispatcher,
+ const gchar * name);
+
+GstCaps * gst_wasapi2_ring_buffer_get_caps (GstWasapi2RingBuffer * buf);
+
+HRESULT gst_wasapi2_ring_buffer_set_mute (GstWasapi2RingBuffer * buf,
+ gboolean mute);
+
+HRESULT gst_wasapi2_ring_buffer_get_mute (GstWasapi2RingBuffer * buf,
+ gboolean * mute);
+
+HRESULT gst_wasapi2_ring_buffer_set_volume (GstWasapi2RingBuffer * buf,
+ gfloat volume);
+
+HRESULT gst_wasapi2_ring_buffer_get_volume (GstWasapi2RingBuffer * buf,
+ gfloat * volume);
+
+G_END_DECLS
+
+#endif /* __GST_WASAPI2_RING_BUFFER_H__ */
diff --git a/sys/wasapi2/gstwasapi2sink.c b/sys/wasapi2/gstwasapi2sink.c
index cb3fd407f..3bb82529e 100644
--- a/sys/wasapi2/gstwasapi2sink.c
+++ b/sys/wasapi2/gstwasapi2sink.c
@@ -31,11 +31,11 @@
*
* ## Example pipelines
* |[
- * gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink
+ * gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink
* ]| Generate 20 ms buffers and render to the default audio device.
*
* |[
- * gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink low-latency=true
+ * gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapi2sink low-latency=true
* ]| Same as above, but with the minimum possible latency
*
*/
@@ -45,7 +45,7 @@
#include "gstwasapi2sink.h"
#include "gstwasapi2util.h"
-#include "gstwasapi2client.h"
+#include "gstwasapi2ringbuffer.h"
GST_DEBUG_CATEGORY_STATIC (gst_wasapi2_sink_debug);
#define GST_CAT_DEFAULT gst_wasapi2_sink_debug
@@ -59,9 +59,6 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
#define DEFAULT_MUTE FALSE
#define DEFAULT_VOLUME 1.0
-#define GST_WASAPI2_SINK_LOCK(s) g_mutex_lock(&(s)->lock)
-#define GST_WASAPI2_SINK_UNLOCK(s) g_mutex_unlock(&(s)->lock)
-
enum
{
PROP_0,
@@ -74,11 +71,7 @@ enum
struct _GstWasapi2Sink
{
- GstAudioSink parent;
-
- GstWasapi2Client *client;
- GstCaps *cached_caps;
- gboolean started;
+ GstAudioBaseSink parent;
/* properties */
gchar *device_id;
@@ -89,30 +82,21 @@ struct _GstWasapi2Sink
gboolean mute_changed;
gboolean volume_changed;
-
- /* to protect audioclient from set/get property */
- GMutex lock;
};
-static void gst_wasapi2_sink_dispose (GObject * object);
static void gst_wasapi2_sink_finalize (GObject * object);
static void gst_wasapi2_sink_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_wasapi2_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
+static GstStateChangeReturn gst_wasapi2_sink_change_state (GstElement *
+ element, GstStateChange transition);
+
static GstCaps *gst_wasapi2_sink_get_caps (GstBaseSink * bsink,
GstCaps * filter);
-
-static gboolean gst_wasapi2_sink_prepare (GstAudioSink * asink,
- GstAudioRingBufferSpec * spec);
-static gboolean gst_wasapi2_sink_unprepare (GstAudioSink * asink);
-static gboolean gst_wasapi2_sink_open (GstAudioSink * asink);
-static gboolean gst_wasapi2_sink_close (GstAudioSink * asink);
-static gint gst_wasapi2_sink_write (GstAudioSink * asink,
- gpointer data, guint length);
-static guint gst_wasapi2_sink_delay (GstAudioSink * asink);
-static void gst_wasapi2_sink_reset (GstAudioSink * asink);
+static GstAudioRingBuffer *gst_wasapi2_sink_create_ringbuffer (GstAudioBaseSink
+ * sink);
static void gst_wasapi2_sink_set_mute (GstWasapi2Sink * self, gboolean mute);
static gboolean gst_wasapi2_sink_get_mute (GstWasapi2Sink * self);
@@ -120,7 +104,8 @@ static void gst_wasapi2_sink_set_volume (GstWasapi2Sink * self, gdouble volume);
static gdouble gst_wasapi2_sink_get_volume (GstWasapi2Sink * self);
#define gst_wasapi2_sink_parent_class parent_class
-G_DEFINE_TYPE_WITH_CODE (GstWasapi2Sink, gst_wasapi2_sink, GST_TYPE_AUDIO_SINK,
+G_DEFINE_TYPE_WITH_CODE (GstWasapi2Sink, gst_wasapi2_sink,
+ GST_TYPE_AUDIO_BASE_SINK,
G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
static void
@@ -129,9 +114,9 @@ gst_wasapi2_sink_class_init (GstWasapi2SinkClass * klass)
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
- GstAudioSinkClass *audiosink_class = GST_AUDIO_SINK_CLASS (klass);
+ GstAudioBaseSinkClass *audiobasesink_class =
+ GST_AUDIO_BASE_SINK_CLASS (klass);
- gobject_class->dispose = gst_wasapi2_sink_dispose;
gobject_class->finalize = gst_wasapi2_sink_finalize;
gobject_class->set_property = gst_wasapi2_sink_set_property;
gobject_class->get_property = gst_wasapi2_sink_get_property;
@@ -184,15 +169,13 @@ gst_wasapi2_sink_class_init (GstWasapi2SinkClass * klass)
"Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>, "
"Seungha Yang <seungha@centricular.com>");
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_wasapi2_sink_change_state);
+
basesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_get_caps);
- audiosink_class->prepare = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_prepare);
- audiosink_class->unprepare = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_unprepare);
- audiosink_class->open = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_open);
- audiosink_class->close = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_close);
- audiosink_class->write = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_write);
- audiosink_class->delay = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_delay);
- audiosink_class->reset = GST_DEBUG_FUNCPTR (gst_wasapi2_sink_reset);
+ audiobasesink_class->create_ringbuffer =
+ GST_DEBUG_FUNCPTR (gst_wasapi2_sink_create_ringbuffer);
GST_DEBUG_CATEGORY_INIT (gst_wasapi2_sink_debug, "wasapi2sink",
0, "Windows audio session API sink");
@@ -204,21 +187,6 @@ gst_wasapi2_sink_init (GstWasapi2Sink * self)
self->low_latency = DEFAULT_LOW_LATENCY;
self->mute = DEFAULT_MUTE;
self->volume = DEFAULT_VOLUME;
-
- g_mutex_init (&self->lock);
-}
-
-static void
-gst_wasapi2_sink_dispose (GObject * object)
-{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (object);
-
- GST_WASAPI2_SINK_LOCK (self);
- gst_clear_object (&self->client);
- gst_clear_caps (&self->cached_caps);
- GST_WASAPI2_SINK_UNLOCK (self);
-
- G_OBJECT_CLASS (parent_class)->dispose (object);
}
static void
@@ -227,7 +195,6 @@ gst_wasapi2_sink_finalize (GObject * object)
GstWasapi2Sink *self = GST_WASAPI2_SINK (object);
g_free (self->device_id);
- g_mutex_clear (&self->lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@@ -286,29 +253,58 @@ gst_wasapi2_sink_get_property (GObject * object, guint prop_id,
}
}
+static GstStateChangeReturn
+gst_wasapi2_sink_change_state (GstElement * element, GstStateChange transition)
+{
+  GstWasapi2Sink *self = GST_WASAPI2_SINK (element);
+  GstAudioBaseSink *asink = GST_AUDIO_BASE_SINK_CAST (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* If we have pending volume/mute values to set, do here */
+      /* Properties set while no ringbuffer existed were only cached with
+       * the *_changed flags; flush them to the ringbuffer now */
+      GST_OBJECT_LOCK (self);
+      if (asink->ringbuffer) {
+        GstWasapi2RingBuffer *ringbuffer =
+            GST_WASAPI2_RING_BUFFER (asink->ringbuffer);
+
+        if (self->volume_changed) {
+          gst_wasapi2_ring_buffer_set_volume (ringbuffer, self->volume);
+          self->volume_changed = FALSE;
+        }
+
+        if (self->mute_changed) {
+          gst_wasapi2_ring_buffer_set_mute (ringbuffer, self->mute);
+          self->mute_changed = FALSE;
+        }
+      }
+      GST_OBJECT_UNLOCK (self);
+      break;
+    default:
+      break;
+  }
+
+  /* Chain up for the actual state handling */
+  return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+}
+
static GstCaps *
gst_wasapi2_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (bsink);
+ GstAudioBaseSink *asink = GST_AUDIO_BASE_SINK_CAST (bsink);
GstCaps *caps = NULL;
- /* In case of UWP, device activation might not be finished yet */
- if (self->client && !gst_wasapi2_client_ensure_activation (self->client)) {
- GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
- ("Failed to activate device"));
- return NULL;
- }
+ GST_OBJECT_LOCK (bsink);
+ if (asink->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (asink->ringbuffer);
- if (self->client)
- caps = gst_wasapi2_client_get_caps (self->client);
+ gst_object_ref (ringbuffer);
+ GST_OBJECT_UNLOCK (bsink);
- /* store one caps here so that we can return device caps even if
- * audioclient was closed due to unprepare() */
- if (!self->cached_caps && caps)
- self->cached_caps = gst_caps_ref (caps);
-
- if (!caps && self->cached_caps)
- caps = gst_caps_ref (self->cached_caps);
+ /* Get caps might be able to block if device is not activated yet */
+ caps = gst_wasapi2_ring_buffer_get_caps (ringbuffer);
+ } else {
+ GST_OBJECT_UNLOCK (bsink);
+ }
if (!caps)
caps = gst_pad_get_pad_template_caps (bsink->sinkpad);
@@ -320,238 +316,81 @@ gst_wasapi2_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
caps = filtered;
}
- GST_DEBUG_OBJECT (self, "returning caps %" GST_PTR_FORMAT, caps);
+ GST_DEBUG_OBJECT (bsink, "returning caps %" GST_PTR_FORMAT, caps);
return caps;
}
-static gboolean
-gst_wasapi2_sink_open_unlocked (GstAudioSink * asink)
-{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
-
- gst_clear_object (&self->client);
-
- self->client =
- gst_wasapi2_client_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
- self->low_latency, -1, self->device_id, self->dispatcher);
-
- if (!self->client)
- return FALSE;
-
- return TRUE;
-}
-
-static gboolean
-gst_wasapi2_sink_open (GstAudioSink * asink)
-{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
- gboolean ret;
-
- GST_DEBUG_OBJECT (self, "Opening device");
-
- GST_WASAPI2_SINK_LOCK (self);
- ret = gst_wasapi2_sink_open_unlocked (asink);
- GST_WASAPI2_SINK_UNLOCK (self);
-
- if (!ret) {
- GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
- ("Failed to open device"));
- return FALSE;
- }
-
- return TRUE;
-}
-
-static gboolean
-gst_wasapi2_sink_close (GstAudioSink * asink)
-{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
-
- GST_WASAPI2_SINK_LOCK (self);
-
- gst_clear_object (&self->client);
- gst_clear_caps (&self->cached_caps);
- self->started = FALSE;
-
- GST_WASAPI2_SINK_UNLOCK (self);
-
- return TRUE;
-}
-
-static gboolean
-gst_wasapi2_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
-{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
- GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK (asink);
- gboolean ret = FALSE;
- HRESULT hr;
-
- GST_WASAPI2_SINK_LOCK (self);
- if (!self->client && !gst_wasapi2_sink_open_unlocked (asink)) {
- GST_ERROR_OBJECT (self, "No audio client was configured");
- goto done;
- }
-
- if (!gst_wasapi2_client_ensure_activation (self->client)) {
- GST_ERROR_OBJECT (self, "Couldn't activate audio device");
- goto done;
- }
-
- hr = gst_wasapi2_client_open (self->client, spec, bsink->ringbuffer);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (self, "Couldn't open audio client");
- goto done;
- }
-
- /* Set mute and volume here again, maybe when "mute" property was set, audioclient
- * might not be configured at that moment */
- if (self->mute_changed) {
- gst_wasapi2_client_set_mute (self->client, self->mute);
- self->mute_changed = FALSE;
- }
-
- if (self->volume_changed) {
- gst_wasapi2_client_set_volume (self->client, self->volume);
- self->volume_changed = FALSE;
- }
-
- /* Will start IAudioClient on the first write request */
- self->started = FALSE;
- ret = TRUE;
-
-done:
- GST_WASAPI2_SINK_UNLOCK (self);
-
- return ret;
-}
-
-static gboolean
-gst_wasapi2_sink_unprepare (GstAudioSink * asink)
-{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
-
- self->started = FALSE;
-
- /* Will reopen device later prepare() */
- GST_WASAPI2_SINK_LOCK (self);
- if (self->client) {
- gst_wasapi2_client_stop (self->client);
- gst_clear_object (&self->client);
- }
- GST_WASAPI2_SINK_UNLOCK (self);
-
- return TRUE;
-}
-
-static gint
-gst_wasapi2_sink_write (GstAudioSink * asink, gpointer data, guint length)
+static GstAudioRingBuffer *
+gst_wasapi2_sink_create_ringbuffer (GstAudioBaseSink * sink)
{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
- HRESULT hr;
- guint write_len = 0;
+ GstWasapi2Sink *self = GST_WASAPI2_SINK (sink);
+ GstAudioRingBuffer *ringbuffer;
+ gchar *name;
- if (!self->client) {
- GST_ERROR_OBJECT (self, "No audio client was configured");
- return -1;
- }
+ name = g_strdup_printf ("%s-ringbuffer", GST_OBJECT_NAME (sink));
- if (!self->started) {
- HRESULT hr = gst_wasapi2_client_start (self->client);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (self, "Failed to re-start client");
- return -1;
- }
+ ringbuffer =
+ gst_wasapi2_ring_buffer_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
+ self->low_latency, self->device_id, self->dispatcher, name);
- self->started = TRUE;
- }
+ g_free (name);
- hr = gst_wasapi2_client_write (self->client, data, length, &write_len);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (self, "Failed to write");
- return -1;
- }
-
- return (gint) write_len;
-}
-
-static guint
-gst_wasapi2_sink_delay (GstAudioSink * asink)
-{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
- guint32 delay;
- HRESULT hr;
-
- if (!self->client)
- return 0;
-
- hr = gst_wasapi2_client_delay (self->client, &delay);
- if (!gst_wasapi2_result (hr)) {
- GST_WARNING_OBJECT (self, "Failed to get delay");
- return 0;
- }
-
- return delay;
-}
-
-static void
-gst_wasapi2_sink_reset (GstAudioSink * asink)
-{
- GstWasapi2Sink *self = GST_WASAPI2_SINK (asink);
-
- GST_INFO_OBJECT (self, "reset called");
-
- self->started = FALSE;
-
- if (!self->client)
- return;
-
- gst_wasapi2_client_stop (self->client);
+ return ringbuffer;
}
static void
gst_wasapi2_sink_set_mute (GstWasapi2Sink * self, gboolean mute)
{
- GST_WASAPI2_SINK_LOCK (self);
+ GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK_CAST (self);
+ HRESULT hr;
+
+ GST_OBJECT_LOCK (self);
self->mute = mute;
self->mute_changed = TRUE;
- if (self->client) {
- HRESULT hr = gst_wasapi2_client_set_mute (self->client, mute);
+ if (bsink->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (bsink->ringbuffer);
+
+ hr = gst_wasapi2_ring_buffer_set_mute (ringbuffer, mute);
+
if (FAILED (hr)) {
GST_INFO_OBJECT (self, "Couldn't set mute");
} else {
self->mute_changed = FALSE;
}
- } else {
- GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
- GST_WASAPI2_SINK_UNLOCK (self);
+ GST_OBJECT_UNLOCK (self);
}
static gboolean
gst_wasapi2_sink_get_mute (GstWasapi2Sink * self)
{
+ GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK_CAST (self);
gboolean mute;
+ HRESULT hr;
- GST_WASAPI2_SINK_LOCK (self);
+ GST_OBJECT_LOCK (self);
mute = self->mute;
- if (self->client) {
- HRESULT hr = gst_wasapi2_client_get_mute (self->client, &mute);
+ if (bsink->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (bsink->ringbuffer);
+
+ hr = gst_wasapi2_ring_buffer_get_mute (ringbuffer, &mute);
+
if (FAILED (hr)) {
- GST_INFO_OBJECT (self, "Couldn't get mute state");
+ GST_INFO_OBJECT (self, "Couldn't get mute");
} else {
self->mute = mute;
}
- } else {
- GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
- GST_WASAPI2_SINK_UNLOCK (self);
+ GST_OBJECT_UNLOCK (self);
return mute;
}
@@ -559,7 +398,10 @@ gst_wasapi2_sink_get_mute (GstWasapi2Sink * self)
static void
gst_wasapi2_sink_set_volume (GstWasapi2Sink * self, gdouble volume)
{
- GST_WASAPI2_SINK_LOCK (self);
+ GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK_CAST (self);
+ HRESULT hr;
+
+ GST_OBJECT_LOCK (self);
self->volume = volume;
/* clip volume value */
@@ -567,42 +409,47 @@ gst_wasapi2_sink_set_volume (GstWasapi2Sink * self, gdouble volume)
self->volume = MIN (1.0, self->volume);
self->volume_changed = TRUE;
- if (self->client) {
- HRESULT hr =
- gst_wasapi2_client_set_volume (self->client, (gfloat) self->volume);
+ if (bsink->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (bsink->ringbuffer);
+
+ hr = gst_wasapi2_ring_buffer_set_volume (ringbuffer, (gfloat) self->volume);
+
if (FAILED (hr)) {
GST_INFO_OBJECT (self, "Couldn't set volume");
} else {
self->volume_changed = FALSE;
}
- } else {
- GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
- GST_WASAPI2_SINK_UNLOCK (self);
+ GST_OBJECT_UNLOCK (self);
}
static gdouble
gst_wasapi2_sink_get_volume (GstWasapi2Sink * self)
{
+ GstAudioBaseSink *bsink = GST_AUDIO_BASE_SINK_CAST (self);
gfloat volume;
+ HRESULT hr;
- GST_WASAPI2_SINK_LOCK (self);
+ GST_OBJECT_LOCK (self);
volume = (gfloat) self->volume;
- if (self->client) {
- HRESULT hr = gst_wasapi2_client_get_volume (self->client, &volume);
+ if (bsink->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (bsink->ringbuffer);
+
+ hr = gst_wasapi2_ring_buffer_get_volume (ringbuffer, &volume);
+
if (FAILED (hr)) {
- GST_INFO_OBJECT (self, "Couldn't get volume");
+ GST_INFO_OBJECT (self, "Couldn't get volume");
} else {
self->volume = volume;
}
- } else {
- GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
- GST_WASAPI2_SINK_UNLOCK (self);
+ GST_OBJECT_UNLOCK (self);
volume = MAX (0.0, volume);
volume = MIN (1.0, volume);
diff --git a/sys/wasapi2/gstwasapi2sink.h b/sys/wasapi2/gstwasapi2sink.h
index 433dcb656..5e618379e 100644
--- a/sys/wasapi2/gstwasapi2sink.h
+++ b/sys/wasapi2/gstwasapi2sink.h
@@ -27,7 +27,7 @@ G_BEGIN_DECLS
#define GST_TYPE_WASAPI2_SINK (gst_wasapi2_sink_get_type ())
G_DECLARE_FINAL_TYPE (GstWasapi2Sink,
- gst_wasapi2_sink, GST, WASAPI2_SINK, GstAudioSink);
+ gst_wasapi2_sink, GST, WASAPI2_SINK, GstAudioBaseSink);
G_END_DECLS
diff --git a/sys/wasapi2/gstwasapi2src.c b/sys/wasapi2/gstwasapi2src.c
index fa5616936..446803aac 100644
--- a/sys/wasapi2/gstwasapi2src.c
+++ b/sys/wasapi2/gstwasapi2src.c
@@ -43,7 +43,7 @@
#include "gstwasapi2src.h"
#include "gstwasapi2util.h"
-#include "gstwasapi2client.h"
+#include "gstwasapi2ringbuffer.h"
GST_DEBUG_CATEGORY_STATIC (gst_wasapi2_src_debug);
#define GST_CAT_DEFAULT gst_wasapi2_src_debug
@@ -57,9 +57,6 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
#define DEFAULT_MUTE FALSE
#define DEFAULT_VOLUME 1.0
-#define GST_WASAPI2_SRC_LOCK(s) g_mutex_lock(&(s)->lock)
-#define GST_WASAPI2_SRC_UNLOCK(s) g_mutex_unlock(&(s)->lock)
-
enum
{
PROP_0,
@@ -72,11 +69,7 @@ enum
struct _GstWasapi2Src
{
- GstAudioSrc parent;
-
- GstWasapi2Client *client;
- GstCaps *cached_caps;
- gboolean started;
+ GstAudioBaseSrc parent;
/* properties */
gchar *device_id;
@@ -87,29 +80,20 @@ struct _GstWasapi2Src
gboolean mute_changed;
gboolean volume_changed;
-
- /* to protect audioclient from set/get property */
- GMutex lock;
};
-static void gst_wasapi2_src_dispose (GObject * object);
static void gst_wasapi2_src_finalize (GObject * object);
static void gst_wasapi2_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_wasapi2_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter);
+static GstStateChangeReturn gst_wasapi2_src_change_state (GstElement *
+ element, GstStateChange transition);
-static gboolean gst_wasapi2_src_open (GstAudioSrc * asrc);
-static gboolean gst_wasapi2_src_close (GstAudioSrc * asrc);
-static gboolean gst_wasapi2_src_prepare (GstAudioSrc * asrc,
- GstAudioRingBufferSpec * spec);
-static gboolean gst_wasapi2_src_unprepare (GstAudioSrc * asrc);
-static guint gst_wasapi2_src_read (GstAudioSrc * asrc, gpointer data,
- guint length, GstClockTime * timestamp);
-static guint gst_wasapi2_src_delay (GstAudioSrc * asrc);
-static void gst_wasapi2_src_reset (GstAudioSrc * asrc);
+static GstCaps *gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter);
+static GstAudioRingBuffer *gst_wasapi2_src_create_ringbuffer (GstAudioBaseSrc *
+ src);
static void gst_wasapi2_src_set_mute (GstWasapi2Src * self, gboolean mute);
static gboolean gst_wasapi2_src_get_mute (GstWasapi2Src * self);
@@ -117,7 +101,8 @@ static void gst_wasapi2_src_set_volume (GstWasapi2Src * self, gdouble volume);
static gdouble gst_wasapi2_src_get_volume (GstWasapi2Src * self);
#define gst_wasapi2_src_parent_class parent_class
-G_DEFINE_TYPE_WITH_CODE (GstWasapi2Src, gst_wasapi2_src, GST_TYPE_AUDIO_SRC,
+G_DEFINE_TYPE_WITH_CODE (GstWasapi2Src, gst_wasapi2_src,
+ GST_TYPE_AUDIO_BASE_SRC,
G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
static void
@@ -126,9 +111,8 @@ gst_wasapi2_src_class_init (GstWasapi2SrcClass * klass)
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
- GstAudioSrcClass *audiosrc_class = GST_AUDIO_SRC_CLASS (klass);
+ GstAudioBaseSrcClass *audiobasesrc_class = GST_AUDIO_BASE_SRC_CLASS (klass);
- gobject_class->dispose = gst_wasapi2_src_dispose;
gobject_class->finalize = gst_wasapi2_src_finalize;
gobject_class->set_property = gst_wasapi2_src_set_property;
gobject_class->get_property = gst_wasapi2_src_get_property;
@@ -181,15 +165,13 @@ gst_wasapi2_src_class_init (GstWasapi2SrcClass * klass)
"Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>, "
"Seungha Yang <seungha@centricular.com>");
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_wasapi2_src_change_state);
+
basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_wasapi2_src_get_caps);
- audiosrc_class->open = GST_DEBUG_FUNCPTR (gst_wasapi2_src_open);
- audiosrc_class->close = GST_DEBUG_FUNCPTR (gst_wasapi2_src_close);
- audiosrc_class->read = GST_DEBUG_FUNCPTR (gst_wasapi2_src_read);
- audiosrc_class->prepare = GST_DEBUG_FUNCPTR (gst_wasapi2_src_prepare);
- audiosrc_class->unprepare = GST_DEBUG_FUNCPTR (gst_wasapi2_src_unprepare);
- audiosrc_class->delay = GST_DEBUG_FUNCPTR (gst_wasapi2_src_delay);
- audiosrc_class->reset = GST_DEBUG_FUNCPTR (gst_wasapi2_src_reset);
+ audiobasesrc_class->create_ringbuffer =
+ GST_DEBUG_FUNCPTR (gst_wasapi2_src_create_ringbuffer);
GST_DEBUG_CATEGORY_INIT (gst_wasapi2_src_debug, "wasapi2src",
0, "Windows audio session API source");
@@ -201,21 +183,6 @@ gst_wasapi2_src_init (GstWasapi2Src * self)
self->mute = DEFAULT_MUTE;
self->volume = DEFAULT_VOLUME;
self->low_latency = DEFAULT_LOW_LATENCY;
-
- g_mutex_init (&self->lock);
-}
-
-static void
-gst_wasapi2_src_dispose (GObject * object)
-{
- GstWasapi2Src *self = GST_WASAPI2_SRC (object);
-
- GST_WASAPI2_SRC_LOCK (self);
- gst_clear_object (&self->client);
- gst_clear_caps (&self->cached_caps);
- GST_WASAPI2_SRC_UNLOCK (self);
-
- G_OBJECT_CLASS (parent_class)->dispose (object);
}
static void
@@ -224,7 +191,6 @@ gst_wasapi2_src_finalize (GObject * object)
GstWasapi2Src *self = GST_WASAPI2_SRC (object);
g_free (self->device_id);
- g_mutex_clear (&self->lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@@ -283,29 +249,59 @@ gst_wasapi2_src_get_property (GObject * object, guint prop_id,
}
}
+static GstStateChangeReturn
+gst_wasapi2_src_change_state (GstElement * element, GstStateChange transition)
+{
+ GstWasapi2Src *self = GST_WASAPI2_SRC (element);
+ GstAudioBaseSrc *asrc = GST_AUDIO_BASE_SRC_CAST (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* If we have pending volume/mute values to set, do here */
+ GST_OBJECT_LOCK (self);
+ if (asrc->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (asrc->ringbuffer);
+
+ if (self->volume_changed) {
+ gst_wasapi2_ring_buffer_set_volume (ringbuffer, self->volume);
+ self->volume_changed = FALSE;
+ }
+
+ if (self->mute_changed) {
+ gst_wasapi2_ring_buffer_set_mute (ringbuffer, self->mute);
+ self->mute_changed = FALSE;
+ }
+ }
+ GST_OBJECT_UNLOCK (self);
+ break;
+ default:
+ break;
+ }
+
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+}
+
static GstCaps *
gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
- GstWasapi2Src *self = GST_WASAPI2_SRC (bsrc);
+ GstAudioBaseSrc *asrc = GST_AUDIO_BASE_SRC_CAST (bsrc);
GstCaps *caps = NULL;
- /* In case of UWP, device activation might not be finished yet */
- if (self->client && !gst_wasapi2_client_ensure_activation (self->client)) {
- GST_ELEMENT_ERROR (self, RESOURCE, OPEN_WRITE, (NULL),
- ("Failed to activate device"));
- return NULL;
- }
+ GST_OBJECT_LOCK (bsrc);
+ if (asrc->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (asrc->ringbuffer);
- if (self->client)
- caps = gst_wasapi2_client_get_caps (self->client);
+ gst_object_ref (ringbuffer);
+ GST_OBJECT_UNLOCK (bsrc);
- /* store one caps here so that we can return device caps even if
- * audioclient was closed due to unprepare() */
- if (!self->cached_caps && caps)
- self->cached_caps = gst_caps_ref (caps);
-
- if (!caps && self->cached_caps)
- caps = gst_caps_ref (self->cached_caps);
+ /* Get caps might be able to block if device is not activated yet */
+ caps = gst_wasapi2_ring_buffer_get_caps (ringbuffer);
+ gst_object_unref (ringbuffer);
+ } else {
+ GST_OBJECT_UNLOCK (bsrc);
+ }
if (!caps)
caps = gst_pad_get_pad_template_caps (bsrc->srcpad);
@@ -317,239 +313,79 @@ gst_wasapi2_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
caps = filtered;
}
- GST_DEBUG_OBJECT (self, "returning caps %" GST_PTR_FORMAT, caps);
+ GST_DEBUG_OBJECT (bsrc, "returning caps %" GST_PTR_FORMAT, caps);
return caps;
}
-static gboolean
-gst_wasapi2_src_open_unlocked (GstAudioSrc * asrc)
-{
- GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
-
- gst_clear_object (&self->client);
-
- self->client =
- gst_wasapi2_client_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE,
- self->low_latency, -1, self->device_id, self->dispatcher);
-
- if (!self->client)
- return FALSE;
-
- return TRUE;
-}
-
-static gboolean
-gst_wasapi2_src_open (GstAudioSrc * asrc)
-{
- GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
- gboolean ret;
-
- GST_DEBUG_OBJECT (self, "Opening device");
-
- GST_WASAPI2_SRC_LOCK (self);
- ret = gst_wasapi2_src_open_unlocked (asrc);
- GST_WASAPI2_SRC_UNLOCK (self);
-
- if (!ret) {
- GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ, (NULL),
- ("Failed to open device"));
- return FALSE;
- }
-
- return TRUE;
-}
-
-static gboolean
-gst_wasapi2_src_close (GstAudioSrc * asrc)
-{
- GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
-
- GST_WASAPI2_SRC_LOCK (self);
-
- gst_clear_object (&self->client);
- gst_clear_caps (&self->cached_caps);
- self->started = FALSE;
-
- GST_WASAPI2_SRC_UNLOCK (self);
-
- return TRUE;
-}
-
-static gboolean
-gst_wasapi2_src_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
-{
- GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
- GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC (asrc);
- gboolean ret = FALSE;
- HRESULT hr;
-
- GST_WASAPI2_SRC_LOCK (self);
- if (!self->client && !gst_wasapi2_src_open_unlocked (asrc)) {
- GST_ERROR_OBJECT (self, "No audio client was configured");
- goto done;
- }
-
- if (!gst_wasapi2_client_ensure_activation (self->client)) {
- GST_ERROR_OBJECT (self, "Couldn't activate audio device");
- goto done;
- }
-
- hr = gst_wasapi2_client_open (self->client, spec, bsrc->ringbuffer);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (self, "Couldn't open audio client");
- goto done;
- }
-
- /* Set mute and volume here again, maybe when "mute" property was set, audioclient
- * might not be configured at that moment */
- if (self->mute_changed) {
- gst_wasapi2_client_set_mute (self->client, self->mute);
- self->mute_changed = FALSE;
- }
-
- if (self->volume_changed) {
- gst_wasapi2_client_set_volume (self->client, self->volume);
- self->volume_changed = FALSE;
- }
-
- /* Will start IAudioClient on the first read request */
- self->started = FALSE;
- ret = TRUE;
-
-done:
- GST_WASAPI2_SRC_UNLOCK (self);
-
- return ret;
-}
-
-static gboolean
-gst_wasapi2_src_unprepare (GstAudioSrc * asrc)
-{
- GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
-
- self->started = FALSE;
-
- /* Will reopen device later prepare() */
- GST_WASAPI2_SRC_LOCK (self);
- if (self->client) {
- gst_wasapi2_client_stop (self->client);
- gst_clear_object (&self->client);
- }
- GST_WASAPI2_SRC_UNLOCK (self);
-
- return TRUE;
-}
-
-static guint
-gst_wasapi2_src_read (GstAudioSrc * asrc, gpointer data, guint length,
- GstClockTime * timestamp)
+static GstAudioRingBuffer *
+gst_wasapi2_src_create_ringbuffer (GstAudioBaseSrc * src)
{
- GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
- HRESULT hr;
- guint read_len = 0;
+ GstWasapi2Src *self = GST_WASAPI2_SRC (src);
+ GstAudioRingBuffer *ringbuffer;
+ gchar *name;
- if (!self->client) {
- GST_ERROR_OBJECT (self, "No audio client was configured");
- return -1;
- }
-
- if (!self->started) {
- HRESULT hr = gst_wasapi2_client_start (self->client);
- if (!gst_wasapi2_result (hr)) {
- GST_ERROR_OBJECT (self, "Failed to re-start client");
- return -1;
- }
+ name = g_strdup_printf ("%s-ringbuffer", GST_OBJECT_NAME (src));
- self->started = TRUE;
- }
+ ringbuffer =
+ gst_wasapi2_ring_buffer_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE,
+ self->low_latency, self->device_id, self->dispatcher, name);
+ g_free (name);
- hr = gst_wasapi2_client_read (self->client, data, length, &read_len);
- if (!gst_wasapi2_result (hr)) {
- GST_WARNING_OBJECT (self, "Failed to read data");
- return -1;
- }
-
- return read_len;
-}
-
-static guint
-gst_wasapi2_src_delay (GstAudioSrc * asrc)
-{
- GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
- guint32 delay;
- HRESULT hr;
-
- if (!self->client)
- return 0;
-
- hr = gst_wasapi2_client_delay (self->client, &delay);
- if (!gst_wasapi2_result (hr)) {
- GST_WARNING_OBJECT (self, "Failed to get delay");
- return 0;
- }
-
- return delay;
-}
-
-static void
-gst_wasapi2_src_reset (GstAudioSrc * asrc)
-{
- GstWasapi2Src *self = GST_WASAPI2_SRC (asrc);
-
- GST_DEBUG_OBJECT (self, "reset called");
-
- self->started = FALSE;
-
- if (!self->client)
- return;
-
- gst_wasapi2_client_stop (self->client);
+ return ringbuffer;
}
static void
gst_wasapi2_src_set_mute (GstWasapi2Src * self, gboolean mute)
{
- GST_WASAPI2_SRC_LOCK (self);
+ GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
+ HRESULT hr;
+
+ GST_OBJECT_LOCK (self);
self->mute = mute;
self->mute_changed = TRUE;
- if (self->client) {
- HRESULT hr = gst_wasapi2_client_set_mute (self->client, mute);
+ if (bsrc->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);
+
+ hr = gst_wasapi2_ring_buffer_set_mute (ringbuffer, mute);
if (FAILED (hr)) {
GST_INFO_OBJECT (self, "Couldn't set mute");
} else {
self->mute_changed = FALSE;
}
- } else {
- GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
- GST_WASAPI2_SRC_UNLOCK (self);
+ GST_OBJECT_UNLOCK (self);
}
static gboolean
gst_wasapi2_src_get_mute (GstWasapi2Src * self)
{
+ GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
gboolean mute;
+ HRESULT hr;
- GST_WASAPI2_SRC_LOCK (self);
+ GST_OBJECT_LOCK (self);
mute = self->mute;
- if (self->client) {
- HRESULT hr = gst_wasapi2_client_get_mute (self->client, &mute);
+ if (bsrc->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);
+
+ hr = gst_wasapi2_ring_buffer_get_mute (ringbuffer, &mute);
+
if (FAILED (hr)) {
- GST_INFO_OBJECT (self, "Couldn't get mute state");
+ GST_INFO_OBJECT (self, "Couldn't get mute");
} else {
self->mute = mute;
}
- } else {
- GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
- GST_WASAPI2_SRC_UNLOCK (self);
+ GST_OBJECT_UNLOCK (self);
return mute;
}
@@ -557,7 +393,10 @@ gst_wasapi2_src_get_mute (GstWasapi2Src * self)
static void
gst_wasapi2_src_set_volume (GstWasapi2Src * self, gdouble volume)
{
- GST_WASAPI2_SRC_LOCK (self);
+ GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
+ HRESULT hr;
+
+ GST_OBJECT_LOCK (self);
self->volume = volume;
/* clip volume value */
@@ -565,42 +404,47 @@ gst_wasapi2_src_set_volume (GstWasapi2Src * self, gdouble volume)
self->volume = MIN (1.0, self->volume);
self->volume_changed = TRUE;
- if (self->client) {
- HRESULT hr =
- gst_wasapi2_client_set_volume (self->client, (gfloat) self->volume);
+ if (bsrc->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);
+
+ hr = gst_wasapi2_ring_buffer_set_volume (ringbuffer, (gfloat) self->volume);
+
if (FAILED (hr)) {
GST_INFO_OBJECT (self, "Couldn't set volume");
} else {
self->volume_changed = FALSE;
}
- } else {
- GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
- GST_WASAPI2_SRC_UNLOCK (self);
+ GST_OBJECT_UNLOCK (self);
}
static gdouble
gst_wasapi2_src_get_volume (GstWasapi2Src * self)
{
+ GstAudioBaseSrc *bsrc = GST_AUDIO_BASE_SRC_CAST (self);
gfloat volume;
+ HRESULT hr;
- GST_WASAPI2_SRC_LOCK (self);
+ GST_OBJECT_LOCK (self);
volume = (gfloat) self->volume;
- if (self->client) {
- HRESULT hr = gst_wasapi2_client_get_volume (self->client, &volume);
+ if (bsrc->ringbuffer) {
+ GstWasapi2RingBuffer *ringbuffer =
+ GST_WASAPI2_RING_BUFFER (bsrc->ringbuffer);
+
+ hr = gst_wasapi2_ring_buffer_get_volume (ringbuffer, &volume);
+
if (FAILED (hr)) {
- GST_INFO_OBJECT (self, "Couldn't get volume");
+ GST_INFO_OBJECT (self, "Couldn't get volume");
} else {
self->volume = volume;
}
- } else {
- GST_DEBUG_OBJECT (self, "audio client is not configured yet");
}
- GST_WASAPI2_SRC_UNLOCK (self);
+ GST_OBJECT_UNLOCK (self);
volume = MAX (0.0, volume);
volume = MIN (1.0, volume);
diff --git a/sys/wasapi2/gstwasapi2src.h b/sys/wasapi2/gstwasapi2src.h
index c832db5e8..2b3a0a0d6 100644
--- a/sys/wasapi2/gstwasapi2src.h
+++ b/sys/wasapi2/gstwasapi2src.h
@@ -27,7 +27,7 @@ G_BEGIN_DECLS
#define GST_TYPE_WASAPI2_SRC (gst_wasapi2_src_get_type ())
G_DECLARE_FINAL_TYPE (GstWasapi2Src,
- gst_wasapi2_src, GST, WASAPI2_SRC, GstAudioSrc);
+ gst_wasapi2_src, GST, WASAPI2_SRC, GstAudioBaseSrc);
G_END_DECLS
diff --git a/sys/wasapi2/gstwasapi2util.c b/sys/wasapi2/gstwasapi2util.c
index 81109d020..d19c96633 100644
--- a/sys/wasapi2/gstwasapi2util.c
+++ b/sys/wasapi2/gstwasapi2util.c
@@ -25,10 +25,56 @@
#include "gstwasapi2util.h"
#include <audioclient.h>
+#include <mmdeviceapi.h>
GST_DEBUG_CATEGORY_EXTERN (gst_wasapi2_debug);
#define GST_CAT_DEFAULT gst_wasapi2_debug
+/* Desktop only defines */
+#ifndef KSAUDIO_SPEAKER_MONO
+#define KSAUDIO_SPEAKER_MONO (SPEAKER_FRONT_CENTER)
+#endif
+#ifndef KSAUDIO_SPEAKER_1POINT1
+#define KSAUDIO_SPEAKER_1POINT1 (SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY)
+#endif
+#ifndef KSAUDIO_SPEAKER_STEREO
+#define KSAUDIO_SPEAKER_STEREO (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT)
+#endif
+#ifndef KSAUDIO_SPEAKER_2POINT1
+#define KSAUDIO_SPEAKER_2POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_LOW_FREQUENCY)
+#endif
+#ifndef KSAUDIO_SPEAKER_3POINT0
+#define KSAUDIO_SPEAKER_3POINT0 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER)
+#endif
+#ifndef KSAUDIO_SPEAKER_3POINT1
+#define KSAUDIO_SPEAKER_3POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
+ SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY)
+#endif
+#ifndef KSAUDIO_SPEAKER_QUAD
+#define KSAUDIO_SPEAKER_QUAD (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
+ SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT)
+#endif
+#define KSAUDIO_SPEAKER_SURROUND (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
+ SPEAKER_FRONT_CENTER | SPEAKER_BACK_CENTER)
+#ifndef KSAUDIO_SPEAKER_5POINT0
+#define KSAUDIO_SPEAKER_5POINT0 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | \
+ SPEAKER_SIDE_LEFT | SPEAKER_SIDE_RIGHT)
+#endif
+#define KSAUDIO_SPEAKER_5POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
+ SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | \
+ SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT)
+#ifndef KSAUDIO_SPEAKER_7POINT0
+#define KSAUDIO_SPEAKER_7POINT0 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | \
+ SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | \
+ SPEAKER_SIDE_LEFT | SPEAKER_SIDE_RIGHT)
+#endif
+#ifndef KSAUDIO_SPEAKER_7POINT1
+#define KSAUDIO_SPEAKER_7POINT1 (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | \
+ SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | \
+ SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | \
+ SPEAKER_FRONT_LEFT_OF_CENTER | SPEAKER_FRONT_RIGHT_OF_CENTER)
+#endif
+
/* *INDENT-OFF* */
static struct
{
@@ -57,6 +103,27 @@ static struct
{SPEAKER_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER},
{SPEAKER_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT}
};
+
+static DWORD default_ch_masks[] = {
+ 0,
+ KSAUDIO_SPEAKER_MONO,
+ /* 2ch */
+ KSAUDIO_SPEAKER_STEREO,
+ /* 2.1ch */
+ /* KSAUDIO_SPEAKER_3POINT0 ? */
+ KSAUDIO_SPEAKER_2POINT1,
+ /* 4ch */
+ /* KSAUDIO_SPEAKER_3POINT1 or KSAUDIO_SPEAKER_SURROUND ? */
+ KSAUDIO_SPEAKER_QUAD,
+ /* 5ch */
+ KSAUDIO_SPEAKER_5POINT0,
+ /* 5.1ch */
+ KSAUDIO_SPEAKER_5POINT1,
+ /* 7ch */
+ KSAUDIO_SPEAKER_7POINT0,
+ /* 7.1ch */
+ KSAUDIO_SPEAKER_7POINT1,
+};
/* *INDENT-ON* */
static const gchar *
@@ -192,3 +259,173 @@ _gst_wasapi2_result (HRESULT hr, GstDebugCategory * cat, const gchar * file,
return SUCCEEDED (hr);
#endif
}
+
+static void
+gst_wasapi_util_channel_position_all_none (guint channels,
+ GstAudioChannelPosition * position)
+{
+ guint i;
+
+ for (i = 0; i < channels; i++)
+ position[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
+}
+
+guint64
+gst_wasapi2_util_waveformatex_to_channel_mask (WAVEFORMATEX * format,
+ GstAudioChannelPosition ** out_position)
+{
+ guint i, ch;
+ guint64 mask = 0;
+ GstAudioChannelPosition *pos = NULL;
+ WORD nChannels = 0;
+ DWORD dwChannelMask = 0;
+
+ nChannels = format->nChannels;
+ if (format->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
+ WAVEFORMATEXTENSIBLE *extensible = (WAVEFORMATEXTENSIBLE *) format;
+ dwChannelMask = extensible->dwChannelMask;
+ }
+
+ if (out_position)
+ *out_position = NULL;
+
+ if (nChannels > 2 && !dwChannelMask) {
+ GST_WARNING ("Unknown channel mask value for %d channel stream", nChannels);
+
+ if (nChannels >= G_N_ELEMENTS (default_ch_masks)) {
+ GST_ERROR ("Too many channels %d", nChannels);
+ return 0;
+ }
+
+ dwChannelMask = default_ch_masks[nChannels];
+ }
+
+ pos = g_new (GstAudioChannelPosition, nChannels);
+ gst_wasapi_util_channel_position_all_none (nChannels, pos);
+
+ /* Too many channels, have to assume that they are all non-positional */
+ if (nChannels > G_N_ELEMENTS (wasapi_to_gst_pos)) {
+ GST_INFO ("Got too many (%i) channels, assuming non-positional", nChannels);
+ goto out;
+ }
+
+ /* Too many bits in the channel mask, and the bits don't match nChannels */
+ if (dwChannelMask >> (G_N_ELEMENTS (wasapi_to_gst_pos) + 1) != 0) {
+ GST_WARNING ("Too many bits in channel mask (%lu), assuming "
+ "non-positional", dwChannelMask);
+ goto out;
+ }
+
+ /* Map WASAPI's channel mask to Gstreamer's channel mask and positions.
+ * If the no. of bits in the mask > nChannels, we will ignore the extra. */
+ for (i = 0, ch = 0; i < G_N_ELEMENTS (wasapi_to_gst_pos) && ch < nChannels;
+ i++) {
+ if (!(dwChannelMask & wasapi_to_gst_pos[i].wasapi_pos))
+ /* no match, try next */
+ continue;
+ mask |= G_GUINT64_CONSTANT (1) << wasapi_to_gst_pos[i].gst_pos;
+ pos[ch++] = wasapi_to_gst_pos[i].gst_pos;
+ }
+
+ /* XXX: Warn if some channel masks couldn't be mapped? */
+
+ GST_DEBUG ("Converted WASAPI mask 0x%" G_GINT64_MODIFIER "x -> 0x%"
+ G_GINT64_MODIFIER "x", (guint64) dwChannelMask, (guint64) mask);
+
+out:
+ if (out_position) {
+ *out_position = pos;
+ } else {
+ g_free (pos);
+ }
+
+ return mask;
+}
+
+const gchar *
+gst_wasapi2_util_waveformatex_to_audio_format (WAVEFORMATEX * format)
+{
+ const gchar *fmt_str = NULL;
+ GstAudioFormat fmt = GST_AUDIO_FORMAT_UNKNOWN;
+
+ switch (format->wFormatTag) {
+ case WAVE_FORMAT_PCM:
+ fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
+ format->wBitsPerSample, format->wBitsPerSample);
+ break;
+ case WAVE_FORMAT_IEEE_FLOAT:
+ if (format->wBitsPerSample == 32)
+ fmt = GST_AUDIO_FORMAT_F32LE;
+ else if (format->wBitsPerSample == 64)
+ fmt = GST_AUDIO_FORMAT_F64LE;
+ break;
+ case WAVE_FORMAT_EXTENSIBLE:
+ {
+ WAVEFORMATEXTENSIBLE *ex = (WAVEFORMATEXTENSIBLE *) format;
+ if (IsEqualGUID (&ex->SubFormat, &KSDATAFORMAT_SUBTYPE_PCM)) {
+ fmt = gst_audio_format_build_integer (TRUE, G_LITTLE_ENDIAN,
+ format->wBitsPerSample, ex->Samples.wValidBitsPerSample);
+ } else if (IsEqualGUID (&ex->SubFormat, &KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)) {
+ if (format->wBitsPerSample == 32
+ && ex->Samples.wValidBitsPerSample == 32)
+ fmt = GST_AUDIO_FORMAT_F32LE;
+ else if (format->wBitsPerSample == 64 &&
+ ex->Samples.wValidBitsPerSample == 64)
+ fmt = GST_AUDIO_FORMAT_F64LE;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (fmt != GST_AUDIO_FORMAT_UNKNOWN)
+ fmt_str = gst_audio_format_to_string (fmt);
+
+ return fmt_str;
+}
+
+gboolean
+gst_wasapi2_util_parse_waveformatex (WAVEFORMATEX * format,
+ GstCaps * template_caps, GstCaps ** out_caps,
+ GstAudioChannelPosition ** out_positions)
+{
+ const gchar *afmt;
+ guint64 channel_mask;
+
+ *out_caps = NULL;
+
+ /* TODO: handle SPDIF and other encoded formats */
+
+ /* 1 or 2 channels <= 16 bits sample size OR
+ * 1 or 2 channels > 16 bits sample size or >2 channels */
+ if (format->wFormatTag != WAVE_FORMAT_PCM &&
+ format->wFormatTag != WAVE_FORMAT_IEEE_FLOAT &&
+ format->wFormatTag != WAVE_FORMAT_EXTENSIBLE)
+ /* Unhandled format tag */
+ return FALSE;
+
+ /* WASAPI can only tell us one canonical mix format that it will accept. The
+ * alternative is calling IsFormatSupported on all combinations of formats.
+ * Instead, it's simpler and faster to require conversion inside gstreamer */
+ afmt = gst_wasapi2_util_waveformatex_to_audio_format (format);
+ if (afmt == NULL)
+ return FALSE;
+
+ *out_caps = gst_caps_copy (template_caps);
+
+ channel_mask = gst_wasapi2_util_waveformatex_to_channel_mask (format,
+ out_positions);
+
+ gst_caps_set_simple (*out_caps,
+ "format", G_TYPE_STRING, afmt,
+ "channels", G_TYPE_INT, format->nChannels,
+ "rate", G_TYPE_INT, format->nSamplesPerSec, NULL);
+
+ if (channel_mask) {
+ gst_caps_set_simple (*out_caps,
+ "channel-mask", GST_TYPE_BITMASK, channel_mask, NULL);
+ }
+
+ return TRUE;
+}
diff --git a/sys/wasapi2/gstwasapi2util.h b/sys/wasapi2/gstwasapi2util.h
index 67156f952..cb977fb94 100644
--- a/sys/wasapi2/gstwasapi2util.h
+++ b/sys/wasapi2/gstwasapi2util.h
@@ -23,6 +23,7 @@
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <windows.h>
+#include <initguid.h>
#include <audioclient.h>
G_BEGIN_DECLS
@@ -34,6 +35,13 @@ G_BEGIN_DECLS
"rate = " GST_AUDIO_RATE_RANGE ", " \
"channels = " GST_AUDIO_CHANNELS_RANGE
+#define GST_WASAPI2_CLEAR_COM(obj) G_STMT_START { \
+ if (obj) { \
+ (obj)->Release (); \
+ (obj) = NULL; \
+ } \
+ } G_STMT_END
+
gboolean _gst_wasapi2_result (HRESULT hr,
GstDebugCategory * cat,
const gchar * file,
@@ -43,6 +51,16 @@ gboolean _gst_wasapi2_result (HRESULT hr,
#define gst_wasapi2_result(result) \
_gst_wasapi2_result (result, GST_CAT_DEFAULT, __FILE__, GST_FUNCTION, __LINE__)
+guint64 gst_wasapi2_util_waveformatex_to_channel_mask (WAVEFORMATEX * format,
+ GstAudioChannelPosition ** out_position);
+
+const gchar * gst_wasapi2_util_waveformatex_to_audio_format (WAVEFORMATEX * format);
+
+gboolean gst_wasapi2_util_parse_waveformatex (WAVEFORMATEX * format,
+ GstCaps * template_caps,
+ GstCaps ** out_caps,
+ GstAudioChannelPosition ** out_positions);
+
G_END_DECLS
#endif /* __GST_WASAPI_UTIL_H__ */
diff --git a/sys/wasapi2/meson.build b/sys/wasapi2/meson.build
index 84d37c9ee..5edbfff2d 100644
--- a/sys/wasapi2/meson.build
+++ b/sys/wasapi2/meson.build
@@ -4,6 +4,7 @@ wasapi2_sources = [
'gstwasapi2util.c',
'gstwasapi2client.cpp',
'gstwasapi2device.c',
+ 'gstwasapi2ringbuffer.cpp',
'plugin.c',
]
@@ -26,7 +27,8 @@ ole32_dep = cc.find_library('ole32', required : get_option('wasapi2'))
ksuser_dep = cc.find_library('ksuser', required : get_option('wasapi2'))
runtimeobject_dep = cc.find_library('runtimeobject', required : get_option('wasapi2'))
mmdeviceapi_dep = cc.find_library('mmdevapi', required : get_option('wasapi2'))
-wasapi2_dep = [ole32_dep, ksuser_dep, runtimeobject_dep, mmdeviceapi_dep]
+mfplat_dep = cc.find_library('mfplat', required : get_option('wasapi2'))
+wasapi2_dep = [ole32_dep, ksuser_dep, runtimeobject_dep, mmdeviceapi_dep, mfplat_dep]
extra_args = []
foreach dep: wasapi2_dep
diff --git a/sys/wasapi2/plugin.c b/sys/wasapi2/plugin.c
index 18394ee0a..39cdf4c14 100644
--- a/sys/wasapi2/plugin.c
+++ b/sys/wasapi2/plugin.c
@@ -26,14 +26,23 @@
#include "gstwasapi2sink.h"
#include "gstwasapi2src.h"
#include "gstwasapi2device.h"
+#include "gstwasapi2util.h"
+#include <mfapi.h>
GST_DEBUG_CATEGORY (gst_wasapi2_debug);
GST_DEBUG_CATEGORY (gst_wasapi2_client_debug);
+static void
+plugin_deinit (gpointer data)
+{
+ MFShutdown ();
+}
+
static gboolean
plugin_init (GstPlugin * plugin)
{
GstRank rank = GST_RANK_SECONDARY;
+ HRESULT hr;
/**
* plugin-wasapi2:
@@ -41,6 +50,11 @@ plugin_init (GstPlugin * plugin)
* Since: 1.18
*/
+ hr = MFStartup (MF_VERSION, MFSTARTUP_NOSOCKET);
+ if (!gst_wasapi2_result (hr)) {
+ GST_WARNING ("MFStartup failure, hr: 0x%x", hr);
+ return TRUE;
+ }
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP) && !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
/* If we are building for UWP, wasapi2 plugin should have the highest rank */
rank = GST_RANK_PRIMARY + 1;
@@ -56,6 +70,10 @@ plugin_init (GstPlugin * plugin)
gst_device_provider_register (plugin, "wasapi2deviceprovider",
rank, GST_TYPE_WASAPI2_DEVICE_PROVIDER);
+ g_object_set_data_full (G_OBJECT (plugin),
+ "plugin-wasapi2-shutdown", "shutdown-data",
+ (GDestroyNotify) plugin_deinit);
+
return TRUE;
}