author	Youness Alaoui <youness.alaoui@collabora.co.uk>	2012-09-10 16:09:26 -0400
committer	Olivier CrĂȘte <olivier.crete@collabora.com>	2012-09-10 16:09:26 -0400
commit	1ba24e1306dd1623bae3400e12e073c9f6fc8d51 (patch)
tree	fe2df82a40468ac40fe25a58fc7479a7e2e3a3d4 /sys
parent	1ef529601b0a9dfd9d278e37dbe141f21b9b8525 (diff)
download	gstreamer-plugins-bad-1ba24e1306dd1623bae3400e12e073c9f6fc8d51.tar.gz
UVC H264 plugin
Diffstat (limited to 'sys')
-rw-r--r--	sys/Makefile.am	10
-rw-r--r--	sys/uvch264/Makefile.am	48
-rw-r--r--	sys/uvch264/gstuvch264-marshal.list	3
-rw-r--r--	sys/uvch264/gstuvch264.c	50
-rw-r--r--	sys/uvch264/gstuvch264_mjpgdemux.c	723
-rw-r--r--	sys/uvch264/gstuvch264_mjpgdemux.h	66
-rw-r--r--	sys/uvch264/gstuvch264_src.c	3180
-rw-r--r--	sys/uvch264/gstuvch264_src.h	166
-rw-r--r--	sys/uvch264/uvc_h264.c	122
-rw-r--r--	sys/uvch264/uvc_h264.h	335
10 files changed, 4701 insertions, 2 deletions
diff --git a/sys/Makefile.am b/sys/Makefile.am
index d1a29b344..d79d22325 100644
--- a/sys/Makefile.am
+++ b/sys/Makefile.am
@@ -130,9 +130,15 @@ else
WINSCREENCAP_DIR=
endif
-SUBDIRS = $(ACM_DIR) $(APPLE_MEDIA_DIR) $(AVC_DIR) $(D3DVIDEOSINK_DIR) $(DECKLINK_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DIRECTSHOW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(LINSYS_DIR) $(OSX_VIDEO_DIR) $(PVR_DIR) $(QT_DIR) $(SHM_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR) $(WINSCREENCAP_DIR)
+if USE_UVCH264
+UVCH264_DIR=uvch264
+else
+UVCH264_DIR=
+endif
+
+SUBDIRS = $(ACM_DIR) $(APPLE_MEDIA_DIR) $(AVC_DIR) $(D3DVIDEOSINK_DIR) $(DECKLINK_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DIRECTSHOW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(LINSYS_DIR) $(OSX_VIDEO_DIR) $(PVR_DIR) $(QT_DIR) $(SHM_DIR) $(UVCH264_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR) $(WINSCREENCAP_DIR)
DIST_SUBDIRS = acmenc acmmp3dec applemedia avc d3dvideosink decklink directdraw directsound dvb linsys fbdev dshowdecwrapper dshowsrcwrapper dshowvideosink \
- osxvideo pvr2d qtwrapper shm vcd vdpau wasapi wininet winks winscreencap
+ osxvideo pvr2d qtwrapper shm uvch264 vcd vdpau wasapi wininet winks winscreencap
include $(top_srcdir)/common/parallel-subdirs.mak
diff --git a/sys/uvch264/Makefile.am b/sys/uvch264/Makefile.am
new file mode 100644
index 000000000..8ecff113f
--- /dev/null
+++ b/sys/uvch264/Makefile.am
@@ -0,0 +1,48 @@
+glib_gen_prefix = __gst_uvc_h264
+glib_gen_basename = gstuvch264
+
+include $(top_srcdir)/common/gst-glib-gen.mak
+
+built_sources = gstuvch264-marshal.c
+built_headers = gstuvch264-marshal.h
+
+BUILT_SOURCES = $(built_sources) $(built_headers)
+
+CLEANFILES = $(BUILT_SOURCES)
+
+EXTRA_DIST = gstuvch264-marshal.list
+
+
+plugin_LTLIBRARIES = libgstuvch264.la
+
+libgstuvch264_la_SOURCES = gstuvch264.c \
+ gstuvch264_mjpgdemux.c \
+ gstuvch264_src.c \
+ uvc_h264.c
+
+nodist_libgstuvch264_la_SOURCES = $(built_sources)
+
+libgstuvch264_la_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) \
+ $(GST_PLUGINS_BASE_CFLAGS) \
+ $(GST_BASE_CFLAGS) \
+ $(GST_VIDEO_CFLAGS) \
+ $(GST_CFLAGS) \
+ $(G_UDEV_CFLAGS) \
+ $(LIBUSB_CFLAGS) \
+ -DGST_USE_UNSTABLE_API
+
+libgstuvch264_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+libgstuvch264_la_LIBTOOLFLAGS = --tag=disable-static
+
+libgstuvch264_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
+ $(GST_BASE_LIBS) \
+ $(GST_PLUGINS_BASE_LIBS) \
+ $(GST_VIDEO_LIBS) \
+ $(GST_LIBS) \
+ $(G_UDEV_LIBS) \
+ $(LIBUSB_LIBS) \
+ $(top_builddir)/gst-libs/gst/basecamerabinsrc/libgstbasecamerabinsrc-$(GST_MAJORMINOR).la
+
+noinst_HEADERS = gstuvch264_mjpgdemux.h \
+ gstuvch264_src.h \
+ uvc_h264.h
diff --git a/sys/uvch264/gstuvch264-marshal.list b/sys/uvch264/gstuvch264-marshal.list
new file mode 100644
index 000000000..a9ec0dd26
--- /dev/null
+++ b/sys/uvch264/gstuvch264-marshal.list
@@ -0,0 +1,3 @@
+BOOLEAN:STRING,POINTER,POINTER,POINTER
+BOOLEAN:STRING,POINTER,POINTER
+BOOLEAN:STRING,POINTER
diff --git a/sys/uvch264/gstuvch264.c b/sys/uvch264/gstuvch264.c
new file mode 100644
index 000000000..77ad73dc4
--- /dev/null
+++ b/sys/uvch264/gstuvch264.c
@@ -0,0 +1,50 @@
+/* GStreamer
+ *
+ * uvch264: a plugin for handling UVC compliant H264 encoding cameras
+ *
+ * Copyright (C) 2012 Cisco Systems, Inc.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <gst/gst.h>
+#include "gstuvch264_mjpgdemux.h"
+#include "gstuvch264_src.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ if (!gst_element_register (plugin, "uvch264_mjpgdemux", GST_RANK_NONE,
+ GST_TYPE_UVC_H264_MJPG_DEMUX))
+ return FALSE;
+
+ if (!gst_element_register (plugin, "uvch264_src", GST_RANK_NONE,
+ GST_TYPE_UVC_H264_SRC))
+ return FALSE;
+
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "uvch264",
+ "UVC compliant H264 encoding cameras plugin",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/sys/uvch264/gstuvch264_mjpgdemux.c b/sys/uvch264/gstuvch264_mjpgdemux.c
new file mode 100644
index 000000000..4bc689981
--- /dev/null
+++ b/sys/uvch264/gstuvch264_mjpgdemux.c
@@ -0,0 +1,723 @@
+/* GStreamer
+ *
+ * uvch264_mjpg_demux: a demuxer for muxed stream in UVC H264 compliant MJPG
+ *
+ * Copyright (C) 2012 Cisco Systems, Inc.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-uvch264-mjpgdemux
+ * @short_description: UVC H264 compliant MJPG demuxer
+ *
+ * Parses a MJPG stream from a UVC H264 compliant encoding camera and extracts
+ * each muxed stream into separate pads.
+ *
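+ * A minimal application-side sketch (hypothetical code, not part of this
+ * plugin) of grabbing the auxiliary H264 pad once the element is created:
+ * |[
+ * GstElement *demux = gst_element_factory_make ("uvch264_mjpgdemux", NULL);
+ * GstPad *h264_pad = gst_element_get_static_pad (demux, "h264");
+ * ]|
+ * The returned pad can then be linked to an H264 parser or decoder, while
+ * the "jpeg" pad carries the JPEG images.
+ *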
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <string.h>
+#include <linux/uvcvideo.h>
+#include <linux/usb/video.h>
+#include <sys/ioctl.h>
+
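+/* Fallback definitions for systems where <linux/uvcvideo.h> is too old to
+ * provide UVCIOC_GET_LAST_SCR.  Only the ioctl number and structure are
+ * defined here; the call can still fail at runtime if the kernel itself
+ * does not support it. */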
+#ifndef UVCIOC_GET_LAST_SCR
+#include <time.h>
+
+struct uvc_last_scr_sample
+{
+ __u32 dev_frequency;
+ __u32 dev_stc;
+ __u16 dev_sof;
+ struct timespec host_ts;
+ __u16 host_sof;
+};
+
+#define UVCIOC_GET_LAST_SCR _IOR('u', 0x23, struct uvc_last_scr_sample)
+#endif
+
+#include "gstuvch264_mjpgdemux.h"
+
+enum
+{
+ PROP_0,
+ PROP_DEVICE_FD,
+ PROP_NUM_CLOCK_SAMPLES
+};
+
+#define DEFAULT_NUM_CLOCK_SAMPLES 32
+
+static GstStaticPadTemplate mjpgsink_pad_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("image/jpeg, "
+ "width = (int) [ 0, MAX ],"
+ "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
+ );
+
+static GstStaticPadTemplate jpegsrc_pad_template =
+GST_STATIC_PAD_TEMPLATE ("jpeg",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("image/jpeg, "
+ "width = (int) [ 0, MAX ],"
+ "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
+ );
+
+static GstStaticPadTemplate h264src_pad_template =
+GST_STATIC_PAD_TEMPLATE ("h264",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h264, "
+ "width = (int) [ 0, MAX ], "
+ "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
+ );
+
+static GstStaticPadTemplate yuy2src_pad_template =
+GST_STATIC_PAD_TEMPLATE ("yuy2",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw-yuv, "
+ "format = (fourcc) YUY2, "
+ "width = (int) [ 0, MAX ], "
+ "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
+ );
+static GstStaticPadTemplate nv12src_pad_template =
+GST_STATIC_PAD_TEMPLATE ("nv12",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw-yuv, "
+ "format = (fourcc) NV21, "
+ "width = (int) [ 0, MAX ], "
+ "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
+ );
+
+
+GST_DEBUG_CATEGORY_STATIC (uvc_h264_mjpg_demux_debug);
+#define GST_CAT_DEFAULT uvc_h264_mjpg_demux_debug
+
+typedef struct
+{
+ guint32 dev_stc;
+ guint32 dev_sof;
+ GstClockTime host_ts;
+ guint32 host_sof;
+} GstUvcH264ClockSample;
+
+struct _GstUvcH264MjpgDemuxPrivate
+{
+ int device_fd;
+ int num_clock_samples;
+ GstUvcH264ClockSample *clock_samples;
+ int last_sample;
+ int num_samples;
+ GstPad *sink_pad;
+ GstPad *jpeg_pad;
+ GstPad *h264_pad;
+ GstPad *yuy2_pad;
+ GstPad *nv12_pad;
+ GstCaps *h264_caps;
+ GstCaps *yuy2_caps;
+ GstCaps *nv12_caps;
+ guint16 h264_width;
+ guint16 h264_height;
+ guint16 yuy2_width;
+ guint16 yuy2_height;
+ guint16 nv12_width;
+ guint16 nv12_height;
+};
+
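+/* Header prepended to each APP4 auxiliary stream segment.  The fields are
+ * little endian on the wire (apart from the seemingly big-endian version
+ * field, see the byte swapping in the chain function); frame_interval is in
+ * 100 ns units and pts is a timestamp on the device clock (STC). */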
+typedef struct
+{
+ guint16 version;
+ guint16 header_len;
+ guint32 type;
+ guint16 width;
+ guint16 height;
+ guint32 frame_interval;
+ guint16 delay;
+ guint32 pts;
+} __attribute__ ((packed)) AuxiliaryStreamHeader;
+
+static void gst_uvc_h264_mjpg_demux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_uvc_h264_mjpg_demux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static void gst_uvc_h264_mjpg_demux_dispose (GObject * object);
+static GstFlowReturn gst_uvc_h264_mjpg_demux_chain (GstPad * pad,
+ GstBuffer * buffer);
+static gboolean gst_uvc_h264_mjpg_demux_sink_setcaps (GstPad * pad,
+ GstCaps * caps);
+static GstCaps *gst_uvc_h264_mjpg_demux_getcaps (GstPad * pad);
+
+#define _do_init(x) \
+ GST_DEBUG_CATEGORY_INIT (uvc_h264_mjpg_demux_debug, \
+ "uvch264_mjpgdemux", 0, "UVC H264 MJPG Demuxer");
+
+GST_BOILERPLATE_FULL (GstUvcH264MjpgDemux, gst_uvc_h264_mjpg_demux, GstElement,
+ GST_TYPE_ELEMENT, _do_init);
+
+static void
+gst_uvc_h264_mjpg_demux_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstPadTemplate *pt;
+
+ /* do not use gst_element_class_add_static_pad_template to stay compatible
+ * with gstreamer 0.10.35 */
+ pt = gst_static_pad_template_get (&mjpgsink_pad_template);
+ gst_element_class_add_pad_template (element_class, pt);
+ gst_object_unref (pt);
+ pt = gst_static_pad_template_get (&jpegsrc_pad_template);
+ gst_element_class_add_pad_template (element_class, pt);
+ gst_object_unref (pt);
+ pt = gst_static_pad_template_get (&h264src_pad_template);
+ gst_element_class_add_pad_template (element_class, pt);
+ gst_object_unref (pt);
+ pt = gst_static_pad_template_get (&yuy2src_pad_template);
+ gst_element_class_add_pad_template (element_class, pt);
+ gst_object_unref (pt);
+ pt = gst_static_pad_template_get (&nv12src_pad_template);
+ gst_element_class_add_pad_template (element_class, pt);
+ gst_object_unref (pt);
+
+ gst_element_class_set_details_simple (element_class,
+ "UVC H264 MJPG Demuxer",
+ "Video/Demuxer",
+ "Demux UVC H264 auxiliary streams from MJPG images",
+ "Youness Alaoui <youness.alaoui@collabora.co.uk>");
+}
+
+static void
+gst_uvc_h264_mjpg_demux_class_init (GstUvcH264MjpgDemuxClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ g_type_class_add_private (gobject_class, sizeof (GstUvcH264MjpgDemuxPrivate));
+
+ gobject_class->set_property = gst_uvc_h264_mjpg_demux_set_property;
+ gobject_class->get_property = gst_uvc_h264_mjpg_demux_get_property;
+ gobject_class->dispose = gst_uvc_h264_mjpg_demux_dispose;
+
+
+ g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
+ g_param_spec_int ("device-fd", "device-fd",
+ "File descriptor of the v4l2 device",
+ -1, G_MAXINT, -1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NUM_CLOCK_SAMPLES,
+ g_param_spec_int ("num-clock-samples", "num-clock-samples",
+ "Number of clock samples to gather for the PTS synchronization"
+ " (-1 = unlimited)",
+ 0, G_MAXINT, DEFAULT_NUM_CLOCK_SAMPLES,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_uvc_h264_mjpg_demux_init (GstUvcH264MjpgDemux * self,
+ GstUvcH264MjpgDemuxClass * g_class)
+{
+ self->priv = G_TYPE_INSTANCE_GET_PRIVATE (self, GST_TYPE_UVC_H264_MJPG_DEMUX,
+ GstUvcH264MjpgDemuxPrivate);
+
+
+ self->priv->device_fd = -1;
+
+ /* create the sink and src pads */
+ self->priv->sink_pad =
+ gst_pad_new_from_static_template (&mjpgsink_pad_template, "sink");
+ gst_pad_set_chain_function (self->priv->sink_pad,
+ GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_chain));
+ gst_pad_set_setcaps_function (self->priv->sink_pad,
+ GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_sink_setcaps));
+ gst_pad_set_getcaps_function (self->priv->sink_pad,
+ GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_getcaps));
+ gst_element_add_pad (GST_ELEMENT (self), self->priv->sink_pad);
+
+ /* JPEG */
+ self->priv->jpeg_pad =
+ gst_pad_new_from_static_template (&jpegsrc_pad_template, "jpeg");
+ gst_pad_set_getcaps_function (self->priv->jpeg_pad,
+ GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_getcaps));
+ gst_element_add_pad (GST_ELEMENT (self), self->priv->jpeg_pad);
+
+ /* H264 */
+ self->priv->h264_pad =
+ gst_pad_new_from_static_template (&h264src_pad_template, "h264");
+ gst_pad_use_fixed_caps (self->priv->h264_pad);
+ gst_element_add_pad (GST_ELEMENT (self), self->priv->h264_pad);
+
+ /* YUY2 */
+ self->priv->yuy2_pad =
+ gst_pad_new_from_static_template (&yuy2src_pad_template, "yuy2");
+ gst_pad_use_fixed_caps (self->priv->yuy2_pad);
+ gst_element_add_pad (GST_ELEMENT (self), self->priv->yuy2_pad);
+
+ /* NV12 */
+ self->priv->nv12_pad =
+ gst_pad_new_from_static_template (&nv12src_pad_template, "nv12");
+ gst_pad_use_fixed_caps (self->priv->nv12_pad);
+ gst_element_add_pad (GST_ELEMENT (self), self->priv->nv12_pad);
+
+ self->priv->h264_caps = gst_caps_new_simple ("video/x-h264", NULL);
+ self->priv->yuy2_caps = gst_caps_new_simple ("video/x-raw-yuv",
+ "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'), NULL);
+ self->priv->nv12_caps = gst_caps_new_simple ("video/x-raw-yuv",
+ "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('N', 'V', '1', '2'), NULL);
+ self->priv->h264_width = self->priv->h264_height = 0;
+ self->priv->yuy2_width = self->priv->yuy2_height = 0;
+ self->priv->nv12_width = self->priv->nv12_height = 0;
+}
+
+static void
+gst_uvc_h264_mjpg_demux_dispose (GObject * object)
+{
+ GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);
+
+ if (self->priv->h264_caps)
+ gst_caps_unref (self->priv->h264_caps);
+ self->priv->h264_caps = NULL;
+ if (self->priv->yuy2_caps)
+ gst_caps_unref (self->priv->yuy2_caps);
+ self->priv->yuy2_caps = NULL;
+ if (self->priv->nv12_caps)
+ gst_caps_unref (self->priv->nv12_caps);
+ self->priv->nv12_caps = NULL;
+ if (self->priv->clock_samples)
+ g_free (self->priv->clock_samples);
+ self->priv->clock_samples = NULL;
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+static void
+gst_uvc_h264_mjpg_demux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_DEVICE_FD:
+ self->priv->device_fd = g_value_get_int (value);
+ break;
+ case PROP_NUM_CLOCK_SAMPLES:
+ self->priv->num_clock_samples = g_value_get_int (value);
+ if (self->priv->clock_samples) {
+ if (self->priv->num_clock_samples) {
+ self->priv->clock_samples = g_realloc_n (self->priv->clock_samples,
+ self->priv->num_clock_samples, sizeof (GstUvcH264ClockSample));
+ if (self->priv->num_samples > self->priv->num_clock_samples) {
+ self->priv->num_samples = self->priv->num_clock_samples;
+ if (self->priv->last_sample >= self->priv->num_samples)
+ self->priv->last_sample = self->priv->num_samples - 1;
+ }
+ } else {
+ g_free (self->priv->clock_samples);
+ self->priv->clock_samples = NULL;
+ self->priv->last_sample = -1;
+ self->priv->num_samples = 0;
+ }
+      } else if (self->priv->num_clock_samples > 0) {
+ self->priv->clock_samples = g_malloc0_n (self->priv->num_clock_samples,
+ sizeof (GstUvcH264ClockSample));
+ self->priv->last_sample = -1;
+ self->priv->num_samples = 0;
+ }
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_uvc_h264_mjpg_demux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_DEVICE_FD:
+ g_value_set_int (value, self->priv->device_fd);
+ break;
+ case PROP_NUM_CLOCK_SAMPLES:
+ g_value_set_int (value, self->priv->num_clock_samples);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+ break;
+ }
+}
+
+
+static gboolean
+gst_uvc_h264_mjpg_demux_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (GST_OBJECT_PARENT (pad));
+
+ return gst_pad_set_caps (self->priv->jpeg_pad, caps);
+}
+
+static GstCaps *
+gst_uvc_h264_mjpg_demux_getcaps (GstPad * pad)
+{
+ GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (GST_OBJECT_PARENT (pad));
+ GstCaps *result = NULL;
+
+ if (pad == self->priv->jpeg_pad)
+ result = gst_pad_peer_get_caps (self->priv->sink_pad);
+ else if (pad == self->priv->sink_pad)
+ result = gst_pad_peer_get_caps (self->priv->jpeg_pad);
+
+ /* TODO: intersect with template and fixate caps */
+ if (result == NULL)
+ result = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
+
+ return result;
+}
+
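+/* Fetch the last SCR (system clock reference) sample from the driver with
+ * UVCIOC_GET_LAST_SCR and store it in the clock sample ring buffer.  Once
+ * num_clock_samples samples have been gathered, the device PTS could be
+ * mapped to a host timestamp for @buf; the regression doing that mapping is
+ * still a TODO below, so the buffer timestamp is currently left untouched. */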
+static gboolean
+_pts_to_timestamp (GstUvcH264MjpgDemux * self, GstBuffer * buf, guint32 pts)
+{
+ GstUvcH264MjpgDemuxPrivate *priv = self->priv;
+ GstUvcH264ClockSample *current_sample = NULL;
+ GstUvcH264ClockSample *oldest_sample = NULL;
+ guint32 next_sample;
+ struct uvc_last_scr_sample sample;
+ guint32 dev_sof;
+
+ if (self->priv->device_fd == -1 || priv->clock_samples == NULL)
+ return FALSE;
+
+ if (-1 == ioctl (priv->device_fd, UVCIOC_GET_LAST_SCR, &sample)) {
+ //GST_WARNING_OBJECT (self, " GET_LAST_SCR error");
+ return FALSE;
+ }
+
+ dev_sof = (guint32) (sample.dev_sof + 2048) << 16;
+ if (priv->num_samples > 0 &&
+ priv->clock_samples[priv->last_sample].dev_sof == dev_sof) {
+ current_sample = &priv->clock_samples[priv->last_sample];
+ } else {
+ next_sample = (priv->last_sample + 1) % priv->num_clock_samples;
+ current_sample = &priv->clock_samples[next_sample];
+ current_sample->dev_stc = sample.dev_stc;
+ current_sample->dev_sof = dev_sof;
+ current_sample->host_ts = sample.host_ts.tv_sec * GST_SECOND +
+ sample.host_ts.tv_nsec * GST_NSECOND;
+ current_sample->host_sof = (guint32) (sample.host_sof + 2048) << 16;
+
+ priv->num_samples++;
+ priv->last_sample = next_sample;
+
+ /* Debug printing */
+ GST_DEBUG_OBJECT (self, "device frequency: %u", sample.dev_frequency);
+ GST_DEBUG_OBJECT (self, "dev_sof: %u", sample.dev_sof);
+ GST_DEBUG_OBJECT (self, "dev_stc: %u", sample.dev_stc);
+ GST_DEBUG_OBJECT (self, "host_ts: %lu -- %" GST_TIME_FORMAT,
+ current_sample->host_ts, GST_TIME_ARGS (current_sample->host_ts));
+ GST_DEBUG_OBJECT (self, "host_sof: %u", sample.host_sof);
+ GST_DEBUG_OBJECT (self, "PTS: %u", pts);
+ GST_DEBUG_OBJECT (self, "Diff: %u - %f\n", sample.dev_stc - pts,
+ (gdouble) (sample.dev_stc - pts) / sample.dev_frequency);
+ }
+
+ if (priv->num_samples < priv->num_clock_samples)
+ return FALSE;
+
+ next_sample = (priv->last_sample + 1) % priv->num_clock_samples;
+ oldest_sample = &priv->clock_samples[next_sample];
+
+ /* TODO: Use current_sample and oldest_sample to do the
+ * double linear regression and calculate a new PTS */
+ (void) oldest_sample;
+
+ return TRUE;
+}
+
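+/* Chain function: scan the incoming MJPG frame for APP4 (0xFF 0xE4)
+ * segments.  JPEG data around the APP4 segments is collected and pushed on
+ * the jpeg pad, while the APP4 payloads are stripped of their auxiliary
+ * stream header, concatenated and pushed on the pad matching the header's
+ * fourcc (h264, yuy2 or nv12). */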
+static GstFlowReturn
+gst_uvc_h264_mjpg_demux_chain (GstPad * pad, GstBuffer * buf)
+{
+ GstUvcH264MjpgDemux *self;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBufferList *jpeg_buf = gst_buffer_list_new ();
+ GstBufferListIterator *jpeg_it = gst_buffer_list_iterate (jpeg_buf);
+ GstBufferList *aux_buf = NULL;
+ GstBufferListIterator *aux_it = NULL;
+ AuxiliaryStreamHeader aux_header = { 0 };
+ GstBuffer *sub_buffer = NULL;
+ guint32 aux_size = 0;
+ GstPad *aux_pad = NULL;
+ GstCaps **aux_caps = NULL;
+ guint last_offset;
+ guint i;
+ guchar *data;
+ guint size;
+
+ self = GST_UVC_H264_MJPG_DEMUX (GST_PAD_PARENT (pad));
+
+ last_offset = 0;
+ data = GST_BUFFER_DATA (buf);
+ size = GST_BUFFER_SIZE (buf);
+ if (data == NULL || size == 0) {
+ ret = gst_pad_push (self->priv->jpeg_pad, buf);
+ goto done;
+ }
+
+ gst_buffer_list_iterator_add_group (jpeg_it);
+ for (i = 0; i < size - 1; i++) {
+ /* Check for APP4 (0xe4) marker in the jpeg */
+ if (data[i] == 0xff && data[i + 1] == 0xe4) {
+ guint16 segment_size;
+
+ /* Sanity check sizes and get segment size */
+ if (i + 4 >= size) {
+ GST_ELEMENT_ERROR (self, STREAM, DEMUX,
+ ("Not enough data to read marker size"), (NULL));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+ segment_size = GUINT16_FROM_BE (*((guint16 *) (data + i + 2)));
+
+ if (i + segment_size + 2 >= size) {
+ GST_ELEMENT_ERROR (self, STREAM, DEMUX,
+ ("Not enough data to read marker content"), (NULL));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+ GST_DEBUG_OBJECT (self,
+ "Found APP4 marker (%d). JPG: %d-%d - APP4: %d - %d", segment_size,
+ last_offset, i, i, i + 2 + segment_size);
+
+      /* Add JPEG data between the last offset and this marker */
+ if (i - last_offset > 0) {
+ sub_buffer = gst_buffer_create_sub (buf, last_offset, i - last_offset);
+ gst_buffer_copy_metadata (sub_buffer, buf, GST_BUFFER_COPY_ALL);
+ gst_buffer_list_iterator_add (jpeg_it, sub_buffer);
+ }
+ last_offset = i + 2 + segment_size;
+
+ /* Reset i/segment size to the app4 data (ignore marker header/size) */
+ i += 4;
+ segment_size -= 2;
+
+ /* If this is a new auxiliary stream, initialize everything properly */
+ if (aux_buf == NULL) {
+ if (segment_size < sizeof (aux_header) + sizeof (aux_size)) {
+ GST_ELEMENT_ERROR (self, STREAM, DEMUX,
+ ("Not enough data to read aux header"), (NULL));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+
+ aux_header = *((AuxiliaryStreamHeader *) (data + i));
+ /* version should be little endian but it looks more like BE */
+ aux_header.version = GUINT16_FROM_BE (aux_header.version);
+ aux_header.header_len = GUINT16_FROM_LE (aux_header.header_len);
+ aux_header.width = GUINT16_FROM_LE (aux_header.width);
+ aux_header.height = GUINT16_FROM_LE (aux_header.height);
+ aux_header.frame_interval = GUINT32_FROM_LE (aux_header.frame_interval);
+ aux_header.delay = GUINT16_FROM_LE (aux_header.delay);
+ aux_header.pts = GUINT32_FROM_LE (aux_header.pts);
+ GST_DEBUG_OBJECT (self, "New auxiliary stream : v%d - %d bytes - %"
+ GST_FOURCC_FORMAT " %dx%d -- %d *100ns -- %d ms -- %d",
+ aux_header.version, aux_header.header_len,
+ GST_FOURCC_ARGS (aux_header.type),
+ aux_header.width, aux_header.height,
+ aux_header.frame_interval, aux_header.delay, aux_header.pts);
+ aux_size = *((guint32 *) (data + i + aux_header.header_len));
+ GST_DEBUG_OBJECT (self, "Auxiliary stream size : %d bytes", aux_size);
+
+ if (aux_size > 0) {
+ guint16 *width = NULL;
+ guint16 *height = NULL;
+
+ /* Find the auxiliary stream's pad and caps */
+ switch (aux_header.type) {
+ case GST_MAKE_FOURCC ('H', '2', '6', '4'):
+ aux_pad = self->priv->h264_pad;
+ aux_caps = &self->priv->h264_caps;
+ width = &self->priv->h264_width;
+ height = &self->priv->h264_height;
+ break;
+ case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
+ aux_pad = self->priv->yuy2_pad;
+ aux_caps = &self->priv->yuy2_caps;
+ width = &self->priv->yuy2_width;
+ height = &self->priv->yuy2_height;
+ break;
+ case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
+ aux_pad = self->priv->nv12_pad;
+ aux_caps = &self->priv->nv12_caps;
+ width = &self->priv->nv12_width;
+ height = &self->priv->nv12_height;
+ break;
+ default:
+ GST_ELEMENT_ERROR (self, STREAM, DEMUX,
+ ("Unknown auxiliary stream format : %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (aux_header.type)), (NULL));
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ if (ret != GST_FLOW_OK)
+ goto done;
+
+ if (*width != aux_header.width || *height != aux_header.height) {
+ GstCaps *peercaps = gst_pad_peer_get_caps (aux_pad);
+ GstStructure *s = NULL;
+ gint fps_num = 1000000000 / aux_header.frame_interval;
+ gint fps_den = 100;
+
+ /* TODO: intersect with pad template */
+ GST_DEBUG ("peercaps : %" GST_PTR_FORMAT, peercaps);
+ if (peercaps && !gst_caps_is_any (peercaps))
+ s = gst_caps_get_structure (peercaps, 0);
+ if (s) {
+ /* TODO: make sure it contains the right format/width/height */
+ gst_structure_fixate_field_nearest_fraction (s, "framerate",
+ fps_num, fps_den);
+ GST_DEBUG ("Fixated struct : %" GST_PTR_FORMAT, s);
+ gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den);
+ }
+ if (peercaps)
+ gst_caps_unref (peercaps);
+
+ *width = aux_header.width;
+ *height = aux_header.height;
+ *aux_caps = gst_caps_make_writable (*aux_caps);
+ /* FIXME: fps must match the caps and be allowed and represent
+ our first buffer */
+ gst_caps_set_simple (*aux_caps,
+ "width", G_TYPE_INT, aux_header.width,
+ "height", G_TYPE_INT, aux_header.height,
+ "framerate", GST_TYPE_FRACTION, fps_num, fps_den, NULL);
+ if (!gst_pad_set_caps (aux_pad, *aux_caps)) {
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto done;
+ }
+ }
+
+ /* Create new auxiliary buffer list and adjust i/segment size */
+ aux_buf = gst_buffer_list_new ();
+ aux_it = gst_buffer_list_iterate (aux_buf);
+ gst_buffer_list_iterator_add_group (aux_it);
+ }
+
+ i += sizeof (aux_header) + sizeof (aux_size);
+ segment_size -= sizeof (aux_header) + sizeof (aux_size);
+ }
+
+ if (segment_size > aux_size) {
+ GST_ELEMENT_ERROR (self, STREAM, DEMUX,
+ ("Expected %d auxiliary data, got %d bytes", aux_size,
+ segment_size), (NULL));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+
+ if (segment_size > 0) {
+ sub_buffer = gst_buffer_create_sub (buf, i, segment_size);
+ GST_BUFFER_DURATION (sub_buffer) =
+ aux_header.frame_interval * 100 * GST_NSECOND;
+ gst_buffer_copy_metadata (sub_buffer, buf, GST_BUFFER_COPY_TIMESTAMPS);
+ gst_buffer_set_caps (sub_buffer, *aux_caps);
+
+ _pts_to_timestamp (self, sub_buffer, aux_header.pts);
+
+ gst_buffer_list_iterator_add (aux_it, sub_buffer);
+
+ aux_size -= segment_size;
+
+ /* Push completed aux data */
+ if (aux_size == 0) {
+ gst_buffer_list_iterator_free (aux_it);
+ aux_it = NULL;
+ GST_DEBUG_OBJECT (self, "Pushing %" GST_FOURCC_FORMAT
+ " auxiliary buffer %" GST_PTR_FORMAT,
+ GST_FOURCC_ARGS (aux_header.type), *aux_caps);
+ ret = gst_pad_push_list (aux_pad, aux_buf);
+ aux_buf = NULL;
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (self, "Error pushing %" GST_FOURCC_FORMAT
+ " auxiliary data", GST_FOURCC_ARGS (aux_header.type));
+ goto done;
+ }
+ }
+ }
+
+ i += segment_size - 1;
+ } else if (data[i] == 0xff && data[i + 1] == 0xda) {
+
+ /* The APP4 markers must be before the SOS marker, so this is the end */
+ GST_DEBUG_OBJECT (self, "Found SOS marker.");
+
+ sub_buffer = gst_buffer_create_sub (buf, last_offset, size - last_offset);
+ gst_buffer_copy_metadata (sub_buffer, buf, GST_BUFFER_COPY_ALL);
+ gst_buffer_list_iterator_add (jpeg_it, sub_buffer);
+ last_offset = size;
+ break;
+ }
+ }
+ gst_buffer_list_iterator_free (jpeg_it);
+ jpeg_it = NULL;
+
+ if (aux_buf != NULL) {
+ GST_ELEMENT_ERROR (self, STREAM, DEMUX,
+ ("Incomplete auxiliary stream. %d bytes missing", aux_size), (NULL));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+
+ if (last_offset != size) {
+ /* this means there was no SOS marker in the jpg, so we assume the JPG was
+ just a container */
+ GST_DEBUG_OBJECT (self, "SOS marker wasn't found. MJPG is container only");
+ gst_buffer_list_unref (jpeg_buf);
+ jpeg_buf = NULL;
+ } else {
+ ret = gst_pad_push_list (self->priv->jpeg_pad, jpeg_buf);
+ jpeg_buf = NULL;
+ }
+
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (self, "Error pushing jpeg data");
+ goto done;
+ }
+
+done:
+ /* In case of error, unref whatever was left */
+ if (aux_it)
+ gst_buffer_list_iterator_free (aux_it);
+ if (aux_buf)
+ gst_buffer_list_unref (aux_buf);
+ if (jpeg_it)
+ gst_buffer_list_iterator_free (jpeg_it);
+ if (jpeg_buf)
+ gst_buffer_list_unref (jpeg_buf);
+
+ /* We must always unref the input buffer since we never push it out */
+ gst_buffer_unref (buf);
+
+ return ret;
+}
diff --git a/sys/uvch264/gstuvch264_mjpgdemux.h b/sys/uvch264/gstuvch264_mjpgdemux.h
new file mode 100644
index 000000000..8c4445201
--- /dev/null
+++ b/sys/uvch264/gstuvch264_mjpgdemux.h
@@ -0,0 +1,66 @@
+/* GStreamer
+ *
+ * uvch264_mjpg_demux: a demuxer for muxed stream in UVC H264 compliant MJPG
+ *
+ * Copyright (C) 2012 Cisco Systems, Inc.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_UVC_H264_MJPG_DEMUX_H__
+#define __GST_UVC_H264_MJPG_DEMUX_H__
+
+#include <gst/gst.h>
+
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_UVC_H264_MJPG_DEMUX \
+ (gst_uvc_h264_mjpg_demux_get_type())
+#define GST_UVC_H264_MJPG_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), \
+ GST_TYPE_UVC_H264_MJPG_DEMUX, \
+ GstUvcH264MjpgDemux))
+#define GST_UVC_H264_MJPG_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), \
+ GST_TYPE_UVC_H264_MJPG_DEMUX, \
+ GstUvcH264MjpgDemuxClass))
+#define GST_IS_UVC_H264_MJPG_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), \
+ GST_TYPE_UVC_H264_MJPG_DEMUX))
+#define GST_IS_UVC_H264_MJPG_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), \
+ GST_TYPE_UVC_H264_MJPG_DEMUX))
+
+typedef struct _GstUvcH264MjpgDemux GstUvcH264MjpgDemux;
+typedef struct _GstUvcH264MjpgDemuxPrivate GstUvcH264MjpgDemuxPrivate;
+typedef struct _GstUvcH264MjpgDemuxClass GstUvcH264MjpgDemuxClass;
+
+struct _GstUvcH264MjpgDemux {
+ GstElement element;
+ GstUvcH264MjpgDemuxPrivate *priv;
+};
+
+struct _GstUvcH264MjpgDemuxClass {
+ GstElementClass parent_class;
+};
+
+GType gst_uvc_h264_mjpg_demux_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_UVC_H264_MJPG_DEMUX_H__ */
diff --git a/sys/uvch264/gstuvch264_src.c b/sys/uvch264/gstuvch264_src.c
new file mode 100644
index 000000000..69555d3e1
--- /dev/null
+++ b/sys/uvch264/gstuvch264_src.c
@@ -0,0 +1,3180 @@
+/*
+ * GStreamer
+ *
+ * Copyright (C) 2012 Cisco Systems, Inc.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+/**
+ * SECTION:element-uvch264-src
+ *
+ * A camera bin src element that wraps v4l2src and implements UVC H264
+ * Extension Units (XU) to control the H264 encoder in the camera
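+ *
+ * A minimal sketch (hypothetical application code) of configuring a few of
+ * the static controls exposed by this element, assuming the camera is at
+ * /dev/video0:
+ * |[
+ * GstElement *src = gst_element_factory_make ("uvch264_src", NULL);
+ * g_object_set (src, "device", "/dev/video0",
+ *     "initial-bitrate", 3000000, "iframe-period", 3000, NULL);
+ * ]|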
+ */
+
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+#include <gst/video/video.h>
+#include <linux/uvcvideo.h>
+#include <linux/usb/video.h>
+#include <sys/ioctl.h>
+#include <string.h>
+
+#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB)
+#include <gudev/gudev.h>
+#include <libusb.h>
+
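+/* Layout of an extension unit (XU) descriptor as it appears in the USB
+ * video control interface descriptors (bDescriptorType
+ * USB_VIDEO_CONTROL_INTERFACE, bDescriptorSubType USB_VIDEO_CONTROL_XU_TYPE,
+ * defined below); guidExtensionCode identifies the extension unit. */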
+typedef struct
+{
+ int8_t bLength;
+ int8_t bDescriptorType;
+ int8_t bDescriptorSubType;
+ int8_t bUnitID;
+ uint8_t guidExtensionCode[16];
+} __attribute__ ((__packed__)) xu_descriptor;
+
+#define GUID_FORMAT "%02X%02X%02X%02X-%02X%02X%02X%02X-"\
+ "%02X%02X%02X%02X-%02X%02X%02X%02X"
+#define GUID_ARGS(guid) guid[0], guid[1], guid[2], guid[3], \
+ guid[4], guid[5], guid[6], guid[7], \
+ guid[8], guid[9], guid[10], guid[11], \
+ guid[12], guid[13], guid[14], guid[15]
+
+#define USB_VIDEO_CONTROL 1
+#define USB_VIDEO_CONTROL_INTERFACE 0x24
+#define USB_VIDEO_CONTROL_XU_TYPE 0x06
+#endif
+
+#include "gstuvch264_src.h"
+#include "gstuvch264-marshal.h"
+
+#ifndef UVCIOC_XU_FIND_UNIT
+/* Define the needed structure if <linux/uvcvideo.h> is too old.
+ * This might fail though if the kernel itself does not support it.
+ */
+struct uvc_xu_find_unit
+{
+ __u8 guid[16];
+ __u8 unit;
+};
+#define UVCIOC_XU_FIND_UNIT _IOWR('u', 0x22, struct uvc_xu_find_unit)
+#endif
+
+
+enum
+{
+ PROP_0,
+ /* uvch264_src properties */
+ PROP_COLORSPACE_NAME,
+ PROP_JPEG_DECODER_NAME,
+ PROP_NUM_CLOCK_SAMPLES,
+ /* v4l2src properties */
+ PROP_NUM_BUFFERS,
+ PROP_DEVICE,
+ PROP_DEVICE_NAME,
+ /* Static controls */
+ PROP_INITIAL_BITRATE,
+ PROP_SLICE_UNITS,
+ PROP_SLICE_MODE,
+ PROP_IFRAME_PERIOD,
+ PROP_USAGE_TYPE,
+ PROP_ENTROPY,
+ PROP_ENABLE_SEI,
+ PROP_NUM_REORDER_FRAMES,
+ PROP_PREVIEW_FLIPPED,
+ PROP_LEAKY_BUCKET_SIZE,
+ /* Dynamic controls */
+ PROP_RATE_CONTROL,
+ PROP_FIXED_FRAMERATE,
+ PROP_MAX_MBPS, /* read-only */
+ PROP_LEVEL_IDC,
+ PROP_PEAK_BITRATE,
+ PROP_AVERAGE_BITRATE,
+ PROP_MIN_IFRAME_QP,
+ PROP_MAX_IFRAME_QP,
+ PROP_MIN_PFRAME_QP,
+ PROP_MAX_PFRAME_QP,
+ PROP_MIN_BFRAME_QP,
+ PROP_MAX_BFRAME_QP,
+ PROP_LTR_BUFFER_SIZE,
+ PROP_LTR_ENCODER_CONTROL,
+};
+/* In caps : frame interval (fps), width, height, profile, mux */
+/* Ignored: temporal, spatial, SNR, MVC views, version, reset */
+/* Events: LTR, generate IDR */
+
+enum
+{
+ /* action signals */
+ SIGNAL_GET_ENUM_SETTING,
+ SIGNAL_GET_BOOLEAN_SETTING,
+ SIGNAL_GET_INT_SETTING,
+ LAST_SIGNAL
+};
+
+static guint _signals[LAST_SIGNAL];
+
+/* Default values */
+#define DEFAULT_COLORSPACE_NAME "ffmpegcolorspace"
+#define DEFAULT_JPEG_DECODER_NAME "jpegdec"
+#define DEFAULT_NUM_CLOCK_SAMPLES 0
+#define DEFAULT_NUM_BUFFERS -1
+#define DEFAULT_DEVICE "/dev/video0"
+#define DEFAULT_DEVICE_NAME NULL
+#define DEFAULT_INITIAL_BITRATE 3000000
+#define DEFAULT_SLICE_UNITS 4
+#define DEFAULT_SLICE_MODE UVC_H264_SLICEMODE_SLICEPERFRAME
+#define DEFAULT_IFRAME_PERIOD 10000
+#define DEFAULT_USAGE_TYPE UVC_H264_USAGETYPE_REALTIME
+#define DEFAULT_ENTROPY UVC_H264_ENTROPY_CAVLC
+#define DEFAULT_ENABLE_SEI FALSE
+#define DEFAULT_NUM_REORDER_FRAMES 0
+#define DEFAULT_PREVIEW_FLIPPED FALSE
+#define DEFAULT_LEAKY_BUCKET_SIZE 1000
+#define DEFAULT_RATE_CONTROL UVC_H264_RATECONTROL_CBR
+#define DEFAULT_FIXED_FRAMERATE FALSE
+#define DEFAULT_LEVEL_IDC 40
+#define DEFAULT_PEAK_BITRATE DEFAULT_INITIAL_BITRATE
+#define DEFAULT_AVERAGE_BITRATE DEFAULT_INITIAL_BITRATE
+#define DEFAULT_MIN_QP 10
+#define DEFAULT_MAX_QP 46
+#define DEFAULT_LTR_BUFFER_SIZE 0
+#define DEFAULT_LTR_ENCODER_CONTROL 0
+
+#define NSEC_PER_SEC (G_USEC_PER_SEC * 1000)
+
+
+GST_DEBUG_CATEGORY (uvc_h264_src_debug);
+#define GST_CAT_DEFAULT uvc_h264_src_debug
+
+GST_BOILERPLATE (GstUvcH264Src, gst_uvc_h264_src,
+ GstBaseCameraSrc, GST_TYPE_BASE_CAMERA_SRC);
+
+#define GST_UVC_H264_SRC_VF_CAPS_STR \
+ GST_VIDEO_CAPS_RGB ";" \
+ GST_VIDEO_CAPS_RGB";" \
+ GST_VIDEO_CAPS_BGR";" \
+ GST_VIDEO_CAPS_RGBx";" \
+ GST_VIDEO_CAPS_xRGB";" \
+ GST_VIDEO_CAPS_BGRx";" \
+ GST_VIDEO_CAPS_xBGR";" \
+ GST_VIDEO_CAPS_RGBA";" \
+ GST_VIDEO_CAPS_ARGB";" \
+ GST_VIDEO_CAPS_BGRA";" \
+ GST_VIDEO_CAPS_ABGR";" \
+ GST_VIDEO_CAPS_RGB_16";" \
+ GST_VIDEO_CAPS_RGB_15";" \
+ "video/x-raw-rgb, bpp = (int)8, depth = (int)8, " \
+ "width = "GST_VIDEO_SIZE_RANGE" , " \
+ "height = " GST_VIDEO_SIZE_RANGE ", " \
+ "framerate = "GST_VIDEO_FPS_RANGE ";" \
+ GST_VIDEO_CAPS_GRAY8";" \
+ GST_VIDEO_CAPS_GRAY16("BIG_ENDIAN")";" \
+ GST_VIDEO_CAPS_GRAY16("LITTLE_ENDIAN")";" \
+ GST_VIDEO_CAPS_YUV ("{ I420 , NV12 , NV21 , YV12 , YUY2 ," \
+ " Y42B , Y444 , YUV9 , YVU9 , Y41B , Y800 , Y8 , GREY ," \
+ " Y16 , UYVY , YVYU , IYU1 , v308 , AYUV, A420}") ";" \
+ "image/jpeg, " \
+ "width = " GST_VIDEO_SIZE_RANGE ", " \
+ "height = " GST_VIDEO_SIZE_RANGE ", " \
+ "framerate = " GST_VIDEO_FPS_RANGE
+
+#define GST_UVC_H264_SRC_VID_CAPS_STR \
+ GST_UVC_H264_SRC_VF_CAPS_STR ";" \
+ "video/x-h264, " \
+ "width = " GST_VIDEO_SIZE_RANGE ", " \
+ "height = " GST_VIDEO_SIZE_RANGE ", " \
+ "framerate = " GST_VIDEO_FPS_RANGE ", " \
+ "stream-format = (string) { byte-stream, avc }, " \
+ "alignment = (string) { au }, " \
+ "profile = (string) { high, main, baseline, constrained-baseline }"
+
+static GstStaticPadTemplate vfsrc_template =
+GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_VIEWFINDER_PAD_NAME,
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_UVC_H264_SRC_VF_CAPS_STR));
+
+static GstStaticPadTemplate imgsrc_template =
+GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_IMAGE_PAD_NAME,
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_NONE);
+
+static GstStaticPadTemplate vidsrc_template =
+GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME,
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_UVC_H264_SRC_VID_CAPS_STR));
+
+
+static void gst_uvc_h264_src_dispose (GObject * object);
+static void gst_uvc_h264_src_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_uvc_h264_src_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static gboolean gst_uvc_h264_src_event (GstPad * pad, GstEvent * event);
+static gboolean gst_uvc_h264_src_send_event (GstElement * element,
+ GstEvent * event);
+static gboolean gst_uvc_h264_src_construct_pipeline (GstBaseCameraSrc *
+ bcamsrc);
+static gboolean gst_uvc_h264_src_set_mode (GstBaseCameraSrc * bcamsrc,
+ GstCameraBinMode mode);
+static gboolean gst_uvc_h264_src_start_capture (GstBaseCameraSrc * camerasrc);
+static void gst_uvc_h264_src_stop_capture (GstBaseCameraSrc * camerasrc);
+static GstStateChangeReturn gst_uvc_h264_src_change_state (GstElement * element,
+ GstStateChange trans);
+static gboolean gst_uvc_h264_src_buffer_probe (GstPad * pad,
+ GstBuffer * buffer, gpointer user_data);
+static gboolean gst_uvc_h264_src_event_probe (GstPad * pad,
+ GstEvent * event, gpointer user_data);
+static void gst_uvc_h264_src_pad_linking_cb (GstPad * pad,
+ GstPad * peer, gpointer user_data);
+static GstCaps *gst_uvc_h264_src_getcaps (GstPad * pad);
+
+
+static void v4l2src_prepare_format (GstElement * v4l2src, gint fd, guint fourcc,
+ guint width, guint height, gpointer user_data);
+static void fill_probe_commit (GstUvcH264Src * self,
+ uvcx_video_config_probe_commit_t * probe, guint32 frame_interval,
+ guint32 width, guint32 height, guint32 profile,
+ UvcH264StreamFormat stream_format);
+static gboolean xu_query (GstUvcH264Src * self, guint selector, guint query,
+ guchar * data);
+
+static void set_rate_control (GstUvcH264Src * self);
+static void set_level_idc (GstUvcH264Src * self);
+static void set_bitrate (GstUvcH264Src * self);
+static void set_qp (GstUvcH264Src * self, gint type);
+static void set_ltr (GstUvcH264Src * self);
+static void update_rate_control (GstUvcH264Src * self);
+static guint32 update_level_idc_and_get_max_mbps (GstUvcH264Src * self);
+static void update_bitrate (GstUvcH264Src * self);
+static gboolean update_qp (GstUvcH264Src * self, gint type);
+static void update_ltr (GstUvcH264Src * self);
+
+static gboolean gst_uvc_h264_src_get_enum_setting (GstUvcH264Src * self,
+ gchar * property, gint * mask, gint * default_value);
+static gboolean gst_uvc_h264_src_get_boolean_setting (GstUvcH264Src * self,
+ gchar * property, gboolean * changeable, gboolean * def);
+static gboolean gst_uvc_h264_src_get_int_setting (GstUvcH264Src * self,
+ gchar * property, gint * min, gint * def, gint * max);
+
+static void
+gst_uvc_h264_src_base_init (gpointer g_class)
+{
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
+ GstPadTemplate *pt;
+
+ GST_DEBUG_CATEGORY_INIT (uvc_h264_src_debug, "uvch264_src",
+ 0, "UVC H264 Compliant camera bin source");
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "UVC H264 Source",
+ "Source/Video",
+ "UVC H264 Encoding camera source",
+ "Youness Alaoui <youness.alaoui@collabora.co.uk>");
+
+ /* Don't use gst_element_class_add_static_pad_template in order to keep
+ * the plugin compatible with gst 0.10.35 */
+ pt = gst_static_pad_template_get (&vidsrc_template);
+ gst_element_class_add_pad_template (gstelement_class, pt);
+ gst_object_unref (pt);
+
+ pt = gst_static_pad_template_get (&imgsrc_template);
+ gst_element_class_add_pad_template (gstelement_class, pt);
+ gst_object_unref (pt);
+
+ pt = gst_static_pad_template_get (&vfsrc_template);
+ gst_element_class_add_pad_template (gstelement_class, pt);
+ gst_object_unref (pt);
+}
+
+static void
+gst_uvc_h264_src_class_init (GstUvcH264SrcClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBaseCameraSrcClass *gstbasecamerasrc_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+ gstbasecamerasrc_class = GST_BASE_CAMERA_SRC_CLASS (klass);
+
+ gobject_class->dispose = gst_uvc_h264_src_dispose;
+ gobject_class->set_property = gst_uvc_h264_src_set_property;
+ gobject_class->get_property = gst_uvc_h264_src_get_property;
+
+ gstelement_class->change_state = gst_uvc_h264_src_change_state;
+ gstelement_class->send_event = gst_uvc_h264_src_send_event;
+
+ gstbasecamerasrc_class->construct_pipeline =
+ gst_uvc_h264_src_construct_pipeline;
+ gstbasecamerasrc_class->set_mode = gst_uvc_h264_src_set_mode;
+ gstbasecamerasrc_class->start_capture = gst_uvc_h264_src_start_capture;
+ gstbasecamerasrc_class->stop_capture = gst_uvc_h264_src_stop_capture;
+
+ /* Properties */
+ g_object_class_install_property (gobject_class, PROP_COLORSPACE_NAME,
+ g_param_spec_string ("colorspace-name", "colorspace element name",
+ "The name of the colorspace element",
+ DEFAULT_COLORSPACE_NAME, G_PARAM_CONSTRUCT | G_PARAM_READWRITE |
+ GST_PARAM_MUTABLE_READY | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_JPEG_DECODER_NAME,
+ g_param_spec_string ("jpeg-decoder-name", "jpeg decoder element name",
+ "The name of the jpeg decoder element",
+ DEFAULT_JPEG_DECODER_NAME, G_PARAM_CONSTRUCT | G_PARAM_READWRITE |
+ GST_PARAM_MUTABLE_READY | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NUM_CLOCK_SAMPLES,
+ g_param_spec_int ("num-clock-samples", "num-clock-samples",
+ "Number of clock samples to gather for the PTS synchronization"
+ " (-1 = unlimited)",
+ 0, G_MAXINT, DEFAULT_NUM_CLOCK_SAMPLES,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | GST_PARAM_MUTABLE_PLAYING |
+ G_PARAM_STATIC_STRINGS));
+
+ /* v4l2src proxied properties */
+ g_object_class_install_property (gobject_class, PROP_NUM_BUFFERS,
+ g_param_spec_int ("num-buffers", "num-buffers",
+ "Number of buffers to output before sending EOS (-1 = unlimited)",
+ -1, G_MAXINT, DEFAULT_NUM_BUFFERS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DEVICE,
+ g_param_spec_string ("device", "device",
+ "Device location",
+ DEFAULT_DEVICE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
+ g_param_spec_string ("device-name", "Device name",
+ "Name of the device", DEFAULT_DEVICE_NAME,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ /* Static controls */
+ g_object_class_install_property (gobject_class, PROP_INITIAL_BITRATE,
+ g_param_spec_uint ("initial-bitrate", "Initial bitrate",
+ "Initial bitrate in bits/second (static control)",
+ 0, G_MAXUINT, DEFAULT_INITIAL_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_SLICE_UNITS,
+ g_param_spec_uint ("slice-units", "Slice units",
+ "Slice units (static control)",
+ 0, G_MAXUINT16, DEFAULT_SLICE_UNITS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_SLICE_MODE,
+ g_param_spec_enum ("slice-mode", "Slice mode",
+ "Defines the unit of the slice-units property (static control)",
+ UVC_H264_SLICEMODE_TYPE,
+ DEFAULT_SLICE_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_IFRAME_PERIOD,
+ g_param_spec_uint ("iframe-period", "I Frame Period",
+ "Time between IDR frames in milliseconds (static control)",
+ 0, G_MAXUINT16, DEFAULT_IFRAME_PERIOD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_USAGE_TYPE,
+ g_param_spec_enum ("usage-type", "Usage type",
+ "The usage type (static control)",
+ UVC_H264_USAGETYPE_TYPE, DEFAULT_USAGE_TYPE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_ENTROPY,
+ g_param_spec_enum ("entropy", "Entropy",
+ "Entropy (static control)",
+ UVC_H264_ENTROPY_TYPE, DEFAULT_ENTROPY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_ENABLE_SEI,
+ g_param_spec_boolean ("enable-sei", "Enable SEI",
+ "Enable SEI picture timing (static control)",
+ DEFAULT_ENABLE_SEI, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_NUM_REORDER_FRAMES,
+ g_param_spec_uint ("num-reorder-frames", "Number of Reorder frames",
+ "Number of B frames between the references frames (static control)",
+ 0, G_MAXUINT8, DEFAULT_NUM_REORDER_FRAMES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_PREVIEW_FLIPPED,
+ g_param_spec_boolean ("preview-flipped", "Flip preview",
+ "Horizontal flipped image for non H.264 streams (static control)",
+ DEFAULT_PREVIEW_FLIPPED, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_LEAKY_BUCKET_SIZE,
+ g_param_spec_uint ("leaky-bucket-size", "Size of the leaky bucket size",
+ "Size of the leaky bucket size in milliseconds (static control)",
+ 0, G_MAXUINT16, DEFAULT_LEAKY_BUCKET_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+
+ /* Dynamic controls */
+ g_object_class_install_property (gobject_class, PROP_RATE_CONTROL,
+ g_param_spec_enum ("rate-control", "Rate control",
+ "Rate control mode (static & dynamic control)",
+ UVC_H264_RATECONTROL_TYPE, DEFAULT_RATE_CONTROL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_FIXED_FRAMERATE,
+ g_param_spec_boolean ("fixed-framerate", "Fixed framerate",
+ "Fixed framerate (static & dynamic control)",
+ DEFAULT_FIXED_FRAMERATE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MAX_MBPS,
+ g_param_spec_uint ("max-mbps", "Max macroblocks/second",
+ "The number of macroblocks per second for the maximum processing rate",
+ 0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_LEVEL_IDC,
+ g_param_spec_uint ("level-idc", "Level IDC",
+ "Level IDC (dynamic control)",
+ 0, G_MAXUINT8, DEFAULT_LEVEL_IDC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_PEAK_BITRATE,
+ g_param_spec_uint ("peak-bitrate", "Peak bitrate",
+ "The peak bitrate in bits/second (dynamic control)",
+ 0, G_MAXUINT, DEFAULT_PEAK_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_AVERAGE_BITRATE,
+ g_param_spec_uint ("average-bitrate", "Average bitrate",
+ "The average bitrate in bits/second (dynamic control)",
+ 0, G_MAXUINT, DEFAULT_AVERAGE_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MIN_IFRAME_QP,
+ g_param_spec_int ("min-iframe-qp", "Minimum I frame QP",
+ "The minimum Quantization step size for I frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MIN_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MAX_IFRAME_QP,
+ g_param_spec_int ("max-iframe-qp", "Minimum I frame QP",
+ "The minimum Quantization step size for I frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MAX_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MIN_PFRAME_QP,
+ g_param_spec_int ("min-pframe-qp", "Minimum P frame QP",
+ "The minimum Quantization step size for P frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MIN_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MAX_PFRAME_QP,
+ g_param_spec_int ("max-pframe-qp", "Minimum P frame QP",
+ "The minimum Quantization step size for P frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MAX_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MIN_BFRAME_QP,
+ g_param_spec_int ("min-bframe-qp", "Minimum B frame QP",
+ "The minimum Quantization step size for B frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MIN_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MAX_BFRAME_QP,
+ g_param_spec_int ("max-bframe-qp", "Minimum B frame QP",
+ "The minimum Quantization step size for B frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MAX_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_LTR_BUFFER_SIZE,
+ g_param_spec_int ("ltr-buffer-size", "LTR Buffer size",
+ "Total number of Long-Term Reference frames (dynamic control)",
+ 0, G_MAXUINT8, DEFAULT_LTR_BUFFER_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_LTR_ENCODER_CONTROL,
+ g_param_spec_int ("ltr-encoder-control", "LTR frames controled by device",
+ "Number of LTR frames the device can control (dynamic control)",
+ 0, G_MAXUINT8, DEFAULT_LTR_ENCODER_CONTROL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+
+ _signals[SIGNAL_GET_ENUM_SETTING] =
+ g_signal_new_class_handler ("get-enum-setting",
+ G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_uvc_h264_src_get_enum_setting),
+ NULL, NULL, __gst_uvc_h264_marshal_BOOLEAN__STRING_POINTER_POINTER,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRING, G_TYPE_POINTER, G_TYPE_POINTER, 0);
+ _signals[SIGNAL_GET_BOOLEAN_SETTING] =
+ g_signal_new_class_handler ("get-boolean-setting",
+ G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_uvc_h264_src_get_boolean_setting), NULL, NULL,
+ __gst_uvc_h264_marshal_BOOLEAN__STRING_POINTER_POINTER,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRING, G_TYPE_POINTER, G_TYPE_POINTER, 0);
+ _signals[SIGNAL_GET_INT_SETTING] =
+ g_signal_new_class_handler ("get-int-setting",
+ G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_uvc_h264_src_get_int_setting), NULL, NULL,
+ __gst_uvc_h264_marshal_BOOLEAN__STRING_POINTER_POINTER_POINTER,
+ G_TYPE_BOOLEAN, 4, G_TYPE_STRING, G_TYPE_POINTER, G_TYPE_POINTER,
+ G_TYPE_POINTER, 0);
+
+}
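+
+/* The action signals registered above let applications probe what the
+ * camera supports at runtime.  A hedged sketch (the accepted setting names
+ * are defined by the getter implementations and are assumed here):
+ *
+ *   gboolean ret;
+ *   gint min, def, max;
+ *   g_signal_emit_by_name (src, "get-int-setting", "peak-bitrate",
+ *       &min, &def, &max, &ret);
+ */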
+
+static void
+gst_uvc_h264_src_init (GstUvcH264Src * self, GstUvcH264SrcClass * klass)
+{
+ self->vfsrc =
+ gst_ghost_pad_new_no_target (GST_BASE_CAMERA_SRC_VIEWFINDER_PAD_NAME,
+ GST_PAD_SRC);
+ gst_pad_set_getcaps_function (self->vfsrc,
+ GST_DEBUG_FUNCPTR (gst_uvc_h264_src_getcaps));
+ gst_element_add_pad (GST_ELEMENT (self), self->vfsrc);
+
+ self->imgsrc =
+ gst_ghost_pad_new_no_target (GST_BASE_CAMERA_SRC_IMAGE_PAD_NAME,
+ GST_PAD_SRC);
+ gst_element_add_pad (GST_ELEMENT (self), self->imgsrc);
+
+ self->vidsrc =
+ gst_ghost_pad_new_no_target (GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME,
+ GST_PAD_SRC);
+ gst_pad_set_getcaps_function (self->vidsrc,
+ GST_DEBUG_FUNCPTR (gst_uvc_h264_src_getcaps));
+ gst_element_add_pad (GST_ELEMENT (self), self->vidsrc);
+ gst_pad_add_buffer_probe (self->vidsrc,
+ (GCallback) gst_uvc_h264_src_buffer_probe, self);
+ gst_pad_add_event_probe (self->vfsrc,
+ (GCallback) gst_uvc_h264_src_event_probe, self);
+ gst_pad_add_event_probe (self->vidsrc,
+ (GCallback) gst_uvc_h264_src_event_probe, self);
+
+ self->srcpad_event_func = GST_PAD_EVENTFUNC (self->vfsrc);
+
+ gst_pad_set_event_function (self->imgsrc, gst_uvc_h264_src_event);
+ gst_pad_set_event_function (self->vidsrc, gst_uvc_h264_src_event);
+ gst_pad_set_event_function (self->vfsrc, gst_uvc_h264_src_event);
+
+ g_signal_connect (self->vidsrc, "linked",
+ (GCallback) gst_uvc_h264_src_pad_linking_cb, self);
+ g_signal_connect (self->vidsrc, "unlinked",
+ (GCallback) gst_uvc_h264_src_pad_linking_cb, self);
+ g_signal_connect (self->vfsrc, "linked",
+ (GCallback) gst_uvc_h264_src_pad_linking_cb, self);
+ g_signal_connect (self->vfsrc, "unlinked",
+ (GCallback) gst_uvc_h264_src_pad_linking_cb, self);
+
+ self->vid_newseg = FALSE;
+ self->vf_newseg = FALSE;
+ self->v4l2_fd = -1;
+ gst_base_camera_src_set_mode (GST_BASE_CAMERA_SRC (self), MODE_VIDEO);
+
+ self->main_format = UVC_H264_SRC_FORMAT_NONE;
+ self->main_width = 0;
+ self->main_height = 0;
+ self->main_frame_interval = 0;
+ self->main_stream_format = UVC_H264_STREAMFORMAT_ANNEXB;
+ self->main_profile = UVC_H264_PROFILE_CONSTRAINED_BASELINE;
+ self->secondary_format = UVC_H264_SRC_FORMAT_NONE;
+ self->secondary_width = 0;
+ self->secondary_height = 0;
+ self->secondary_frame_interval = 0;
+
+ /* v4l2src properties */
+ self->num_buffers = DEFAULT_NUM_BUFFERS;
+ self->device = g_strdup (DEFAULT_DEVICE);
+
+ /* Static controls */
+ self->initial_bitrate = DEFAULT_INITIAL_BITRATE;
+ self->slice_units = DEFAULT_SLICE_UNITS;
+ self->slice_mode = DEFAULT_SLICE_MODE;
+ self->iframe_period = DEFAULT_IFRAME_PERIOD;
+ self->usage_type = DEFAULT_USAGE_TYPE;
+ self->entropy = DEFAULT_ENTROPY;
+ self->enable_sei = DEFAULT_ENABLE_SEI;
+ self->num_reorder_frames = DEFAULT_NUM_REORDER_FRAMES;
+ self->preview_flipped = DEFAULT_PREVIEW_FLIPPED;
+ self->leaky_bucket_size = DEFAULT_LEAKY_BUCKET_SIZE;
+
+ /* Dynamic controls */
+ self->rate_control = DEFAULT_RATE_CONTROL;
+ self->fixed_framerate = DEFAULT_FIXED_FRAMERATE;
+ self->level_idc = DEFAULT_LEVEL_IDC;
+ self->peak_bitrate = DEFAULT_PEAK_BITRATE;
+ self->average_bitrate = DEFAULT_AVERAGE_BITRATE;
+ self->min_qp[QP_I_FRAME] = DEFAULT_MIN_QP;
+ self->max_qp[QP_I_FRAME] = DEFAULT_MAX_QP;
+ self->min_qp[QP_P_FRAME] = DEFAULT_MIN_QP;
+ self->max_qp[QP_P_FRAME] = DEFAULT_MAX_QP;
+ self->min_qp[QP_B_FRAME] = DEFAULT_MIN_QP;
+ self->max_qp[QP_B_FRAME] = DEFAULT_MAX_QP;
+ self->ltr_buffer_size = DEFAULT_LTR_BUFFER_SIZE;
+ self->ltr_encoder_control = DEFAULT_LTR_ENCODER_CONTROL;
+}
+
+static void
+gst_uvc_h264_src_dispose (GObject * object)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (object);
+
+#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB)
+ if (self->usb_ctx)
+ libusb_exit (self->usb_ctx);
+ self->usb_ctx = NULL;
+#else
+ (void) self;
+#endif
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+static void
+gst_uvc_h264_src_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (object);
+
+ switch (prop_id) {
+ case PROP_COLORSPACE_NAME:
+ g_free (self->colorspace_name);
+ self->colorspace_name = g_value_dup_string (value);
+ break;
+ case PROP_JPEG_DECODER_NAME:
+ g_free (self->jpeg_decoder_name);
+ self->jpeg_decoder_name = g_value_dup_string (value);
+ break;
+ case PROP_NUM_CLOCK_SAMPLES:
+ self->num_clock_samples = g_value_get_int (value);
+ if (self->mjpg_demux)
+ g_object_set (self->mjpg_demux,
+ "num-clock-samples", self->num_clock_samples, NULL);
+ break;
+ /* v4l2 properties */
+ case PROP_NUM_BUFFERS:
+ self->num_buffers = g_value_get_int (value);
+ if (self->v4l2_src)
+ g_object_set_property (G_OBJECT (self->v4l2_src), "num-buffers", value);
+ break;
+ case PROP_DEVICE:
+ g_free (self->device);
+ self->device = g_value_dup_string (value);
+ if (self->v4l2_src)
+ g_object_set_property (G_OBJECT (self->v4l2_src), "device", value);
+ break;
+ /* Static controls */
+ case PROP_INITIAL_BITRATE:
+ self->initial_bitrate = g_value_get_uint (value);
+ break;
+ case PROP_SLICE_UNITS:
+ self->slice_units = g_value_get_uint (value);
+ break;
+ case PROP_SLICE_MODE:
+ self->slice_mode = g_value_get_enum (value);
+ break;
+ case PROP_IFRAME_PERIOD:
+ self->iframe_period = g_value_get_uint (value);
+ break;
+ case PROP_USAGE_TYPE:
+ self->usage_type = g_value_get_enum (value);
+ break;
+ case PROP_ENTROPY:
+ self->entropy = g_value_get_enum (value);
+ break;
+ case PROP_ENABLE_SEI:
+ self->enable_sei = g_value_get_boolean (value);
+ break;
+ case PROP_NUM_REORDER_FRAMES:
+ self->num_reorder_frames = g_value_get_uint (value);
+ break;
+ case PROP_PREVIEW_FLIPPED:
+ self->preview_flipped = g_value_get_boolean (value);
+ break;
+ case PROP_LEAKY_BUCKET_SIZE:
+ self->leaky_bucket_size = g_value_get_uint (value);
+ break;
+
+ /* Dynamic controls */
+ case PROP_RATE_CONTROL:
+ self->rate_control = g_value_get_enum (value);
+ set_rate_control (self);
+ update_rate_control (self);
+ break;
+ case PROP_FIXED_FRAMERATE:
+ self->fixed_framerate = g_value_get_boolean (value);
+ set_rate_control (self);
+ update_rate_control (self);
+ break;
+ case PROP_LEVEL_IDC:
+ self->level_idc = g_value_get_uint (value);
+ set_level_idc (self);
+ update_level_idc_and_get_max_mbps (self);
+ break;
+ case PROP_PEAK_BITRATE:
+ self->peak_bitrate = g_value_get_uint (value);
+ set_bitrate (self);
+ update_bitrate (self);
+ break;
+ case PROP_AVERAGE_BITRATE:
+ self->average_bitrate = g_value_get_uint (value);
+ set_bitrate (self);
+ update_bitrate (self);
+ break;
+ case PROP_MIN_IFRAME_QP:
+ self->min_qp[QP_I_FRAME] = g_value_get_int (value);
+ set_qp (self, QP_I_FRAME);
+ update_qp (self, QP_I_FRAME);
+ break;
+ case PROP_MAX_IFRAME_QP:
+ self->max_qp[QP_I_FRAME] = g_value_get_int (value);
+ set_qp (self, QP_I_FRAME);
+ update_qp (self, QP_I_FRAME);
+ break;
+ case PROP_MIN_PFRAME_QP:
+ self->min_qp[QP_P_FRAME] = g_value_get_int (value);
+ set_qp (self, QP_P_FRAME);
+ update_qp (self, QP_P_FRAME);
+ break;
+ case PROP_MAX_PFRAME_QP:
+ self->max_qp[QP_P_FRAME] = g_value_get_int (value);
+ set_qp (self, QP_P_FRAME);
+ update_qp (self, QP_P_FRAME);
+ break;
+ case PROP_MIN_BFRAME_QP:
+ self->min_qp[QP_B_FRAME] = g_value_get_int (value);
+ set_qp (self, QP_B_FRAME);
+ update_qp (self, QP_B_FRAME);
+ break;
+ case PROP_MAX_BFRAME_QP:
+ self->max_qp[QP_B_FRAME] = g_value_get_int (value);
+ set_qp (self, QP_B_FRAME);
+ update_qp (self, QP_B_FRAME);
+ break;
+ case PROP_LTR_BUFFER_SIZE:
+ self->ltr_buffer_size = g_value_get_int (value);
+ set_ltr (self);
+ update_ltr (self);
+ break;
+ case PROP_LTR_ENCODER_CONTROL:
+ self->ltr_encoder_control = g_value_get_int (value);
+ set_ltr (self);
+ update_ltr (self);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_uvc_h264_src_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (object);
+ uvcx_video_config_probe_commit_t probe;
+
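+  /* For static controls, fill the probe structure from the cached property
+   * values first, then overwrite it with the device state (GET_CUR) once we
+   * are at least PAUSED, so the values reported below reflect what the
+   * camera actually uses. */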
+ switch (prop_id) {
+ case PROP_INITIAL_BITRATE:
+ case PROP_SLICE_UNITS:
+ case PROP_SLICE_MODE:
+ case PROP_IFRAME_PERIOD:
+ case PROP_USAGE_TYPE:
+ case PROP_ENTROPY:
+ case PROP_ENABLE_SEI:
+ case PROP_NUM_REORDER_FRAMES:
+ case PROP_PREVIEW_FLIPPED:
+ case PROP_LEAKY_BUCKET_SIZE:
+ fill_probe_commit (self, &probe, 0, 0, 0, 0, 0);
+ if (GST_STATE (self) >= GST_STATE_PAUSED) {
+ xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+ (guchar *) & probe);
+ }
+ break;
+ default:
+ break;
+ }
+
+ switch (prop_id) {
+ case PROP_COLORSPACE_NAME:
+ g_value_set_string (value, self->colorspace_name);
+ break;
+ case PROP_JPEG_DECODER_NAME:
+ g_value_set_string (value, self->jpeg_decoder_name);
+ break;
+ case PROP_NUM_CLOCK_SAMPLES:
+ g_value_set_int (value, self->num_clock_samples);
+ break;
+ /* v4l2src properties */
+ case PROP_NUM_BUFFERS:
+ g_value_set_int (value, self->num_buffers);
+ break;
+ case PROP_DEVICE:
+ g_value_set_string (value, self->device);
+ break;
+ case PROP_DEVICE_NAME:
+ if (self->v4l2_src)
+ g_object_get_property (G_OBJECT (self->v4l2_src), "device-name", value);
+ else
+ g_value_set_static_string (value, "");
+ break;
+ /* Static controls */
+ case PROP_INITIAL_BITRATE:
+ g_value_set_uint (value, probe.dwBitRate);
+ break;
+ case PROP_SLICE_UNITS:
+ g_value_set_uint (value, probe.wSliceUnits);
+ break;
+ case PROP_SLICE_MODE:
+ g_value_set_enum (value, probe.wSliceMode);
+ break;
+ case PROP_IFRAME_PERIOD:
+ g_value_set_uint (value, probe.wIFramePeriod);
+ break;
+ case PROP_USAGE_TYPE:
+ g_value_set_enum (value, probe.bUsageType);
+ break;
+ case PROP_ENTROPY:
+ g_value_set_enum (value, probe.bEntropyCABAC);
+ break;
+ case PROP_ENABLE_SEI:
+ g_value_set_boolean (value,
+ (probe.bTimestamp == UVC_H264_TIMESTAMP_SEI_ENABLE));
+ break;
+ case PROP_NUM_REORDER_FRAMES:
+ g_value_set_uint (value, probe.bNumOfReorderFrames);
+ break;
+ case PROP_PREVIEW_FLIPPED:
+ g_value_set_boolean (value,
+ (probe.bPreviewFlipped == UVC_H264_PREFLIPPED_HORIZONTAL));
+ break;
+ case PROP_LEAKY_BUCKET_SIZE:
+ g_value_set_uint (value, probe.wLeakyBucketSize);
+ break;
+
+ /* Dynamic controls */
+ case PROP_RATE_CONTROL:
+ update_rate_control (self);
+ g_value_set_enum (value, self->rate_control);
+ break;
+ case PROP_FIXED_FRAMERATE:
+ update_rate_control (self);
+ g_value_set_boolean (value, self->fixed_framerate);
+ break;
+ case PROP_MAX_MBPS:
+ g_value_set_uint (value, update_level_idc_and_get_max_mbps (self));
+ break;
+ case PROP_LEVEL_IDC:
+ update_level_idc_and_get_max_mbps (self);
+ g_value_set_uint (value, self->level_idc);
+ break;
+ case PROP_PEAK_BITRATE:
+ update_bitrate (self);
+ g_value_set_uint (value, self->peak_bitrate);
+ break;
+ case PROP_AVERAGE_BITRATE:
+ update_bitrate (self);
+ g_value_set_uint (value, self->average_bitrate);
+ break;
+ case PROP_MIN_IFRAME_QP:
+ update_qp (self, QP_I_FRAME);
+ g_value_set_int (value, self->min_qp[QP_I_FRAME]);
+ break;
+ case PROP_MAX_IFRAME_QP:
+ update_qp (self, QP_I_FRAME);
+ g_value_set_int (value, self->max_qp[QP_I_FRAME]);
+ break;
+ case PROP_MIN_PFRAME_QP:
+ update_qp (self, QP_P_FRAME);
+ g_value_set_int (value, self->min_qp[QP_P_FRAME]);
+ break;
+ case PROP_MAX_PFRAME_QP:
+ update_qp (self, QP_P_FRAME);
+ g_value_set_int (value, self->max_qp[QP_P_FRAME]);
+ break;
+ case PROP_MIN_BFRAME_QP:
+ update_qp (self, QP_B_FRAME);
+ g_value_set_int (value, self->min_qp[QP_B_FRAME]);
+ break;
+ case PROP_MAX_BFRAME_QP:
+ update_qp (self, QP_B_FRAME);
+ g_value_set_int (value, self->max_qp[QP_B_FRAME]);
+ break;
+ case PROP_LTR_BUFFER_SIZE:
+ update_ltr (self);
+ g_value_set_int (value, self->ltr_buffer_size);
+ break;
+ case PROP_LTR_ENCODER_CONTROL:
+ update_ltr (self);
+ g_value_set_int (value, self->ltr_encoder_control);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+ break;
+ }
+}
+
+/* Set dynamic controls */
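+/* The setters below generally do a read-modify-write of their XU control
+ * (GET_CUR, patch the managed fields, SET_CUR) so that fields we do not
+ * manage keep their current device values. */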
+static void
+set_rate_control (GstUvcH264Src * self)
+{
+ uvcx_rate_control_mode_t req;
+
+ if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " RATE_CONTROL GET_CUR error");
+ return;
+ }
+
+ req.bRateControlMode = self->rate_control;
+ if (self->fixed_framerate)
+ req.bRateControlMode |= UVC_H264_RATECONTROL_FIXED_FRM_FLG;
+
+ if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_SET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " RATE_CONTROL SET_CUR error");
+ return;
+ }
+}
+
+static void
+set_level_idc (GstUvcH264Src * self)
+{
+ uvcx_video_advance_config_t req;
+
+ if (!xu_query (self, UVCX_VIDEO_ADVANCE_CONFIG, UVC_GET_CUR,
+ (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " VIDEO_ADVANCE_CONFIG GET_CUR error");
+ return;
+ }
+
+ req.blevel_idc = self->level_idc;
+ if (!xu_query (self, UVCX_VIDEO_ADVANCE_CONFIG, UVC_SET_CUR,
+ (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " VIDEO_ADVANCE_CONFIG SET_CUR error");
+ return;
+ }
+}
+
+static void
+set_bitrate (GstUvcH264Src * self)
+{
+ uvcx_bitrate_layers_t req;
+
+ if (!xu_query (self, UVCX_BITRATE_LAYERS, UVC_GET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " BITRATE_LAYERS GET_CUR error");
+ return;
+ }
+
+ req.dwPeakBitrate = self->peak_bitrate;
+ req.dwAverageBitrate = self->average_bitrate;
+ if (!xu_query (self, UVCX_BITRATE_LAYERS, UVC_SET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " BITRATE_LAYERS SET_CUR error");
+ return;
+ }
+}
+
+static void
+set_qp (GstUvcH264Src * self, gint type)
+{
+ uvcx_qp_steps_layers_t req;
+
+ req.wLayerID = 0;
+ switch (type) {
+ case QP_I_FRAME:
+ req.bFrameType = UVC_H264_QP_STEPS_I_FRAME_TYPE;
+ break;
+ case QP_P_FRAME:
+ req.bFrameType = UVC_H264_QP_STEPS_P_FRAME_TYPE;
+ break;
+ case QP_B_FRAME:
+ req.bFrameType = UVC_H264_QP_STEPS_B_FRAME_TYPE;
+ break;
+ default:
+ return;
+ }
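+  /* A first SET_CUR with zeroed QPs selects the layer/frame type addressed
+   * by the control, so the GET_CUR below returns the current range for that
+   * frame type before we overwrite it with our cached values. */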
+ req.bMinQp = 0;
+ req.bMaxQp = 0;
+ if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_SET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS SET_CUR error");
+ return;
+ }
+
+ if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_GET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS GET_CUR error");
+ return;
+ }
+
+ req.bMinQp = self->min_qp[type];
+ req.bMaxQp = self->max_qp[type];
+ if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_SET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS SET_CUR error");
+ return;
+ }
+}
+
+static void
+set_ltr (GstUvcH264Src * self)
+{
+ uvcx_ltr_buffer_size_control_t req;
+
+ if (!xu_query (self, UVCX_LTR_BUFFER_SIZE_CONTROL, UVC_GET_CUR,
+ (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " LTR_BUFFER_SIZE GET_CUR error");
+ return;
+ }
+
+ req.bLTRBufferSize = self->ltr_buffer_size;
+ req.bLTREncoderControl = self->ltr_encoder_control;
+ if (!xu_query (self, UVCX_LTR_BUFFER_SIZE_CONTROL, UVC_SET_CUR,
+ (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, "LTR_BUFFER_SIZE SET_CUR error");
+ return;
+ }
+}
+
+/* Get Dynamic controls */
+
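+/* The update_*() helpers read a control back from the device, refresh the
+ * cached property values and emit g_object_notify() for anything that
+ * changed. */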
+static void
+update_rate_control (GstUvcH264Src * self)
+{
+ uvcx_rate_control_mode_t req;
+
+ if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " RATE_CONTROL GET_CUR error");
+ return;
+ }
+
+ if (self->rate_control != (req.bRateControlMode &
+ ~UVC_H264_RATECONTROL_FIXED_FRM_FLG)) {
+ self->rate_control = (req.bRateControlMode &
+ ~UVC_H264_RATECONTROL_FIXED_FRM_FLG);
+ g_object_notify (G_OBJECT (self), "rate-control");
+ }
+ if (self->fixed_framerate != ((req.bRateControlMode &
+ UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0)) {
+ self->fixed_framerate = ((req.bRateControlMode &
+ UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0);
+ g_object_notify (G_OBJECT (self), "fixed-framerate");
+ }
+}
+
+static guint32
+update_level_idc_and_get_max_mbps (GstUvcH264Src * self)
+{
+ uvcx_video_advance_config_t req;
+
+ if (!xu_query (self, UVCX_VIDEO_ADVANCE_CONFIG, UVC_GET_CUR,
+ (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " VIDEO_ADVANCE_CONFIG GET_CUR error");
+ return 0;
+ }
+
+ if (self->level_idc != req.blevel_idc) {
+ self->level_idc = req.blevel_idc;
+ g_object_notify (G_OBJECT (self), "level-idc");
+ }
+ return req.dwMb_max;
+}
+
+static void
+update_bitrate (GstUvcH264Src * self)
+{
+ uvcx_bitrate_layers_t req;
+
+ if (!xu_query (self, UVCX_BITRATE_LAYERS, UVC_GET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " BITRATE_LAYERS GET_CUR error");
+ return;
+ }
+ if (self->peak_bitrate != req.dwPeakBitrate) {
+ self->peak_bitrate = req.dwPeakBitrate;
+ g_object_notify (G_OBJECT (self), "peak-bitrate");
+ }
+ if (self->average_bitrate != req.dwAverageBitrate) {
+ self->average_bitrate = req.dwAverageBitrate;
+ g_object_notify (G_OBJECT (self), "average-bitrate");
+ }
+}
+
+static gboolean
+update_qp (GstUvcH264Src * self, gint type)
+{
+ uvcx_qp_steps_layers_t req;
+ guint8 frame_type;
+
+ req.wLayerID = 0;
+ switch (type) {
+ case QP_I_FRAME:
+ frame_type = UVC_H264_QP_STEPS_I_FRAME_TYPE;
+ break;
+ case QP_P_FRAME:
+ frame_type = UVC_H264_QP_STEPS_P_FRAME_TYPE;
+ break;
+ case QP_B_FRAME:
+ frame_type = UVC_H264_QP_STEPS_B_FRAME_TYPE;
+ break;
+ default:
+ return FALSE;
+ }
+ req.bFrameType = frame_type;
+ req.bMinQp = 0;
+ req.bMaxQp = 0;
+ if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_SET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS SET_CUR error");
+ return FALSE;
+ }
+
+ if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_GET_CUR, (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS GET_CUR error");
+ return FALSE;
+ }
+
+ if (req.bFrameType == frame_type) {
+ if (self->min_qp[type] != req.bMinQp) {
+ self->min_qp[type] = req.bMinQp;
+ switch (type) {
+ case QP_I_FRAME:
+ g_object_notify (G_OBJECT (self), "min-iframe-qp");
+ break;
+ case QP_P_FRAME:
+ g_object_notify (G_OBJECT (self), "min-pframe-qp");
+ break;
+ case QP_B_FRAME:
+ g_object_notify (G_OBJECT (self), "min-bframe-qp");
+ break;
+ default:
+ break;
+ }
+ }
+ if (self->max_qp[type] != req.bMaxQp) {
+ self->max_qp[type] = req.bMaxQp;
+ switch (type) {
+ case QP_I_FRAME:
+ g_object_notify (G_OBJECT (self), "max-iframe-qp");
+ break;
+ case QP_P_FRAME:
+ g_object_notify (G_OBJECT (self), "max-pframe-qp");
+ break;
+ case QP_B_FRAME:
+ g_object_notify (G_OBJECT (self), "max-bframe-qp");
+ break;
+ default:
+ break;
+ }
+ }
+ return TRUE;
+ } else {
+ self->min_qp[type] = 0xFF;
+ self->max_qp[type] = 0xFF;
+ return FALSE;
+ }
+}
+
+static void
+update_ltr (GstUvcH264Src * self)
+{
+ uvcx_ltr_buffer_size_control_t req;
+
+ if (!xu_query (self, UVCX_LTR_BUFFER_SIZE_CONTROL, UVC_GET_CUR,
+ (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " LTR_BUFFER_SIZE GET_CUR error");
+ return;
+ }
+
+ if (self->ltr_buffer_size != req.bLTRBufferSize) {
+ self->ltr_buffer_size = req.bLTRBufferSize;
+ g_object_notify (G_OBJECT (self), "ltr-buffer-size");
+ }
+ if (self->ltr_encoder_control != req.bLTREncoderControl) {
+ self->ltr_encoder_control = req.bLTREncoderControl;
+ g_object_notify (G_OBJECT (self), "ltr-encoder-control");
+ }
+}
+
+#define STORE_MIN_DEF_MAX(type) \
+ *(type *)min = *((type *) (min_p + offset)); \
+ *(type *)def = *((type *) (def_p + offset)); \
+ *(type *)max = *((type *) (max_p + offset));
+
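+/* Query the MIN, DEF and MAX values of one field of an XU control.  'offset'
+ * is the byte offset of the field inside the control structure and 'size'
+ * its width in bytes, negative for signed fields. */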
+static gboolean
+probe_setting (GstUvcH264Src * self, uvcx_control_selector_t selector,
+ guint offset, gint size, gpointer min, gpointer def, gpointer max)
+{
+ guchar *min_p, *def_p, *max_p;
+ gboolean ret = FALSE;
+ __u16 len;
+
+ if (!xu_query (self, selector, UVC_GET_LEN, (guchar *) & len)) {
+ GST_WARNING_OBJECT (self, "probe_setting GET_LEN error");
+ return FALSE;
+ }
+ min_p = g_malloc0 (len);
+ def_p = g_malloc0 (len);
+ max_p = g_malloc0 (len);
+
+ if (!xu_query (self, selector, UVC_GET_MIN, min_p)) {
+ GST_WARNING_OBJECT (self, "probe_setting GET_MIN error");
+ goto end;
+ }
+ if (!xu_query (self, selector, UVC_GET_DEF, def_p)) {
+ GST_WARNING_OBJECT (self, "probe_setting GET_DEF error");
+ goto end;
+ }
+ if (!xu_query (self, selector, UVC_GET_MAX, max_p)) {
+ GST_WARNING_OBJECT (self, "probe_setting GET_MAX error");
+ goto end;
+ }
+
+ switch (size) {
+ case -1:
+ STORE_MIN_DEF_MAX (gint8);
+ ret = TRUE;
+ break;
+ case 1:
+ STORE_MIN_DEF_MAX (guint8);
+ ret = TRUE;
+ break;
+ case -2:
+ STORE_MIN_DEF_MAX (gint16);
+ ret = TRUE;
+ break;
+ case 2:
+ STORE_MIN_DEF_MAX (guint16);
+ ret = TRUE;
+ break;
+ case -4:
+ STORE_MIN_DEF_MAX (gint32);
+ ret = TRUE;
+ break;
+ case 4:
+ STORE_MIN_DEF_MAX (guint32);
+ ret = TRUE;
+ break;
+ default:
+ break;
+ }
+
+end:
+ g_free (min_p);
+ g_free (def_p);
+ g_free (max_p);
+
+ return ret;
+}
+
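+/* Check whether the camera accepts 'value' for the field at 'offset' of the
+ * VIDEO_CONFIG probe: set it, read it back, then restore the previous probe
+ * state so the test has no side effect. */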
+static gboolean
+test_enum_setting (GstUvcH264Src * self, guint offset, guint size,
+ guint16 value)
+{
+ uvcx_video_config_probe_commit_t cur;
+ uvcx_video_config_probe_commit_t req;
+ guchar *req_p = (guchar *) & req;
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, (guchar *) & cur)) {
+ GST_WARNING_OBJECT (self, " GET_CUR error");
+ return FALSE;
+ }
+
+ req = cur;
+
+ if (size == 1)
+ *((guint8 *) (req_p + offset)) = (guint8) value;
+ else
+ *((guint16 *) (req_p + offset)) = value;
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, req_p)) {
+ GST_WARNING_OBJECT (self, " SET_CUR error");
+ return FALSE;
+ }
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, req_p)) {
+ GST_WARNING_OBJECT (self, " GET_CUR error");
+ return FALSE;
+ }
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, (guchar *) & cur)) {
+ GST_WARNING_OBJECT (self, " SET_CUR error");
+ return FALSE;
+ }
+
+ if (size == 1)
+ return *((guint8 *) (req_p + offset)) == (guint8) value;
+ else
+ return *((guint16 *) (req_p + offset)) == value;
+}
+
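+/* Build the bitmask of enum values the device supports for 'property' (and
+ * its default) by probing every candidate between the reported MIN and MAX. */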
+static gboolean
+gst_uvc_h264_src_get_enum_setting (GstUvcH264Src * self, gchar * property,
+ gint * mask, gint * default_value)
+{
+ guint8 min, def, max;
+ guint8 en;
+ gboolean ret = FALSE;
+
+ if (g_strcmp0 (property, "slice-mode") == 0) {
+ guint16 min16, def16, max16;
+ guint16 en16;
+
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, wSliceMode), 2,
+ &min16, &def16, &max16);
+ if (ret) {
+ *default_value = def16;
+ *mask = 0;
+ for (en16 = min16; en16 <= max16; en16++) {
+ if (test_enum_setting (self, offsetof (uvcx_video_config_probe_commit_t,
+ wSliceMode), 2, en16))
+ *mask |= (1 << en16);
+ }
+ }
+ } else if (g_strcmp0 (property, "usage-type") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, bUsageType), 1,
+ &min, &def, &max);
+ if (ret) {
+ *default_value = def;
+ *mask = 0;
+ for (en = min; en <= max; en++) {
+ if (test_enum_setting (self, offsetof (uvcx_video_config_probe_commit_t,
+ bUsageType), 1, en))
+ *mask |= (1 << en);
+ }
+ }
+ } else if (g_strcmp0 (property, "entropy") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, bEntropyCABAC), 1,
+ &min, &def, &max);
+ if (ret) {
+ *mask = (1 << min) | (1 << max);
+ *default_value = def;
+ }
+ } else if (g_strcmp0 (property, "rate-control") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, bRateControlMode), 1,
+ &min, &def, &max);
+ if (ret) {
+ uvcx_rate_control_mode_t cur;
+
+ *default_value = def;
+ *mask = 0;
+
+ xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR, (guchar *) & cur);
+
+ for (en = min; en <= max; en++) {
+ uvcx_rate_control_mode_t req = { 0, en };
+
+ if (xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_SET_CUR,
+ (guchar *) & req) &&
+ xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR,
+ (guchar *) & req) && req.bRateControlMode == en)
+ *mask |= (1 << en);
+ }
+ xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_SET_CUR, (guchar *) & cur);
+ }
+ }
+
+ return ret;
+}
+
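+/* Report whether a boolean property can be changed on this device and what
+ * its default value is, based on the MIN/DEF/MAX of the underlying field. */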
+static gboolean
+gst_uvc_h264_src_get_boolean_setting (GstUvcH264Src * self, gchar * property,
+ gboolean * changeable, gboolean * default_value)
+{
+  guint8 min = 0, def = 0, max = 0;
+ gboolean ret = FALSE;
+
+ if (g_strcmp0 (property, "enable-sei") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, bTimestamp), 1,
+ &min, &def, &max);
+ *changeable = (min != max);
+ *default_value = (def != 0);
+ } else if (g_strcmp0 (property, "preview-flipped") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, bPreviewFlipped), 1,
+ &min, &def, &max);
+ *changeable = (min != max);
+ *default_value = (def != 0);
+ } else if (g_strcmp0 (property, "fixed-framerate") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, bRateControlMode), 1,
+ &min, &def, &max);
+ *changeable = ((max & UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0);
+ *default_value = ((def & UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0);
+ }
+
+ return ret;
+}
+
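+/* Report the valid range and default of an integer property by probing the
+ * MIN/DEF/MAX of the corresponding XU control field. */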
+static gboolean
+gst_uvc_h264_src_get_int_setting (GstUvcH264Src * self, gchar * property,
+ gint * min, gint * def, gint * max)
+{
+  /* zero-init so a failed probe does not copy uninitialised values */
+  guint32 min32 = 0, def32 = 0, max32 = 0;
+  guint16 min16 = 0, def16 = 0, max16 = 0;
+  guint8 min8 = 0, def8 = 0, max8 = 0;
+  gint8 smin8 = 0, sdef8 = 0, smax8 = 0;
+ gboolean ret = FALSE;
+
+ GST_DEBUG_OBJECT (self, "Probing int property %s", property);
+ if (g_strcmp0 (property, "initial-bitrate") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, dwBitRate), 4,
+ &min32, &def32, &max32);
+ *min = min32;
+ *def = def32;
+ *max = max32;
+ } else if (g_strcmp0 (property, "slice-units") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, wSliceUnits), 2,
+ &min16, &def16, &max16);
+ *min = min16;
+ *def = def16;
+ *max = max16;
+ } else if (g_strcmp0 (property, "iframe-period") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, wIFramePeriod), 2,
+ &min16, &def16, &max16);
+ *min = min16;
+ *def = def16;
+ *max = max16;
+ } else if (g_strcmp0 (property, "num-reorder-frames") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, bNumOfReorderFrames), 1,
+ &min8, &def8, &max8);
+ *min = min8;
+ *def = def8;
+ *max = max8;
+ } else if (g_strcmp0 (property, "leaky-bucket-size") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+ offsetof (uvcx_video_config_probe_commit_t, wLeakyBucketSize), 2,
+ &min16, &def16, &max16);
+ *min = min16;
+ *def = def16;
+ *max = max16;
+ } else if (g_strcmp0 (property, "level-idc") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_ADVANCE_CONFIG,
+ offsetof (uvcx_video_advance_config_t, blevel_idc), 1,
+ &min8, &def8, &max8);
+ *min = min8;
+ *def = def8;
+ *max = max8;
+ } else if (g_strcmp0 (property, "max-mbps") == 0) {
+ ret = probe_setting (self, UVCX_VIDEO_ADVANCE_CONFIG,
+ offsetof (uvcx_video_advance_config_t, dwMb_max), 4,
+ &min32, &def32, &max32);
+ *min = min32;
+ *def = def32;
+ *max = max32;
+ } else if (g_strcmp0 (property, "peak-bitrate") == 0) {
+ ret = probe_setting (self, UVCX_BITRATE_LAYERS,
+ offsetof (uvcx_bitrate_layers_t, dwPeakBitrate), 4,
+ &min32, &def32, &max32);
+ *min = min32;
+ *def = def32;
+ *max = max32;
+ } else if (g_strcmp0 (property, "average-bitrate") == 0) {
+ ret = probe_setting (self, UVCX_BITRATE_LAYERS,
+ offsetof (uvcx_bitrate_layers_t, dwAverageBitrate), 4,
+ &min32, &def32, &max32);
+ *min = min32;
+ *def = def32;
+ *max = max32;
+ } else if (g_strcmp0 (property, "min-iframe-qp") == 0) {
+ if (update_qp (self, QP_I_FRAME))
+ ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+ offsetof (uvcx_qp_steps_layers_t, bMinQp), 1, &smin8, &sdef8, &smax8);
+ *min = smin8;
+ *def = sdef8;
+ *max = smax8;
+ } else if (g_strcmp0 (property, "max-iframe-qp") == 0) {
+ if (update_qp (self, QP_I_FRAME))
+ ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+ offsetof (uvcx_qp_steps_layers_t, bMaxQp), 1, &smin8, &sdef8, &smax8);
+ *min = smin8;
+ *def = sdef8;
+ *max = smax8;
+ } else if (g_strcmp0 (property, "min-pframe-qp") == 0) {
+ if (update_qp (self, QP_P_FRAME))
+ ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+ offsetof (uvcx_qp_steps_layers_t, bMinQp), 1, &smin8, &sdef8, &smax8);
+ *min = smin8;
+ *def = sdef8;
+ *max = smax8;
+ } else if (g_strcmp0 (property, "max-pframe-qp") == 0) {
+ if (update_qp (self, QP_P_FRAME))
+ ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+ offsetof (uvcx_qp_steps_layers_t, bMaxQp), 1, &smin8, &sdef8, &smax8);
+ *min = smin8;
+ *def = sdef8;
+ *max = smax8;
+ } else if (g_strcmp0 (property, "min-bframe-qp") == 0) {
+ if (update_qp (self, QP_B_FRAME))
+ ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+ offsetof (uvcx_qp_steps_layers_t, bMinQp), 1, &smin8, &sdef8, &smax8);
+ *min = smin8;
+ *def = sdef8;
+ *max = smax8;
+ } else if (g_strcmp0 (property, "max-bframe-qp") == 0) {
+ if (update_qp (self, QP_B_FRAME))
+ ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+ offsetof (uvcx_qp_steps_layers_t, bMaxQp), 1, &smin8, &sdef8, &smax8);
+ *min = smin8;
+ *def = sdef8;
+ *max = smax8;
+ } else if (g_strcmp0 (property, "ltr-buffer-size") == 0) {
+ ret = probe_setting (self, UVCX_LTR_BUFFER_SIZE_CONTROL,
+ offsetof (uvcx_ltr_buffer_size_control_t, bLTRBufferSize), 1,
+ &min8, &def8, &max8);
+ *min = min8;
+ *def = def8;
+ *max = max8;
+ } else if (g_strcmp0 (property, "ltr-encoder-control") == 0) {
+ ret = probe_setting (self, UVCX_LTR_BUFFER_SIZE_CONTROL,
+ offsetof (uvcx_ltr_buffer_size_control_t, bLTREncoderControl), 1,
+ &min8, &def8, &max8);
+ *min = min8;
+ *def = def8;
+ *max = max8;
+ }
+
+ return ret;
+}
+
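+/* Event probe on vidsrc/vfsrc: drop EOS while the internal pipeline is being
+ * reconfigured and drop duplicate newsegment events on each pad. */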
+static gboolean
+gst_uvc_h264_src_event_probe (GstPad * pad, GstEvent * event,
+ gpointer user_data)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (user_data);
+ gboolean ret = TRUE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ ret = !self->reconfiguring;
+ break;
+ case GST_EVENT_NEWSEGMENT:
+ if (pad == self->vidsrc) {
+ ret = !self->vid_newseg;
+ self->vid_newseg = TRUE;
+ } else if (pad == self->vfsrc) {
+ ret = !self->vf_newseg;
+ self->vf_newseg = TRUE;
+ }
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
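+/* Buffer probe on vidsrc: when an upstream force-key-unit event is pending,
+ * turn it into a downstream force-key-unit event timed on the current buffer
+ * and push it out before the buffer. */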
+static gboolean
+gst_uvc_h264_src_buffer_probe (GstPad * pad, GstBuffer * buffer,
+ gpointer user_data)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (user_data);
+
+ /* TODO: Check the NALU type and make sure it is a keyframe */
+ if (self->key_unit_event) {
+ GstClockTime ts, running_time, stream_time;
+ gboolean all_headers;
+ guint count;
+ GstEvent *downstream;
+
+ if (gst_video_event_parse_upstream_force_key_unit (self->key_unit_event,
+ &ts, &all_headers, &count)) {
+ if (!GST_CLOCK_TIME_IS_VALID (ts)) {
+ ts = GST_BUFFER_TIMESTAMP (buffer);
+ }
+ running_time = gst_segment_to_running_time (&self->segment,
+ GST_FORMAT_TIME, ts);
+
+ stream_time = gst_segment_to_stream_time (&self->segment,
+ GST_FORMAT_TIME, ts);
+
+ GST_DEBUG_OBJECT (self, "Sending downstream force-key-unit : %d - %d ts=%"
+ GST_TIME_FORMAT " running time =%" GST_TIME_FORMAT " stream=%"
+ GST_TIME_FORMAT, all_headers, count, GST_TIME_ARGS (ts),
+ GST_TIME_ARGS (running_time), GST_TIME_ARGS (stream_time));
+ downstream = gst_video_event_new_downstream_force_key_unit (ts,
+ running_time, stream_time, all_headers, count);
+ gst_pad_push_event (self->vidsrc, downstream);
+ gst_event_replace (&self->key_unit_event, NULL);
+ }
+ }
+ return TRUE;
+}
+
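+/* Handle the custom upstream events understood by the source: force-key-unit
+ * plus the uvc-h264-* control events (LTR, bitrate, QP, rate control, level)
+ * and the "renegotiate" event that triggers a pipeline reconstruction.
+ * Returns TRUE when the event has been handled here. */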
+static gboolean
+gst_uvc_h264_src_parse_event (GstUvcH264Src * self, GstPad * pad,
+ GstEvent * event)
+{
+ const GstStructure *s = gst_event_get_structure (event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ if (pad == self->vidsrc && self->main_format == UVC_H264_SRC_FORMAT_H264) {
+ if (gst_video_event_is_force_key_unit (event)) {
+ uvcx_picture_type_control_t req = { 0, 0 };
+ GstClockTime ts;
+ gboolean all_headers;
+
+ if (gst_video_event_parse_upstream_force_key_unit (event,
+ &ts, &all_headers, NULL)) {
+ GST_INFO_OBJECT (self, "Received upstream force-key-unit : %d %"
+ GST_TIME_FORMAT, all_headers, GST_TIME_ARGS (ts));
+ /* TODO: wait until 'ts' time is reached */
+ if (all_headers)
+ req.wPicType = UVC_H264_PICTYPE_IDR_WITH_PPS_SPS;
+ else
+ req.wPicType = UVC_H264_PICTYPE_IDR;
+
+ if (!xu_query (self, UVCX_PICTURE_TYPE_CONTROL, UVC_SET_CUR,
+ (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " PICTURE_TYPE_CONTROL SET_CUR error");
+ } else {
+ gst_event_replace (&self->key_unit_event, event);
+ gst_event_unref (event);
+
+ return TRUE;
+ }
+ }
+ } else if (s &&
+ gst_structure_has_name (s, "uvc-h264-ltr-picture-control")) {
+ guint put_at, encode_using;
+
+ if (gst_structure_get_uint (s, "put-at", &put_at) &&
+ gst_structure_get_uint (s, "encode-using", &encode_using)) {
+ uvcx_ltr_picture_control req = { 0, put_at, encode_using };
+
+ if (!xu_query (self, UVCX_LTR_PICTURE_CONTROL, UVC_SET_CUR,
+ (guchar *) & req)) {
+ GST_WARNING_OBJECT (self, " LTR PICTURE_CONTROL SET_CUR error");
+ } else {
+ gst_event_unref (event);
+
+ return TRUE;
+ }
+ }
+ return TRUE;
+ } else if (s && gst_structure_has_name (s, "uvc-h264-bitrate-control")) {
+ guint average, peak;
+
+ if (gst_structure_get_uint (s, "average-bitrate", &average) &&
+ gst_structure_get_uint (s, "peak-bitrate", &peak)) {
+ self->average_bitrate = average;
+ self->peak_bitrate = peak;
+ set_bitrate (self);
+ update_bitrate (self);
+
+ gst_event_unref (event);
+
+ return TRUE;
+ }
+ } else if (s && gst_structure_has_name (s, "uvc-h264-qp-control")) {
+ gint min_qp, max_qp;
+ gboolean valid_event = FALSE;
+
+ if (gst_structure_get_int (s, "min-iframe-qp", &min_qp) &&
+ gst_structure_get_int (s, "max-iframe-qp", &max_qp)) {
+ self->min_qp[QP_I_FRAME] = min_qp;
+ self->max_qp[QP_I_FRAME] = max_qp;
+ set_qp (self, QP_I_FRAME);
+ update_qp (self, QP_I_FRAME);
+ valid_event = TRUE;
+ }
+ if (gst_structure_get_int (s, "min-pframe-qp", &min_qp) &&
+ gst_structure_get_int (s, "max-pframe-qp", &max_qp)) {
+ self->min_qp[QP_P_FRAME] = min_qp;
+ self->max_qp[QP_P_FRAME] = max_qp;
+ set_qp (self, QP_P_FRAME);
+ update_qp (self, QP_P_FRAME);
+ valid_event = TRUE;
+ }
+ if (gst_structure_get_int (s, "min-bframe-qp", &min_qp) &&
+ gst_structure_get_int (s, "max-bframe-qp", &max_qp)) {
+ self->min_qp[QP_B_FRAME] = min_qp;
+ self->max_qp[QP_B_FRAME] = max_qp;
+ set_qp (self, QP_B_FRAME);
+ update_qp (self, QP_B_FRAME);
+ valid_event = TRUE;
+ }
+
+ if (valid_event) {
+ gst_event_unref (event);
+
+ return TRUE;
+ }
+ } else if (s && gst_structure_has_name (s, "uvc-h264-rate-control")) {
+ UvcH264RateControl rate;
+ gboolean fixed_framerate;
+
+ if (gst_structure_get_enum (s, "rate-control",
+ UVC_H264_RATECONTROL_TYPE, (gint *) & rate) &&
+ gst_structure_get_boolean (s, "fixed-framerate",
+ &fixed_framerate)) {
+ self->rate_control = rate;
+ self->fixed_framerate = fixed_framerate;
+ set_rate_control (self);
+ update_rate_control (self);
+
+ gst_event_unref (event);
+
+ return TRUE;
+ }
+ } else if (s && gst_structure_has_name (s, "uvc-h264-level-idc")) {
+ guint level_idc;
+
+ if (gst_structure_get_uint (s, "level-idc", &level_idc)) {
+ self->level_idc = level_idc;
+ set_level_idc (self);
+ update_level_idc_and_get_max_mbps (self);
+
+            gst_event_unref (event);
+
+            return TRUE;
+          }
+ }
+ }
+ if (s && gst_structure_has_name (s, "renegotiate")) {
+ GST_DEBUG_OBJECT (self, "Received renegotiate on %s",
+ GST_PAD_NAME (pad));
+ /* TODO: Do not reconstruct pipeline twice if we receive
+ the event on both pads */
+ if (GST_STATE (self) >= GST_STATE_READY) {
+ /* TODO: diff the caps */
+ gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+ }
+ return TRUE;
+ }
+ break;
+ default:
+ break;
+ }
+
+ return FALSE;
+}
+
+static gboolean
+gst_uvc_h264_src_send_event (GstElement * element, GstEvent * event)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (element);
+
+ if (gst_uvc_h264_src_parse_event (self, self->vidsrc, event))
+ return TRUE;
+
+ return GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
+}
+
+static gboolean
+gst_uvc_h264_src_event (GstPad * pad, GstEvent * event)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (GST_PAD_PARENT (pad));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_NEWSEGMENT:
+ if (!self->vid_newseg && pad == self->vidsrc) {
+ gboolean update;
+ gdouble rate, applied_rate;
+ GstFormat format;
+ gint64 start, stop, position;
+
+ gst_event_parse_new_segment_full (event, &update, &rate,
+ &applied_rate, &format, &start, &stop, &position);
+ gst_segment_set_newsegment (&self->segment, update, rate, format,
+ start, stop, position);
+ }
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ if (pad == self->vidsrc) {
+ gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED);
+ self->vid_newseg = FALSE;
+ }
+ if (pad == self->vfsrc)
+ self->vf_newseg = FALSE;
+ break;
+ default:
+ if (gst_uvc_h264_src_parse_event (self, pad, event))
+ return TRUE;
+ break;
+ }
+ return self->srcpad_event_func (pad, event);
+}
+
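+/* Find the unit id of the H264 extension unit, first through the
+ * UVCIOC_XU_FIND_UNIT ioctl and, if that is not available, by walking the
+ * USB VideoControl descriptors via GUdev/libusb.  Returns 0 on failure. */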
+static guint8
+xu_get_id (GstUvcH264Src * self)
+{
+ struct uvc_xu_find_unit xu;
+ static const __u8 guid[16] = GUID_UVCX_H264_XU;
+
+ if (self->v4l2_fd == -1) {
+ GST_WARNING_OBJECT (self, "Can't query XU with fd = -1");
+ return 0;
+ }
+
+ memcpy (xu.guid, guid, 16);
+ xu.unit = 0;
+
+ if (-1 == ioctl (self->v4l2_fd, UVCIOC_XU_FIND_UNIT, &xu)) {
+#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB)
+ /* Fallback on libusb */
+ GUdevClient *client;
+ GUdevDevice *udevice;
+ GUdevDevice *parent;
+ guint64 busnum;
+ guint64 devnum;
+ libusb_device **device_list = NULL;
+ libusb_device *device = NULL;
+ ssize_t cnt;
+ int i, j, k;
+
+ GST_DEBUG_OBJECT (self, "XU_FIND_UNIT ioctl failed. Fallback on libusb");
+
+ if (self->usb_ctx == NULL)
+ libusb_init (&self->usb_ctx);
+
+ client = g_udev_client_new (NULL);
+ if (client) {
+ udevice = g_udev_client_query_by_device_file (client, self->device);
+ if (udevice) {
+ parent = g_udev_device_get_parent_with_subsystem (udevice, "usb",
+ "usb_device");
+ if (parent) {
+ busnum = g_udev_device_get_sysfs_attr_as_uint64 (parent, "busnum");
+ devnum = g_udev_device_get_sysfs_attr_as_uint64 (parent, "devnum");
+
+ cnt = libusb_get_device_list (self->usb_ctx, &device_list);
+ for (i = 0; i < cnt; i++) {
+ if (busnum == libusb_get_bus_number (device_list[i]) &&
+ devnum == libusb_get_device_address (device_list[i])) {
+ device = libusb_ref_device (device_list[i]);
+ break;
+ }
+ }
+ libusb_free_device_list (device_list, 1);
+ g_object_unref (parent);
+ }
+ g_object_unref (udevice);
+ }
+ g_object_unref (client);
+ }
+
+ if (device) {
+ struct libusb_device_descriptor desc;
+
+ if (libusb_get_device_descriptor (device, &desc) == 0) {
+ for (i = 0; i < desc.bNumConfigurations; ++i) {
+ struct libusb_config_descriptor *config = NULL;
+
+ if (libusb_get_config_descriptor (device, i, &config) == 0) {
+ for (j = 0; j < config->bNumInterfaces; j++) {
+ for (k = 0; k < config->interface[j].num_altsetting; k++) {
+ const struct libusb_interface_descriptor *interface;
+ const guint8 *ptr = NULL;
+
+ interface = &config->interface[j].altsetting[k];
+ if (interface->bInterfaceClass != LIBUSB_CLASS_VIDEO ||
+ interface->bInterfaceSubClass != USB_VIDEO_CONTROL)
+ continue;
+ ptr = interface->extra;
+ while (ptr - interface->extra +
+ sizeof (xu_descriptor) < interface->extra_length) {
+ xu_descriptor *desc = (xu_descriptor *) ptr;
+
+ GST_DEBUG_OBJECT (self, "Found VideoControl interface with "
+ "unit id %d : %" GUID_FORMAT, desc->bUnitID,
+ GUID_ARGS (desc->guidExtensionCode));
+ if (desc->bDescriptorType == USB_VIDEO_CONTROL_INTERFACE &&
+ desc->bDescriptorSubType == USB_VIDEO_CONTROL_XU_TYPE &&
+ memcmp (desc->guidExtensionCode, guid, 16) == 0) {
+ guint8 unit_id = desc->bUnitID;
+
+ GST_DEBUG_OBJECT (self, "Found H264 XU unit : %d", unit_id);
+
+ libusb_unref_device (device);
+ return unit_id;
+ }
+ ptr += desc->bLength;
+ }
+ }
+ }
+ }
+ }
+ }
+ libusb_unref_device (device);
+ }
+#else
+ GST_WARNING_OBJECT (self, "XU_FIND_UNIT ioctl failed");
+#endif
+ return 0;
+ }
+
+ return xu.unit;
+}
+
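+/* Issue a UVCIOC_CTRL_QUERY on the H264 extension unit.  The control length
+ * is always fetched with GET_LEN first so the right amount of data is
+ * transferred for the requested query. */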
+static gboolean
+xu_query (GstUvcH264Src * self, guint selector, guint query, guchar * data)
+{
+ struct uvc_xu_control_query xu;
+ __u16 len;
+
+ if (self->v4l2_fd == -1) {
+ GST_WARNING_OBJECT (self, "Can't query XU with fd = -1");
+ return FALSE;
+ }
+
+ xu.unit = self->h264_unit_id;
+ xu.selector = selector;
+
+ xu.query = UVC_GET_LEN;
+ xu.size = sizeof (len);
+ xu.data = (unsigned char *) &len;
+ if (-1 == ioctl (self->v4l2_fd, UVCIOC_CTRL_QUERY, &xu)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_LEN error");
+ return FALSE;
+ }
+
+ if (query == UVC_GET_LEN) {
+ *((__u16 *) data) = len;
+ } else {
+ xu.query = query;
+ xu.size = len;
+ xu.data = data;
+ if (-1 == ioctl (self->v4l2_fd, UVCIOC_CTRL_QUERY, &xu)) {
+ return FALSE;
+ }
+ }
+
+ return TRUE;
+}
+
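+/* Fill a probe/commit structure from the cached static-control properties
+ * and the negotiated frame interval, resolution, profile and stream format. */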
+static void
+fill_probe_commit (GstUvcH264Src * self,
+ uvcx_video_config_probe_commit_t * probe, guint32 frame_interval,
+ guint32 width, guint32 height, guint32 profile,
+ UvcH264StreamFormat stream_format)
+{
+ probe->dwFrameInterval = frame_interval;
+ probe->dwBitRate = self->initial_bitrate;
+ probe->wWidth = width;
+ probe->wHeight = height;
+ probe->wSliceUnits = self->slice_units;
+ probe->wSliceMode = self->slice_mode;
+ probe->wProfile = profile;
+ probe->wIFramePeriod = self->iframe_period;
+ probe->bUsageType = self->usage_type;
+ probe->bRateControlMode = self->rate_control;
+ if (self->fixed_framerate)
+ probe->bRateControlMode |= UVC_H264_RATECONTROL_FIXED_FRM_FLG;
+ probe->bStreamFormat = stream_format;
+ probe->bEntropyCABAC = self->entropy;
+ probe->bTimestamp = self->enable_sei ?
+ UVC_H264_TIMESTAMP_SEI_ENABLE : UVC_H264_TIMESTAMP_SEI_DISABLE;
+ probe->bNumOfReorderFrames = self->num_reorder_frames;
+ probe->bPreviewFlipped = self->preview_flipped ?
+ UVC_H264_PREFLIPPED_HORIZONTAL : UVC_H264_PREFLIPPED_DISABLE;
+ probe->wLeakyBucketSize = self->leaky_bucket_size;
+}
+
+static void
+print_probe_commit (GstUvcH264Src * self,
+ uvcx_video_config_probe_commit_t * probe)
+{
+ GST_DEBUG_OBJECT (self, " Frame interval : %d *100ns",
+ probe->dwFrameInterval);
+ GST_DEBUG_OBJECT (self, " Bit rate : %d", probe->dwBitRate);
+ GST_DEBUG_OBJECT (self, " Hints : %X", probe->bmHints);
+ GST_DEBUG_OBJECT (self, " Configuration index : %d",
+ probe->wConfigurationIndex);
+ GST_DEBUG_OBJECT (self, " Width : %d", probe->wWidth);
+ GST_DEBUG_OBJECT (self, " Height : %d", probe->wHeight);
+ GST_DEBUG_OBJECT (self, " Slice units : %d", probe->wSliceUnits);
+ GST_DEBUG_OBJECT (self, " Slice mode : %X", probe->wSliceMode);
+ GST_DEBUG_OBJECT (self, " Profile : %X", probe->wProfile);
+ GST_DEBUG_OBJECT (self, " IFrame Period : %d ms", probe->wIFramePeriod);
+ GST_DEBUG_OBJECT (self, " Estimated video delay : %d ms",
+ probe->wEstimatedVideoDelay);
+ GST_DEBUG_OBJECT (self, " Estimated max config delay : %d ms",
+ probe->wEstimatedMaxConfigDelay);
+ GST_DEBUG_OBJECT (self, " Usage type : %X", probe->bUsageType);
+ GST_DEBUG_OBJECT (self, " Rate control mode : %X", probe->bRateControlMode);
+ GST_DEBUG_OBJECT (self, " Temporal scale mode : %X",
+ probe->bTemporalScaleMode);
+ GST_DEBUG_OBJECT (self, " Spatial scale mode : %X",
+ probe->bSpatialScaleMode);
+ GST_DEBUG_OBJECT (self, " SNR scale mode : %X", probe->bSNRScaleMode);
+ GST_DEBUG_OBJECT (self, " Stream mux option : %X", probe->bStreamMuxOption);
+ GST_DEBUG_OBJECT (self, " Stream Format : %X", probe->bStreamFormat);
+ GST_DEBUG_OBJECT (self, " Entropy CABAC : %X", probe->bEntropyCABAC);
+ GST_DEBUG_OBJECT (self, " Timestamp : %X", probe->bTimestamp);
+ GST_DEBUG_OBJECT (self, " Num of reorder frames : %d",
+ probe->bNumOfReorderFrames);
+ GST_DEBUG_OBJECT (self, " Preview flipped : %X", probe->bPreviewFlipped);
+ GST_DEBUG_OBJECT (self, " View : %d", probe->bView);
+ GST_DEBUG_OBJECT (self, " Stream ID : %X", probe->bStreamID);
+ GST_DEBUG_OBJECT (self, " Spatial layer ratio : %f",
+ ((probe->bSpatialLayerRatio & 0xF0) >> 4) +
+ ((float) (probe->bSpatialLayerRatio & 0x0F)) / 16);
+ GST_DEBUG_OBJECT (self, " Leaky bucket size : %d ms",
+ probe->wLeakyBucketSize);
+}
+
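+/* Run the PROBE/COMMIT handshake on the extension unit to configure the H264
+ * encoder (and, if requested, the muxed secondary raw stream) for the
+ * negotiated caps. */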
+static void
+configure_h264 (GstUvcH264Src * self, gint fd)
+{
+ uvcx_video_config_probe_commit_t probe;
+
+ /* Set the secondary format first, so the last SET_CUR will be for the
+ * H264 format. This way, we can still get the static control values with
+ * a GET_CUR. Otherwise all static properties will return 0 because that's
+ * what the GET_CUR of the raw format returns.
+ */
+ if (self->secondary_format == UVC_H264_SRC_FORMAT_RAW) {
+ memset (&probe, 0, sizeof (probe));
+ probe.dwFrameInterval = self->secondary_frame_interval;
+ probe.wWidth = self->secondary_width;
+ probe.wHeight = self->secondary_height;
+ probe.bStreamMuxOption = 5;
+
+ GST_DEBUG_OBJECT (self, "RAW PROBE SET_CUR : ");
+ print_probe_commit (self, &probe);
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE SET_CUR error");
+ return;
+ }
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "RAW PROBE GET_CUR : ");
+ print_probe_commit (self, &probe);
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_COMMIT, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "COMMIT SET_CUR error");
+ return;
+ }
+ }
+ /* Print MIN/MAX/DEF probe values for debugging purposes */
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_MIN,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "PROBE GET_MIN : ");
+ print_probe_commit (self, &probe);
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_MAX,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "PROBE GET_MAX : ");
+ print_probe_commit (self, &probe);
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_DEF,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "PROBE GET_DEF : ");
+ print_probe_commit (self, &probe);
+
+ fill_probe_commit (self, &probe, self->main_frame_interval,
+ self->main_width, self->main_height, self->main_profile,
+ self->main_stream_format);
+ if (self->secondary_format != UVC_H264_SRC_FORMAT_NONE)
+ probe.bStreamMuxOption = 3;
+ else
+ probe.bStreamMuxOption = 0;
+ probe.bmHints = UVC_H264_BMHINTS_RESOLUTION | UVC_H264_BMHINTS_PROFILE |
+ UVC_H264_BMHINTS_FRAME_INTERVAL;
+
+ GST_DEBUG_OBJECT (self, "PROBE SET_CUR : ");
+ print_probe_commit (self, &probe);
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE SET_CUR error");
+ return;
+ }
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "PROBE GET_CUR : ");
+ print_probe_commit (self, &probe);
+
+ /* Must validate the settings accepted by the encoder */
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_COMMIT, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "COMMIT SET_CUR error");
+ return;
+ }
+}
+
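+/* "prepare-format" callback from v4l2src: once the device format is known
+ * and before streaming starts, commit the H264 configuration and push all
+ * the cached dynamic controls to the encoder. */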
+static void
+v4l2src_prepare_format (GstElement * v4l2src, gint fd, guint fourcc,
+ guint width, guint height, gpointer user_data)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (user_data);
+
+ GST_DEBUG_OBJECT (self, "v4l2src prepare-format with FCC %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+
+ if (self->main_format == UVC_H264_SRC_FORMAT_H264) {
+ /* TODO: update static controls and g_object_notify those that changed */
+ configure_h264 (self, fd);
+
+ /* TODO: update dynamic controls on READY state */
+ /* Configure dynamic controls */
+ set_rate_control (self);
+ update_rate_control (self);
+ set_level_idc (self);
+ update_level_idc_and_get_max_mbps (self);
+ set_bitrate (self);
+ update_bitrate (self);
+ set_qp (self, QP_I_FRAME);
+ update_qp (self, QP_I_FRAME);
+ set_qp (self, QP_P_FRAME);
+ update_qp (self, QP_P_FRAME);
+ set_qp (self, QP_B_FRAME);
+ update_qp (self, QP_B_FRAME);
+ set_ltr (self);
+ update_ltr (self);
+ }
+}
+
+static gboolean
+_extract_caps_info (GstStructure * structure, guint16 * width, guint16 * height,
+ guint32 * frame_interval)
+{
+ gint w, h, fps_n, fps_d;
+ gboolean ret = TRUE;
+
+ ret &= gst_structure_get_int (structure, "width", &w);
+ ret &= gst_structure_get_int (structure, "height", &h);
+ ret &= gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d);
+
+ if (ret) {
+ *width = w;
+ *height = h;
+ /* Interval is in 100ns */
+ *frame_interval = GST_TIME_AS_NSECONDS ((fps_d * GST_SECOND) / fps_n) / 100;
+ }
+
+ return ret;
+}
+
+static guint16
+_extract_profile (GstStructure * structure)
+{
+ const gchar *profile_str;
+ guint16 profile;
+
+ profile = UVC_H264_PROFILE_HIGH;
+ profile_str = gst_structure_get_string (structure, "profile");
+ if (profile_str) {
+ if (!strcmp (profile_str, "constrained-baseline")) {
+ profile = UVC_H264_PROFILE_CONSTRAINED_BASELINE;
+ } else if (!strcmp (profile_str, "baseline")) {
+ profile = UVC_H264_PROFILE_BASELINE;
+ } else if (!strcmp (profile_str, "main")) {
+ profile = UVC_H264_PROFILE_MAIN;
+ } else if (!strcmp (profile_str, "high")) {
+ profile = UVC_H264_PROFILE_HIGH;
+ }
+ }
+ return profile;
+}
+
+static UvcH264StreamFormat
+_extract_stream_format (GstStructure * structure)
+{
+ const gchar *stream_format;
+
+ stream_format = gst_structure_get_string (structure, "stream-format");
+ if (stream_format) {
+ if (!strcmp (stream_format, "avc"))
+ return UVC_H264_STREAMFORMAT_NAL;
+ else if (!strcmp (stream_format, "byte-stream"))
+ return UVC_H264_STREAMFORMAT_ANNEXB;
+ }
+ return UVC_H264_STREAMFORMAT_ANNEXB;
+}
+
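+/* Ask element 'name' (the colorspace converter or the JPEG decoder) which
+ * sink caps it can accept when its output is restricted to 'caps', by
+ * temporarily linking it to a capsfilter inside the bin. */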
+static GstCaps *
+_transform_caps (GstUvcH264Src * self, GstCaps * caps, const gchar * name)
+{
+ GstElement *el = gst_element_factory_make (name, NULL);
+ GstElement *cf = gst_element_factory_make ("capsfilter", NULL);
+ GstPad *sink;
+
+ if (!el || !cf || !gst_bin_add (GST_BIN (self), el)) {
+ if (el)
+ gst_object_unref (el);
+ if (cf)
+ gst_object_unref (cf);
+ goto done;
+ }
+ if (!gst_bin_add (GST_BIN (self), cf)) {
+ gst_object_unref (cf);
+ gst_bin_remove (GST_BIN (self), el);
+ goto done;
+ }
+ if (!gst_element_link (el, cf))
+ goto error_remove;
+
+ sink = gst_element_get_static_pad (el, "sink");
+ if (!sink)
+ goto error_remove;
+ g_object_set (cf, "caps", caps, NULL);
+
+ caps = gst_pad_get_caps (sink);
+ gst_object_unref (sink);
+
+error_remove:
+ gst_bin_remove (GST_BIN (self), cf);
+ gst_bin_remove (GST_BIN (self), el);
+
+done:
+ return caps;
+}
+
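+/* Expand the downstream caps with the formats the colorspace converter could
+ * produce them from, while keeping any H264 and JPEG caps the camera can
+ * deliver directly. */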
+static GstCaps *
+gst_uvc_h264_src_transform_caps (GstUvcH264Src * self, GstCaps * caps)
+{
+ GstCaps *h264 = gst_caps_new_simple ("video/x-h264", NULL);
+ GstCaps *jpg = gst_caps_new_simple ("image/jpeg", NULL);
+ GstCaps *h264_caps = gst_caps_intersect (h264, caps);
+ GstCaps *jpg_caps = gst_caps_intersect (jpg, caps);
+
+ /* TODO: Keep caps order after transformation */
+ caps = _transform_caps (self, caps, self->colorspace_name);
+
+ if (!gst_caps_is_empty (h264_caps)) {
+ GstCaps *temp = gst_caps_union (caps, h264_caps);
+ gst_caps_unref (caps);
+ caps = temp;
+ }
+ if (!gst_caps_is_empty (jpg_caps)) {
+ GstCaps *temp = gst_caps_union (caps, jpg_caps);
+ gst_caps_unref (caps);
+ caps = temp;
+ }
+
+ if (h264_caps)
+ gst_caps_unref (h264_caps);
+ if (jpg_caps)
+ gst_caps_unref (jpg_caps);
+ gst_caps_unref (h264);
+ gst_caps_unref (jpg);
+
+ return caps;
+}
+
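+/* Pick, among the caps proposed by the peer, the first one that both v4l2src
+ * and the camera's H264 extension unit can handle, then fixate it on the
+ * v4l2src pad.  'primary' selects the muxed H264 stream, otherwise the
+ * secondary (raw or JPEG) stream is negotiated. */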
+static GstCaps *
+gst_uvc_h264_src_fixate_caps (GstUvcH264Src * self, GstPad * v4l_pad,
+ GstCaps * v4l_caps, GstCaps * peer_caps, gboolean primary)
+{
+ GstCaps *caps = NULL;
+ GstCaps *icaps = NULL;
+ GstCaps *tcaps = NULL;
+ int i;
+
+ if (v4l_caps == NULL || gst_caps_is_any (v4l_caps)) {
+ GST_DEBUG_OBJECT (self, "v4l caps are invalid. not fixating");
+ return NULL;
+ }
+
+ tcaps = gst_caps_intersect_full (peer_caps, v4l_caps,
+ GST_CAPS_INTERSECT_FIRST);
+ GST_DEBUG_OBJECT (self, "intersect: %" GST_PTR_FORMAT, tcaps);
+ icaps = gst_caps_normalize (tcaps);
+ gst_caps_unref (tcaps);
+
+ /* Prefer the first caps we are compatible with that the peer proposed */
+ for (i = 0; i < gst_caps_get_size (icaps); i++) {
+ /* get intersection */
+ GstCaps *ipcaps = gst_caps_copy_nth (icaps, i);
+ GstStructure *s = gst_caps_get_structure (ipcaps, 0);
+
+ GST_DEBUG_OBJECT (self, "Testing %s: %" GST_PTR_FORMAT,
+ primary ? "primary" : "secondary", ipcaps);
+ if (primary && gst_structure_has_name (s, "video/x-h264")) {
+ uvcx_video_config_probe_commit_t probe;
+ guint16 width;
+ guint16 height;
+ guint32 interval;
+ guint16 profile;
+ UvcH264StreamFormat stream_format;
+
+ if (_extract_caps_info (s, &width, &height, &interval)) {
+ profile = _extract_profile (s);
+ stream_format = _extract_stream_format (s);
+ fill_probe_commit (self, &probe, interval, width, height,
+ profile, stream_format);
+ probe.bmHints = UVC_H264_BMHINTS_RESOLUTION |
+ UVC_H264_BMHINTS_PROFILE | UVC_H264_BMHINTS_FRAME_INTERVAL;
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE SET_CUR error");
+ return NULL;
+ }
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return NULL;
+ }
+ GST_DEBUG_OBJECT (self, "Probe gives us %d==%d, %d==%d, %d==%d",
+ probe.wWidth, width, probe.wHeight, height,
+ probe.bStreamFormat, stream_format);
+ if (probe.wWidth == width && probe.wHeight == height &&
+ probe.bStreamFormat == stream_format) {
+ caps = ipcaps;
+ break;
+ }
+ }
+ } else if (!primary && self->main_format == UVC_H264_SRC_FORMAT_H264) {
+ uvcx_video_config_probe_commit_t probe;
+ guint16 width;
+ guint16 height;
+ guint32 interval;
+
+ if (_extract_caps_info (s, &width, &height, &interval)) {
+ if (gst_structure_has_name (s, "video/x-raw-yuv")) {
+ guint32 fcc = 0;
+ guint8 mux = 0;
+
+ if (gst_structure_get_fourcc (s, "format", &fcc)) {
+ if (fcc == GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'))
+ mux = 4;
+ else if (fcc == GST_MAKE_FOURCC ('N', 'V', '1', '2'))
+ mux = 8;
+ }
+ if (mux != 0) {
+ memset (&probe, 0, sizeof (probe));
+ probe.dwFrameInterval = interval;
+ probe.wWidth = width;
+ probe.wHeight = height;
+ probe.bStreamMuxOption = mux | 1;
+ probe.bmHints = UVC_H264_BMHINTS_RESOLUTION |
+ UVC_H264_BMHINTS_PROFILE | UVC_H264_BMHINTS_FRAME_INTERVAL;
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE SET_CUR error");
+ return NULL;
+ }
+
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return NULL;
+ }
+ GST_DEBUG_OBJECT (self, "Probe gives us %d==%d, %d==%d, %d~=%d",
+ probe.wWidth, width, probe.wHeight, height,
+ probe.bStreamMuxOption, mux);
+ if (probe.wWidth == width && probe.wHeight == height &&
+ (probe.bStreamMuxOption & mux) != 0) {
+ caps = ipcaps;
+ break;
+ }
+ }
+ } else if (gst_structure_has_name (s, "image/jpeg")) {
+          /* HACK ALERT: No way of figuring this one out, but it seems the
+           * camera doesn't allow muxing H264 with a JPEG resolution higher
+           * than 640x480, so don't allow it */
+ if (width <= 640 && height <= 480) {
+ caps = ipcaps;
+ break;
+ }
+ }
+ }
+ } else {
+ caps = ipcaps;
+ break;
+ }
+ gst_caps_unref (ipcaps);
+  }
+  gst_caps_unref (icaps);
+
+ if (caps) {
+ caps = gst_caps_make_writable (caps);
+ gst_caps_truncate (caps);
+
+ /* now fixate */
+ if (!gst_caps_is_empty (caps)) {
+ gst_pad_fixate_caps (v4l_pad, caps);
+ GST_DEBUG_OBJECT (self, "fixated to: %" GST_PTR_FORMAT, caps);
+ }
+
+ if (gst_caps_is_empty (caps) || gst_caps_is_any (caps)) {
+ gst_caps_unref (caps);
+ caps = NULL;
+ }
+ }
+
+ return caps;
+}
+
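+/* Tear down the internal elements (optionally including v4l2src) and remove
+ * anything else that may still be left in the bin. */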
+static void
+gst_uvc_h264_src_destroy_pipeline (GstUvcH264Src * self, gboolean v4l2src)
+{
+ GstIterator *iter = NULL;
+ gboolean done;
+
+ if (v4l2src && self->v4l2_src) {
+ gst_bin_remove (GST_BIN (self), self->v4l2_src);
+ gst_element_set_state (self->v4l2_src, GST_STATE_NULL);
+ gst_object_unref (self->v4l2_src);
+ self->v4l2_src = NULL;
+ self->v4l2_fd = -1;
+ self->h264_unit_id = 0;
+ }
+ if (self->mjpg_demux) {
+ gst_bin_remove (GST_BIN (self), self->mjpg_demux);
+ gst_element_set_state (self->mjpg_demux, GST_STATE_NULL);
+ gst_object_unref (self->mjpg_demux);
+ self->mjpg_demux = NULL;
+ }
+ if (self->jpeg_dec) {
+ gst_bin_remove (GST_BIN (self), self->jpeg_dec);
+ gst_element_set_state (self->jpeg_dec, GST_STATE_NULL);
+ gst_object_unref (self->jpeg_dec);
+ self->jpeg_dec = NULL;
+ }
+ if (self->vid_colorspace) {
+ gst_bin_remove (GST_BIN (self), self->vid_colorspace);
+ gst_element_set_state (self->vid_colorspace, GST_STATE_NULL);
+ gst_object_unref (self->vid_colorspace);
+ self->vid_colorspace = NULL;
+ }
+ if (self->vf_colorspace) {
+ gst_bin_remove (GST_BIN (self), self->vf_colorspace);
+ gst_element_set_state (self->vf_colorspace, GST_STATE_NULL);
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+ }
+ iter = gst_bin_iterate_elements (GST_BIN (self));
+ done = FALSE;
+ while (!done) {
+ GstElement *item = NULL;
+
+ switch (gst_iterator_next (iter, (gpointer *) & item)) {
+ case GST_ITERATOR_OK:
+ if (item != self->v4l2_src) {
+ gst_bin_remove (GST_BIN (self), item);
+ gst_element_set_state (item, GST_STATE_NULL);
+ }
+ gst_object_unref (item);
+ break;
+ case GST_ITERATOR_RESYNC:
+ gst_iterator_resync (iter);
+ break;
+ case GST_ITERATOR_ERROR:
+ done = TRUE;
+ break;
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
+ }
+ }
+ gst_iterator_free (iter);
+}
+
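+/* Create or reuse the v4l2src element, bring it to READY for the selected
+ * device and look up the H264 extension unit id through its file
+ * descriptor. */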
+static gboolean
+ensure_v4l2src (GstUvcH264Src * self)
+{
+ gchar *device = NULL;
+ GstClock *v4l2_clock = NULL;
+
+ if (self->v4l2_src == NULL) {
+ /* Create v4l2 source and set it up */
+ self->v4l2_src = gst_element_factory_make ("v4l2src", NULL);
+ if (!self->v4l2_src || !gst_bin_add (GST_BIN (self), self->v4l2_src))
+ goto error;
+ gst_object_ref (self->v4l2_src);
+ g_signal_connect (self->v4l2_src, "prepare-format",
+ (GCallback) v4l2src_prepare_format, self);
+ }
+
+ g_object_get (self->v4l2_src, "device", &device, NULL);
+ g_object_set (self->v4l2_src,
+ "device", self->device, "num-buffers", self->num_buffers, NULL);
+
+ v4l2_clock = gst_element_get_clock (self->v4l2_src);
+
+ /* Set to NULL if the device changed */
+ if (g_strcmp0 (device, self->device))
+ gst_element_set_state (self->v4l2_src, GST_STATE_NULL);
+ g_free (device);
+
+ if (gst_element_set_state (self->v4l2_src, GST_STATE_READY) !=
+ GST_STATE_CHANGE_SUCCESS) {
+ GST_DEBUG_OBJECT (self, "Unable to set v4l2src to READY state");
+ goto error_remove;
+ }
+
+ /* Set/Update the fd and unit id after we go to READY */
+ g_object_get (self->v4l2_src, "device-fd", &self->v4l2_fd, NULL);
+ self->h264_unit_id = xu_get_id (self);
+
+ if (self->h264_unit_id == 0) {
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ ("Device is not a valid UVC H264 camera"), (NULL));
+ goto error_remove;
+ }
+
+ /* going to state READY makes v4l2src lose its reference to the clock */
+ if (v4l2_clock) {
+ gst_element_set_clock (self->v4l2_src, v4l2_clock);
+ gst_element_set_base_time (self->v4l2_src,
+ gst_element_get_base_time (GST_ELEMENT (self)));
+ gst_object_unref (v4l2_clock);
+ }
+
+ return TRUE;
+
+error_remove:
+ gst_element_set_state (self->v4l2_src, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN (self), self->v4l2_src);
+
+error:
+ if (self->v4l2_src)
+ gst_object_unref (self->v4l2_src);
+ self->v4l2_src = NULL;
+ self->v4l2_fd = -1;
+ self->h264_unit_id = 0;
+
+ return FALSE;
+}
+
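+/* (Re)build the internal pipeline: reset the encoder, negotiate the video
+ * and viewfinder caps against what v4l2src and the extension unit accept,
+ * and pick the matching capture configuration. */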
+static gboolean
+gst_uvc_h264_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (bcamsrc);
+ GstIterator *iter = NULL;
+ gboolean iter_done = FALSE;
+ GstPad *vf_pad = NULL;
+ GstCaps *vf_caps = NULL;
+ GstStructure *vf_struct = NULL;
+ GstPad *vid_pad = NULL;
+ GstCaps *vid_caps = NULL;
+ GstStructure *vid_struct = NULL;
+ GstCaps *src_caps = NULL;
+ GstPad *v4l_pad = NULL;
+ GstCaps *v4l_caps = NULL;
+ gboolean jpg2raw = FALSE;
+
+ enum
+ {
+ RAW_NONE, ENCODED_NONE, NONE_RAW, NONE_ENCODED,
+ H264_JPG, H264_RAW, H264_JPG2RAW, NONE_NONE,
+ RAW_RAW, ENCODED_ENCODED,
+ } type;
+
+ GST_DEBUG_OBJECT (self, "Construct pipeline");
+ self->reconfiguring = TRUE;
+
+ if (self->v4l2_src) {
+ uvcx_encoder_reset req = { 0 };
+
+ if (!xu_query (self, UVCX_ENCODER_RESET, UVC_SET_CUR, (guchar *) & req))
+ GST_WARNING_OBJECT (self, " UVCX_ENCODER_RESET SET_CUR error");
+ }
+
+ if (!ensure_v4l2src (self))
+ goto error;
+
+ gst_uvc_h264_src_destroy_pipeline (self, FALSE);
+
+  /* Potentially unlink v4l2src from the ghost pads */
+ gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc), NULL);
+ gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), NULL);
+
+ vf_caps = gst_pad_peer_get_caps (self->vfsrc);
+ vid_caps = gst_pad_peer_get_caps (self->vidsrc);
+
+ GST_DEBUG_OBJECT (self, "vfsrc caps : %" GST_PTR_FORMAT, vf_caps);
+ GST_DEBUG_OBJECT (self, "vidsrc caps : %" GST_PTR_FORMAT, vid_caps);
+ if (!self->started) {
+ GST_DEBUG_OBJECT (self, "video not started. Ignoring vidsrc caps");
+ if (vid_caps)
+ gst_caps_unref (vid_caps);
+ vid_caps = NULL;
+ }
+
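+  /* Fixate the requested caps against what the v4l2 device can produce */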
+ v4l_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+ v4l_caps = gst_pad_get_caps (v4l_pad);
+ GST_DEBUG_OBJECT (self, "v4l2src caps : %" GST_PTR_FORMAT, v4l_caps);
+ if (vid_caps) {
+ GstCaps *trans_caps = gst_uvc_h264_src_transform_caps (self, vid_caps);
+
+ gst_caps_unref (vid_caps);
+ vid_caps = gst_uvc_h264_src_fixate_caps (self, v4l_pad, v4l_caps,
+ trans_caps, TRUE);
+ gst_caps_unref (trans_caps);
+
+ if (vid_caps) {
+ vid_struct = gst_caps_get_structure (vid_caps, 0);
+ } else {
+ GST_WARNING_OBJECT (self, "Could not negotiate vidsrc caps format");
+ gst_object_unref (v4l_pad);
+ gst_caps_unref (v4l_caps);
+ goto error_remove;
+ }
+ }
+ GST_DEBUG_OBJECT (self, "Fixated vidsrc caps : %" GST_PTR_FORMAT, vid_caps);
+
+ if (vid_caps && gst_structure_has_name (vid_struct, "video/x-h264")) {
+ self->main_format = UVC_H264_SRC_FORMAT_H264;
+ if (!_extract_caps_info (vid_struct, &self->main_width,
+ &self->main_height, &self->main_frame_interval)) {
+ gst_object_unref (v4l_pad);
+ gst_caps_unref (v4l_caps);
+ goto error_remove;
+ }
+
+ self->main_stream_format = _extract_stream_format (vid_struct);
+ self->main_profile = _extract_profile (vid_struct);
+ } else {
+ self->main_format = UVC_H264_SRC_FORMAT_NONE;
+ }
+
+ if (vf_caps) {
+ GstCaps *trans_caps = gst_uvc_h264_src_transform_caps (self, vf_caps);
+
+ gst_caps_unref (vf_caps);
+ vf_caps = gst_uvc_h264_src_fixate_caps (self, v4l_pad, v4l_caps,
+ trans_caps, FALSE);
+
+ /* If we couldn't find a suitable vf cap, try the jpeg2raw pipeline */
+ if (!vf_caps && self->main_format == UVC_H264_SRC_FORMAT_H264) {
+ GstCaps *jpg_caps;
+
+ jpg2raw = TRUE;
+ jpg_caps = _transform_caps (self, trans_caps, self->jpeg_decoder_name);
+
+ vf_caps = gst_uvc_h264_src_fixate_caps (self, v4l_pad, v4l_caps,
+ jpg_caps, FALSE);
+ gst_caps_unref (jpg_caps);
+ }
+ gst_caps_unref (trans_caps);
+ if (vf_caps) {
+ vf_struct = gst_caps_get_structure (vf_caps, 0);
+ } else {
+ GST_WARNING_OBJECT (self, "Could not negotiate vfsrc caps format");
+ gst_object_unref (v4l_pad);
+ gst_caps_unref (v4l_caps);
+ goto error_remove;
+ }
+ }
+ GST_DEBUG_OBJECT (self, "Fixated vfsrc caps : %" GST_PTR_FORMAT, vf_caps);
+ gst_object_unref (v4l_pad);
+ gst_caps_unref (v4l_caps);
+
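+  /* Decide which main (vidsrc) / secondary (vfsrc) format combination
+   * we are dealing with */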
+ if (vf_caps && vid_caps &&
+ !gst_structure_has_name (vid_struct, "video/x-h264")) {
+ /* Allow for vfsrc+vidsrc to both be raw or jpeg */
+ if (gst_structure_has_name (vid_struct, "image/jpeg") &&
+ gst_structure_has_name (vf_struct, "image/jpeg")) {
+ self->main_format = UVC_H264_SRC_FORMAT_JPG;
+ self->secondary_format = UVC_H264_SRC_FORMAT_JPG;
+ type = ENCODED_ENCODED;
+ } else if (!gst_structure_has_name (vid_struct, "image/jpeg") &&
+ !gst_structure_has_name (vf_struct, "image/jpeg")) {
+ self->main_format = UVC_H264_SRC_FORMAT_RAW;
+ self->secondary_format = UVC_H264_SRC_FORMAT_RAW;
+ type = RAW_RAW;
+ } else {
+ goto error_remove;
+ }
+ } else if (vf_caps && vid_caps) {
+ guint32 smallest_frame_interval;
+
+ if (!_extract_caps_info (vf_struct, &self->secondary_width,
+ &self->secondary_height, &self->secondary_frame_interval))
+ goto error_remove;
+
+ if (jpg2raw == FALSE && gst_structure_has_name (vf_struct, "image/jpeg")) {
+ type = H264_JPG;
+ self->secondary_format = UVC_H264_SRC_FORMAT_JPG;
+ } else {
+ if (jpg2raw) {
+ type = H264_JPG2RAW;
+ self->secondary_format = UVC_H264_SRC_FORMAT_JPG;
+ } else {
+ type = H264_RAW;
+ self->secondary_format = UVC_H264_SRC_FORMAT_RAW;
+ }
+ }
+ smallest_frame_interval = MIN (self->main_frame_interval,
+ self->secondary_frame_interval);
+    /* Avoid a division by zero below; default to 30 fps (333333 * 100ns) */
+ if (smallest_frame_interval == 0)
+ smallest_frame_interval = 333333;
+
+    /* Frame interval is in 100ns units, so the framerate is
+     * 10000000 / interval, expressed below as the fraction
+     * (NSEC_PER_SEC / interval) / 100 */
+ src_caps = gst_caps_new_simple ("image/jpeg",
+ "width", G_TYPE_INT, self->secondary_width,
+ "height", G_TYPE_INT, self->secondary_height,
+ "framerate", GST_TYPE_FRACTION,
+ NSEC_PER_SEC / smallest_frame_interval, 100, NULL);
+ } else if (vf_caps || vid_caps) {
+ self->secondary_format = UVC_H264_SRC_FORMAT_NONE;
+ if (vid_struct && gst_structure_has_name (vid_struct, "video/x-h264")) {
+ type = ENCODED_NONE;
+ } else if (vid_struct && gst_structure_has_name (vid_struct, "image/jpeg")) {
+ type = ENCODED_NONE;
+ self->main_format = UVC_H264_SRC_FORMAT_JPG;
+ } else if (vf_struct && gst_structure_has_name (vf_struct, "image/jpeg")) {
+ type = NONE_ENCODED;
+ self->secondary_format = UVC_H264_SRC_FORMAT_JPG;
+ } else if (vid_struct) {
+ type = RAW_NONE;
+ self->main_format = UVC_H264_SRC_FORMAT_RAW;
+ } else if (vf_struct) {
+ type = NONE_RAW;
+ self->secondary_format = UVC_H264_SRC_FORMAT_RAW;
+ } else {
+ g_assert_not_reached ();
+ }
+ } else {
+ type = NONE_NONE;
+ self->main_format = UVC_H264_SRC_FORMAT_NONE;
+ self->secondary_format = UVC_H264_SRC_FORMAT_NONE;
+ }
+
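+  /* Create and link the child elements for the chosen combination;
+   * vid_pad and vf_pad become the targets of the ghost pads below */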
+ switch (type) {
+ case NONE_NONE:
+ GST_DEBUG_OBJECT (self, "None+None");
+ vf_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+ break;
+ case RAW_NONE:
+ GST_DEBUG_OBJECT (self, "Raw+None");
+ self->vid_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->vid_colorspace ||
+ !gst_bin_add (GST_BIN (self), self->vid_colorspace))
+ goto error_remove;
+ gst_object_ref (self->vid_colorspace);
+ if (!gst_element_link (self->v4l2_src, self->vid_colorspace))
+ goto error_remove_all;
+ vid_pad = gst_element_get_static_pad (self->vid_colorspace, "src");
+ break;
+ case NONE_RAW:
+ GST_DEBUG_OBJECT (self, "None+Raw");
+ self->vf_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->vf_colorspace ||
+ !gst_bin_add (GST_BIN (self), self->vf_colorspace))
+ goto error_remove;
+ gst_object_ref (self->vf_colorspace);
+ if (!gst_element_link (self->v4l2_src, self->vf_colorspace))
+ goto error_remove_all;
+ vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src");
+ break;
+ case ENCODED_NONE:
+ GST_DEBUG_OBJECT (self, "Encoded+None");
+ vid_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+ break;
+ case NONE_ENCODED:
+ GST_DEBUG_OBJECT (self, "None+Encoded");
+ vf_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+ break;
+ case H264_JPG:
+ GST_DEBUG_OBJECT (self, "H264+JPG");
+ self->mjpg_demux = gst_element_factory_make ("uvch264_mjpgdemux", NULL);
+ if (!self->mjpg_demux || !gst_bin_add (GST_BIN (self), self->mjpg_demux))
+ goto error_remove;
+ gst_object_ref (self->mjpg_demux);
+ g_object_set (self->mjpg_demux, "device-fd", self->v4l2_fd,
+ "num-clock-samples", self->num_clock_samples, NULL);
+ if (!gst_element_link_filtered (self->v4l2_src, self->mjpg_demux,
+ src_caps))
+ goto error_remove_all;
+ vid_pad = gst_element_get_static_pad (self->mjpg_demux, "h264");
+ vf_pad = gst_element_get_static_pad (self->mjpg_demux, "jpeg");
+ break;
+ case H264_RAW:
+ GST_DEBUG_OBJECT (self, "H264+Raw");
+ self->mjpg_demux = gst_element_factory_make ("uvch264_mjpgdemux", NULL);
+ self->vf_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->mjpg_demux || !self->vf_colorspace)
+ goto error_remove;
+ if (!gst_bin_add (GST_BIN (self), self->mjpg_demux))
+ goto error_remove;
+ gst_object_ref (self->mjpg_demux);
+ g_object_set (self->mjpg_demux, "device-fd", self->v4l2_fd,
+ "num-clock-samples", self->num_clock_samples, NULL);
+ if (!gst_bin_add (GST_BIN (self), self->vf_colorspace)) {
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+ goto error_remove_all;
+ }
+ gst_object_ref (self->vf_colorspace);
+ if (!gst_element_link_filtered (self->v4l2_src, self->mjpg_demux,
+ src_caps))
+ goto error_remove_all;
+ if (!gst_element_link_pads (self->mjpg_demux, "yuy2",
+ self->vf_colorspace, "sink"))
+ goto error_remove_all;
+ vid_pad = gst_element_get_static_pad (self->mjpg_demux, "h264");
+ vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src");
+ break;
+ case H264_JPG2RAW:
+ GST_DEBUG_OBJECT (self, "H264+Raw(jpegdec)");
+ self->mjpg_demux = gst_element_factory_make ("uvch264_mjpgdemux", NULL);
+ self->jpeg_dec = gst_element_factory_make (self->jpeg_decoder_name, NULL);
+ self->vf_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->mjpg_demux || !self->jpeg_dec || !self->vf_colorspace)
+ goto error_remove;
+ if (!gst_bin_add (GST_BIN (self), self->mjpg_demux))
+ goto error_remove;
+ gst_object_ref (self->mjpg_demux);
+ g_object_set (self->mjpg_demux, "device-fd", self->v4l2_fd,
+ "num-clock-samples", self->num_clock_samples, NULL);
+ if (!gst_bin_add (GST_BIN (self), self->jpeg_dec)) {
+ gst_object_unref (self->jpeg_dec);
+ self->jpeg_dec = NULL;
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+ goto error_remove_all;
+ }
+ gst_object_ref (self->jpeg_dec);
+ if (!gst_bin_add (GST_BIN (self), self->vf_colorspace)) {
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+ goto error_remove_all;
+ }
+ gst_object_ref (self->vf_colorspace);
+ if (!gst_element_link_filtered (self->v4l2_src, self->mjpg_demux,
+ src_caps))
+ goto error_remove_all;
+ if (!gst_element_link_pads (self->mjpg_demux, "jpeg", self->jpeg_dec,
+ "sink"))
+ goto error_remove_all;
+ if (!gst_element_link (self->jpeg_dec, self->vf_colorspace))
+ goto error_remove_all;
+ vid_pad = gst_element_get_static_pad (self->mjpg_demux, "h264");
+ vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src");
+ break;
+ case RAW_RAW:
+ {
+ GstElement *tee = NULL;
+
+ GST_DEBUG_OBJECT (self, "Raw+Raw");
+ tee = gst_element_factory_make ("tee", NULL);
+ if (!tee || !gst_bin_add (GST_BIN (self), tee)) {
+ if (tee)
+ gst_object_unref (tee);
+ goto error_remove;
+ }
+ self->vf_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ self->vid_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->vf_colorspace || !self->vid_colorspace)
+ goto error_remove;
+ if (!gst_bin_add (GST_BIN (self), self->vf_colorspace))
+ goto error_remove;
+ gst_object_ref (self->vf_colorspace);
+ if (!gst_bin_add (GST_BIN (self), self->vid_colorspace)) {
+ gst_object_unref (self->vid_colorspace);
+ self->vid_colorspace = NULL;
+ goto error_remove_all;
+ }
+ gst_object_ref (self->vid_colorspace);
+ if (!gst_element_link (self->v4l2_src, tee))
+ goto error_remove_all;
+ if (!gst_element_link (tee, self->vf_colorspace))
+ goto error_remove_all;
+ if (!gst_element_link (tee, self->vid_colorspace))
+ goto error_remove_all;
+ vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src");
+ vid_pad = gst_element_get_static_pad (self->vid_colorspace, "src");
+ }
+ break;
+ case ENCODED_ENCODED:
+ {
+ GstElement *tee = NULL;
+
+ GST_DEBUG_OBJECT (self, "Encoded+Encoded");
+ tee = gst_element_factory_make ("tee", NULL);
+ if (!tee || !gst_bin_add (GST_BIN (self), tee)) {
+ if (tee)
+ gst_object_unref (tee);
+ goto error_remove;
+ }
+ if (!gst_element_link (self->v4l2_src, tee))
+ goto error_remove_all;
+ vf_pad = gst_element_get_request_pad (tee, "src%d");
+ vid_pad = gst_element_get_request_pad (tee, "src%d");
+ }
+ break;
+ }
+
+ if (!gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc), vid_pad) ||
+ !gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad))
+ goto error_remove_all;
+ if (vid_pad)
+ gst_object_unref (vid_pad);
+ if (vf_pad)
+ gst_object_unref (vf_pad);
+ vid_pad = vf_pad = NULL;
+
+ if (vf_caps)
+ gst_caps_unref (vf_caps);
+ if (vid_caps)
+ gst_caps_unref (vid_caps);
+ if (src_caps)
+ gst_caps_unref (src_caps);
+ vf_caps = vid_caps = src_caps = NULL;
+
+ /* Sync children states, in sink to source order */
+ if (self->vid_colorspace &&
+ !gst_element_sync_state_with_parent (self->vid_colorspace))
+ goto error_remove_all;
+ if (self->vf_colorspace &&
+ !gst_element_sync_state_with_parent (self->vf_colorspace))
+ goto error_remove_all;
+ if (self->jpeg_dec && !gst_element_sync_state_with_parent (self->jpeg_dec))
+ goto error_remove_all;
+ if (self->mjpg_demux &&
+ !gst_element_sync_state_with_parent (self->mjpg_demux))
+ goto error_remove_all;
+ if (self->v4l2_src && !gst_element_sync_state_with_parent (self->v4l2_src))
+ goto error_remove_all;
+
+ /* Sync any remaining children states with bin's state */
+ iter = gst_bin_iterate_elements (GST_BIN (self));
+ iter_done = FALSE;
+ while (!iter_done) {
+ GstElement *item = NULL;
+
+ switch (gst_iterator_next (iter, (gpointer *) & item)) {
+ case GST_ITERATOR_OK:
+ if (!gst_element_sync_state_with_parent (item)) {
+ gst_object_unref (item);
+ gst_iterator_free (iter);
+ goto error_remove_all;
+ }
+ gst_object_unref (item);
+ break;
+ case GST_ITERATOR_RESYNC:
+ gst_iterator_resync (iter);
+ break;
+ case GST_ITERATOR_ERROR:
+ iter_done = TRUE;
+ break;
+ case GST_ITERATOR_DONE:
+ iter_done = TRUE;
+ break;
+ }
+ }
+ gst_iterator_free (iter);
+
+ self->reconfiguring = FALSE;
+ return TRUE;
+
+error_remove_all:
+ gst_uvc_h264_src_destroy_pipeline (self, FALSE);
+error_remove:
+ gst_element_set_state (self->v4l2_src, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN (self), self->v4l2_src);
+
+error:
+ if (self->v4l2_src)
+ gst_object_unref (self->v4l2_src);
+ self->v4l2_src = NULL;
+ self->v4l2_fd = -1;
+ self->h264_unit_id = 0;
+
+ if (self->mjpg_demux)
+ gst_object_unref (self->mjpg_demux);
+ self->mjpg_demux = NULL;
+ if (self->jpeg_dec)
+ gst_object_unref (self->jpeg_dec);
+ self->jpeg_dec = NULL;
+ if (self->vid_colorspace)
+ gst_object_unref (self->vid_colorspace);
+ self->vid_colorspace = NULL;
+ if (self->vf_colorspace)
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+
+ if (src_caps)
+ gst_caps_unref (src_caps);
+
+ if (vf_caps)
+ gst_caps_unref (vf_caps);
+ if (vid_caps)
+ gst_caps_unref (vid_caps);
+
+ if (vid_pad)
+ gst_object_unref (vid_pad);
+ if (vf_pad)
+ gst_object_unref (vf_pad);
+
+ self->reconfiguring = FALSE;
+ return FALSE;
+}
+
+static GstCaps *
+gst_uvc_h264_src_getcaps (GstPad * pad)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (GST_OBJECT_PARENT (pad));
+ GstCaps *template = NULL;
+ GstCaps *result = NULL;
+
+ if (pad == self->vfsrc)
+ template = gst_static_pad_template_get_caps (&vfsrc_template);
+ else if (pad == self->vidsrc)
+ template = gst_static_pad_template_get_caps (&vidsrc_template);
+ else
+ template = gst_caps_new_empty ();
+
+ if (self->v4l2_src) {
+ GstPad *v4l_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+ GstCaps *v4l_caps = gst_pad_get_caps (v4l_pad);
+ GstCaps *new_caps = gst_uvc_h264_src_transform_caps (self, v4l_caps);
+
+ result = gst_caps_intersect (new_caps, template);
+ gst_object_unref (v4l_pad);
+ gst_caps_unref (v4l_caps);
+ gst_caps_unref (new_caps);
+ gst_caps_unref (template);
+ } else {
+ result = template;
+ }
+
+ return result;
+}
+
+static gboolean
+gst_uvc_h264_src_set_mode (GstBaseCameraSrc * bcamsrc, GstCameraBinMode mode)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (bcamsrc);
+
+ GST_DEBUG_OBJECT (self, "set mode to %d", mode);
+
+ return (mode == MODE_VIDEO);
+}
+
+static gboolean
+gst_uvc_h264_src_start_capture (GstBaseCameraSrc * camerasrc)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (camerasrc);
+ gboolean ret = TRUE;
+
+ GST_DEBUG_OBJECT (self, "start capture");
+
+ if (!self->started) {
+ self->started = TRUE;
+ if (GST_STATE (self) >= GST_STATE_READY) {
+ ret = gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+ if (!ret) {
+ GST_DEBUG_OBJECT (self, "Could not start capture");
+ self->started = FALSE;
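+        /* Rebuild the pipeline without the vidsrc branch */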
+ gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+ }
+ }
+ }
+
+ return ret;
+}
+
+static void
+gst_uvc_h264_src_stop_capture (GstBaseCameraSrc * camerasrc)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (camerasrc);
+
+ GST_DEBUG_OBJECT (self, "stop capture");
+
+ if (self->started) {
+ self->started = FALSE;
+ if (GST_STATE (self) >= GST_STATE_READY)
+ gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+ gst_base_camera_src_finish_capture (camerasrc);
+ }
+}
+
+static void
+gst_uvc_h264_src_pad_linking_cb (GstPad * pad,
+ GstPad * peer, gpointer user_data)
+{
+ GstUvcH264Src *self = GST_UVC_H264_SRC (user_data);
+ gchar *pad_name = gst_pad_get_name (pad);
+
+ GST_DEBUG_OBJECT (self, "Pad %s was (un)linked. Renegotiating", pad_name);
+ g_free (pad_name);
+ if (GST_STATE (self) >= GST_STATE_READY)
+ gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+}
+
+
+static GstStateChangeReturn
+gst_uvc_h264_src_change_state (GstElement * element, GstStateChange trans)
+{
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+ GstUvcH264Src *self = GST_UVC_H264_SRC (element);
+
+ switch (trans) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ if (!ensure_v4l2src (self)) {
+ ret = GST_STATE_CHANGE_FAILURE;
+ goto end;
+ }
+ gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED);
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ if (!self->v4l2_src)
+ gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, trans);
+
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto end;
+
+ switch (trans) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ self->vid_newseg = FALSE;
+ self->vf_newseg = FALSE;
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ gst_uvc_h264_src_destroy_pipeline (self, TRUE);
+ break;
+ default:
+ break;
+ }
+
+
+end:
+ return ret;
+}
diff --git a/sys/uvch264/gstuvch264_src.h b/sys/uvch264/gstuvch264_src.h
new file mode 100644
index 000000000..3eb846bc0
--- /dev/null
+++ b/sys/uvch264/gstuvch264_src.h
@@ -0,0 +1,166 @@
+/*
+ * GStreamer
+ *
+ * Copyright (C) 2012 Cisco Systems, Inc.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __GST_UVC_H264_SRC_H__
+#define __GST_UVC_H264_SRC_H__
+
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+#include <gst/gst.h>
+#include <gst/basecamerabinsrc/gstbasecamerasrc.h>
+#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB)
+#include <libusb.h>
+#endif
+
+#include "uvc_h264.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_UVC_H264_SRC \
+ (gst_uvc_h264_src_get_type())
+#define GST_UVC_H264_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_UVC_H264_SRC, GstUvcH264Src))
+#define GST_UVC_H264_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_UVC_H264_SRC, GstUvcH264SrcClass))
+#define GST_IS_UVC_H264_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_UVC_H264_SRC))
+#define GST_IS_UVC_H264_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_UVC_H264_SRC))
+ GType gst_uvc_h264_src_get_type (void);
+
+typedef struct _GstUvcH264Src GstUvcH264Src;
+typedef struct _GstUvcH264SrcClass GstUvcH264SrcClass;
+
+enum GstVideoRecordingStatus {
+ GST_VIDEO_RECORDING_STATUS_DONE,
+ GST_VIDEO_RECORDING_STATUS_STARTING,
+ GST_VIDEO_RECORDING_STATUS_RUNNING,
+ GST_VIDEO_RECORDING_STATUS_FINISHING
+};
+
+enum {
+ QP_I_FRAME = 0,
+ QP_P_FRAME,
+ QP_B_FRAME,
+ QP_FRAMES
+};
+
+typedef enum {
+ UVC_H264_SRC_FORMAT_NONE,
+ UVC_H264_SRC_FORMAT_JPG,
+ UVC_H264_SRC_FORMAT_H264,
+ UVC_H264_SRC_FORMAT_RAW
+} GstUvcH264SrcFormat;
+
+/**
+ * GstUvcH264Src:
+ *
+ */
+struct _GstUvcH264Src
+{
+ GstBaseCameraSrc parent;
+
+ GstPad *vfsrc;
+ GstPad *imgsrc;
+ GstPad *vidsrc;
+
+ /* source elements */
+ GstElement *v4l2_src;
+ GstElement *mjpg_demux;
+ GstElement *jpeg_dec;
+ GstElement *vid_colorspace;
+ GstElement *vf_colorspace;
+
+ GstUvcH264SrcFormat main_format;
+ guint16 main_width;
+ guint16 main_height;
+ guint32 main_frame_interval;
+ UvcH264StreamFormat main_stream_format;
+ guint16 main_profile;
+ GstUvcH264SrcFormat secondary_format;
+ guint16 secondary_width;
+ guint16 secondary_height;
+ guint32 secondary_frame_interval;
+
+ int v4l2_fd;
+ guint8 h264_unit_id;
+#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB)
+ libusb_context *usb_ctx;
+#endif
+
+ GstPadEventFunction srcpad_event_func;
+ GstEvent *key_unit_event;
+ GstSegment segment;
+
+ gboolean started;
+
+ /* When restarting the source */
+ gboolean reconfiguring;
+ gboolean vid_newseg;
+ gboolean vf_newseg;
+
+ gchar *colorspace_name;
+ gchar *jpeg_decoder_name;
+ int num_clock_samples;
+
+ /* v4l2src proxied properties */
+ guint32 num_buffers;
+ gchar *device;
+
+ /* Static controls */
+ guint32 initial_bitrate;
+ guint16 slice_units;
+ UvcH264SliceMode slice_mode;
+ guint16 iframe_period;
+ UvcH264UsageType usage_type;
+ UvcH264Entropy entropy;
+ gboolean enable_sei;
+ guint8 num_reorder_frames;
+ gboolean preview_flipped;
+ guint16 leaky_bucket_size;
+
+ /* Dynamic controls */
+ UvcH264RateControl rate_control;
+ gboolean fixed_framerate;
+ guint8 level_idc;
+ guint32 peak_bitrate;
+ guint32 average_bitrate;
+ gint8 min_qp[QP_FRAMES];
+ gint8 max_qp[QP_FRAMES];
+ guint8 ltr_buffer_size;
+ guint8 ltr_encoder_control;
+};
+
+
+/**
+ * GstUvcH264SrcClass:
+ *
+ */
+struct _GstUvcH264SrcClass
+{
+ GstBaseCameraSrcClass parent;
+};
+
+
+#endif /* __GST_UVC_H264_SRC_H__ */
diff --git a/sys/uvch264/uvc_h264.c b/sys/uvch264/uvc_h264.c
new file mode 100644
index 000000000..1c26ae437
--- /dev/null
+++ b/sys/uvch264/uvc_h264.c
@@ -0,0 +1,122 @@
+/*
+ * GStreamer
+ *
+ * Copyright (C) 2012 Cisco Systems, Inc.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+#include "uvc_h264.h"
+
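+/* GLib enum type registration for the UVC H.264 controls, so they can be
+ * exposed as GObject properties on the source element */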
+GType
+uvc_h264_slicemode_get_type (void)
+{
+ static GType type = 0;
+
+ static const GEnumValue types[] = {
+ {UVC_H264_SLICEMODE_IGNORED, "Ignored", "ignored"},
+ {UVC_H264_SLICEMODE_BITSPERSLICE, "Bits per slice", "bits/slice"},
+ {UVC_H264_SLICEMODE_MBSPERSLICE, "MBs per Slice", "MBs/slice"},
+ {UVC_H264_SLICEMODE_SLICEPERFRAME, "Slice Per Frame", "slice/frame"},
+ {0, NULL, NULL}
+ };
+
+ if (!type) {
+ type = g_enum_register_static ("UvcH264SliceMode", types);
+ }
+ return type;
+}
+
+GType
+uvc_h264_usagetype_get_type (void)
+{
+ static GType type = 0;
+
+ static const GEnumValue types[] = {
+ {UVC_H264_USAGETYPE_REALTIME, "Realtime (video conferencing)", "realtime"},
+ {UVC_H264_USAGETYPE_BROADCAST, "Broadcast", "broadcast"},
+ {UVC_H264_USAGETYPE_STORAGE, "Storage", "storage"},
+ {UVC_H264_USAGETYPE_UCCONFIG_0, "UCConfig 0", "ucconfig0"},
+ {UVC_H264_USAGETYPE_UCCONFIG_1, "UCConfig 1", "ucconfig1"},
+ {UVC_H264_USAGETYPE_UCCONFIG_2Q, "UCConfig 2Q", "ucconfig2q"},
+ {UVC_H264_USAGETYPE_UCCONFIG_2S, "UCConfig 2S", "ucconfig2s"},
+ {UVC_H264_USAGETYPE_UCCONFIG_3, "UCConfig 3", "ucconfig3"},
+ {0, NULL, NULL}
+ };
+
+ if (!type) {
+ type = g_enum_register_static ("UvcH264UsageType", types);
+ }
+ return type;
+}
+
+GType
+uvc_h264_ratecontrol_get_type (void)
+{
+ static GType type = 0;
+
+ static const GEnumValue types[] = {
+ {UVC_H264_RATECONTROL_CBR, "Constant bit rate", "cbr"},
+ {UVC_H264_RATECONTROL_VBR, "Variable bit rate", "vbr"},
+ {UVC_H264_RATECONTROL_CONST_QP, "Constant QP", "qp"},
+ {0, NULL, NULL}
+ };
+
+ if (!type) {
+ type = g_enum_register_static ("UvcH264RateControl", types);
+ }
+ return type;
+}
+
+GType
+uvc_h264_streamformat_get_type (void)
+{
+ static GType type = 0;
+
+ static const GEnumValue types[] = {
+ {UVC_H264_STREAMFORMAT_ANNEXB, "Byte stream format (Annex B)", "byte"},
+ {UVC_H264_STREAMFORMAT_NAL, "NAL stream format", "nal"},
+ {0, NULL, NULL}
+ };
+
+ if (!type) {
+ type = g_enum_register_static ("UvcH264StreamFormat", types);
+ }
+ return type;
+}
+
+GType
+uvc_h264_entropy_get_type (void)
+{
+ static GType type = 0;
+
+ static const GEnumValue types[] = {
+ {UVC_H264_ENTROPY_CAVLC, "CAVLC", "cavlc"},
+ {UVC_H264_ENTROPY_CABAC, "CABAC", "cabac"},
+ {0, NULL, NULL}
+ };
+
+ if (!type) {
+ type = g_enum_register_static ("UvcH264Entropy", types);
+ }
+ return type;
+}
diff --git a/sys/uvch264/uvc_h264.h b/sys/uvch264/uvc_h264.h
new file mode 100644
index 000000000..d27104ecf
--- /dev/null
+++ b/sys/uvch264/uvc_h264.h
@@ -0,0 +1,335 @@
+/*
+ * uvc_h264.h - Definitions of the UVC H.264 Payload specification Version 1.0
+ *
+ * Copyright (c) 2011 USB Implementers Forum, Inc.
+ *
+ * Modified into a GLib-style header by:
+ * Copyright (C) 2012 Cisco Systems, Inc.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+#ifndef _UVC_H264_H_
+#define _UVC_H264_H_
+
+/* Header file for little-endian hosts: the packed structs below match
+ * the wire layout of the UVC H.264 controls */
+
+#include <glib.h>
+#include <glib-object.h>
+
+/* bmHints defines */
+
+#define UVC_H264_BMHINTS_RESOLUTION (0x0001)
+#define UVC_H264_BMHINTS_PROFILE (0x0002)
+#define UVC_H264_BMHINTS_RATECONTROL (0x0004)
+#define UVC_H264_BMHINTS_USAGE (0x0008)
+#define UVC_H264_BMHINTS_SLICEMODE (0x0010)
+#define UVC_H264_BMHINTS_SLICEUNITS (0x0020)
+#define UVC_H264_BMHINTS_MVCVIEW (0x0040)
+#define UVC_H264_BMHINTS_TEMPORAL (0x0080)
+#define UVC_H264_BMHINTS_SNR (0x0100)
+#define UVC_H264_BMHINTS_SPATIAL (0x0200)
+#define UVC_H264_BMHINTS_SPATIAL_RATIO (0x0400)
+#define UVC_H264_BMHINTS_FRAME_INTERVAL (0x0800)
+#define UVC_H264_BMHINTS_LEAKY_BKT_SIZE (0x1000)
+#define UVC_H264_BMHINTS_BITRATE (0x2000)
+#define UVC_H264_BMHINTS_ENTROPY (0x4000)
+#define UVC_H264_BMHINTS_IFRAMEPERIOD (0x8000)
+
+
+#define UVC_H264_QP_STEPS_I_FRAME_TYPE (0x01)
+#define UVC_H264_QP_STEPS_P_FRAME_TYPE (0x02)
+#define UVC_H264_QP_STEPS_B_FRAME_TYPE (0x04)
+#define UVC_H264_QP_STEPS_ALL_FRAME_TYPES (UVC_H264_QP_STEPS_I_FRAME_TYPE | \
+ UVC_H264_QP_STEPS_P_FRAME_TYPE | UVC_H264_QP_STEPS_B_FRAME_TYPE)
+
+/* wSliceMode defines */
+
+typedef enum
+{
+ UVC_H264_SLICEMODE_IGNORED = 0x0000,
+ UVC_H264_SLICEMODE_BITSPERSLICE = 0x0001,
+ UVC_H264_SLICEMODE_MBSPERSLICE = 0x0002,
+ UVC_H264_SLICEMODE_SLICEPERFRAME = 0x0003
+} UvcH264SliceMode;
+
+#define UVC_H264_SLICEMODE_TYPE (uvc_h264_slicemode_get_type())
+
+GType uvc_h264_slicemode_get_type (void);
+
+/* bUsageType defines */
+
+typedef enum {
+ UVC_H264_USAGETYPE_REALTIME = 0x01,
+ UVC_H264_USAGETYPE_BROADCAST = 0x02,
+ UVC_H264_USAGETYPE_STORAGE = 0x03,
+ UVC_H264_USAGETYPE_UCCONFIG_0 = 0x04,
+ UVC_H264_USAGETYPE_UCCONFIG_1 = 0x05,
+ UVC_H264_USAGETYPE_UCCONFIG_2Q = 0x06,
+ UVC_H264_USAGETYPE_UCCONFIG_2S = 0x07,
+ UVC_H264_USAGETYPE_UCCONFIG_3 = 0x08,
+} UvcH264UsageType;
+
+#define UVC_H264_USAGETYPE_TYPE (uvc_h264_usagetype_get_type())
+
+GType uvc_h264_usagetype_get_type (void);
+
+/* bRateControlMode defines */
+
+typedef enum {
+ UVC_H264_RATECONTROL_CBR = 0x01,
+ UVC_H264_RATECONTROL_VBR = 0x02,
+ UVC_H264_RATECONTROL_CONST_QP = 0x03,
+} UvcH264RateControl;
+
+#define UVC_H264_RATECONTROL_FIXED_FRM_FLG (0x10)
+
+#define UVC_H264_RATECONTROL_TYPE (uvc_h264_ratecontrol_get_type())
+
+GType uvc_h264_ratecontrol_get_type (void);
+
+/* bStreamFormat defines */
+
+typedef enum {
+ UVC_H264_STREAMFORMAT_ANNEXB = 0x00,
+ UVC_H264_STREAMFORMAT_NAL = 0x01,
+} UvcH264StreamFormat;
+
+#define UVC_H264_STREAMFORMAT_TYPE (uvc_h264_streamformat_get_type())
+
+GType uvc_h264_streamformat_get_type (void);
+
+/* bEntropyCABAC defines */
+
+typedef enum {
+ UVC_H264_ENTROPY_CAVLC = 0x00,
+ UVC_H264_ENTROPY_CABAC = 0x01,
+} UvcH264Entropy;
+
+#define UVC_H264_ENTROPY_TYPE (uvc_h264_entropy_get_type())
+
+GType uvc_h264_entropy_get_type (void);
+
+/* bProfile defines */
+#define UVC_H264_PROFILE_CONSTRAINED_BASELINE 0x4240
+#define UVC_H264_PROFILE_BASELINE 0x4200
+#define UVC_H264_PROFILE_MAIN 0x4D00
+#define UVC_H264_PROFILE_HIGH 0x6400
+
+/* bTimestamp defines */
+
+#define UVC_H264_TIMESTAMP_SEI_DISABLE (0x00)
+#define UVC_H264_TIMESTAMP_SEI_ENABLE (0x01)
+
+/* bPreviewFlipped defines */
+
+#define UVC_H264_PREFLIPPED_DISABLE (0x00)
+#define UVC_H264_PREFLIPPED_HORIZONTAL (0x01)
+
+/* wPicType defines */
+#define UVC_H264_PICTYPE_I_FRAME (0x00)
+#define UVC_H264_PICTYPE_IDR (0x01)
+#define UVC_H264_PICTYPE_IDR_WITH_PPS_SPS (0x02)
+
+
+/* wLayerID Macro */
+
+/* wLayerID
+ |------------+------------+------------+----------------+------------|
+ | Reserved | StreamID | QualityID | DependencyID | TemporalID |
+ | (3 bits) | (3 bits) | (3 bits) | (4 bits) | (3 bits) |
+ |------------+------------+------------+----------------+------------|
+ |15 13|12 10|9 7|6 3|2 0|
+ |------------+------------+------------+----------------+------------|
+*/
+
+#define xLayerID(stream_id, quality_id, dependency_id, temporal_id) \
+ ((((stream_id) & 7) << 10) | \
+ (((quality_id) & 7) << 7) | \
+ (((dependency_id) & 15) << 3) | \
+ ((temporal_id) & 7))
+
+/* id extraction from wLayerID */
+
+#define xStream_id(layer_id) (((layer_id) >> 10) & 7)
+#define xQuality_id(layer_id) (((layer_id) >> 7) & 7)
+#define xDependency_id(layer_id) (((layer_id) >> 3) & 15)
+#define xTemporal_id(layer_id) ((layer_id)&7)
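+
+/* Example (illustrative values): xLayerID (1, 0, 0, 0) == 0x0400 selects
+ * stream 1 with the base quality/dependency/temporal layers, and
+ * xStream_id (0x0400) == 1 recovers the stream id. */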
+
+/* UVC H.264 control selectors */
+
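+/* Selectors used in GET_CUR/SET_CUR requests to the H.264 extension unit */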
+typedef enum _uvcx_control_selector_t
+{
+ UVCX_VIDEO_CONFIG_PROBE = 0x01,
+ UVCX_VIDEO_CONFIG_COMMIT = 0x02,
+ UVCX_RATE_CONTROL_MODE = 0x03,
+ UVCX_TEMPORAL_SCALE_MODE = 0x04,
+ UVCX_SPATIAL_SCALE_MODE = 0x05,
+ UVCX_SNR_SCALE_MODE = 0x06,
+ UVCX_LTR_BUFFER_SIZE_CONTROL = 0x07,
+ UVCX_LTR_PICTURE_CONTROL = 0x08,
+ UVCX_PICTURE_TYPE_CONTROL = 0x09,
+ UVCX_VERSION = 0x0A,
+ UVCX_ENCODER_RESET = 0x0B,
+ UVCX_FRAMERATE_CONFIG = 0x0C,
+ UVCX_VIDEO_ADVANCE_CONFIG = 0x0D,
+ UVCX_BITRATE_LAYERS = 0x0E,
+ UVCX_QP_STEPS_LAYERS = 0x0F,
+} uvcx_control_selector_t;
+
+
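+/* Probe/commit structure exchanged through UVCX_VIDEO_CONFIG_PROBE/COMMIT */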
+typedef struct _uvcx_video_config_probe_commit_t
+{
+ guint32 dwFrameInterval;
+ guint32 dwBitRate;
+ guint16 bmHints;
+ guint16 wConfigurationIndex;
+ guint16 wWidth;
+ guint16 wHeight;
+ guint16 wSliceUnits;
+ guint16 wSliceMode;
+ guint16 wProfile;
+ guint16 wIFramePeriod;
+ guint16 wEstimatedVideoDelay;
+ guint16 wEstimatedMaxConfigDelay;
+ guint8 bUsageType;
+ guint8 bRateControlMode;
+ guint8 bTemporalScaleMode;
+ guint8 bSpatialScaleMode;
+ guint8 bSNRScaleMode;
+ guint8 bStreamMuxOption;
+ guint8 bStreamFormat;
+ guint8 bEntropyCABAC;
+ guint8 bTimestamp;
+ guint8 bNumOfReorderFrames;
+ guint8 bPreviewFlipped;
+ guint8 bView;
+ guint8 bReserved1;
+ guint8 bReserved2;
+ guint8 bStreamID;
+ guint8 bSpatialLayerRatio;
+ guint16 wLeakyBucketSize;
+} __attribute__((packed)) uvcx_video_config_probe_commit_t;
+
+
+typedef struct _uvcx_rate_control_mode_t
+{
+ guint16 wLayerID;
+ guint8 bRateControlMode;
+} __attribute__((packed)) uvcx_rate_control_mode_t;
+
+
+typedef struct _uvcx_temporal_scale_mode_t
+{
+ guint16 wLayerID;
+ guint8 bTemporalScaleMode;
+} __attribute__((packed)) uvcx_temporal_scale_mode_t;
+
+
+typedef struct _uvcx_spatial_scale_mode_t
+{
+ guint16 wLayerID;
+ guint8 bSpatialScaleMode;
+} __attribute__((packed)) uvcx_spatial_scale_mode_t;
+
+
+typedef struct _uvcx_snr_scale_mode_t
+{
+ guint16 wLayerID;
+ guint8 bSNRScaleMode;
+ guint8 bMGSSublayerMode;
+} __attribute__((packed)) uvcx_snr_scale_mode_t;
+
+
+typedef struct _uvcx_ltr_buffer_size_control_t
+{
+ guint16 wLayerID;
+ guint8 bLTRBufferSize;
+ guint8 bLTREncoderControl;
+} __attribute__((packed)) uvcx_ltr_buffer_size_control_t;
+
+typedef struct _uvcx_ltr_picture_control
+{
+ guint16 wLayerID;
+ guint8 bPutAtPositionInLTRBuffer;
+ guint8 bEncodeUsingLTR;
+} __attribute__((packed)) uvcx_ltr_picture_control;
+
+
+typedef struct _uvcx_picture_type_control_t
+{
+ guint16 wLayerID;
+ guint16 wPicType;
+} __attribute__((packed)) uvcx_picture_type_control_t;
+
+
+typedef struct _uvcx_version_t
+{
+ guint16 wVersion;
+} __attribute__((packed)) uvcx_version_t;
+
+
+typedef struct _uvcx_encoder_reset
+{
+ guint16 wLayerID;
+} __attribute__((packed)) uvcx_encoder_reset;
+
+
+typedef struct _uvcx_framerate_config_t
+{
+ guint16 wLayerID;
+ guint32 dwFrameInterval;
+} __attribute__((packed)) uvcx_framerate_config_t;
+
+
+typedef struct _uvcx_video_advance_config_t
+{
+ guint16 wLayerID;
+ guint32 dwMb_max;
+ guint8 blevel_idc;
+ guint8 bReserved;
+} __attribute__((packed)) uvcx_video_advance_config_t;
+
+
+typedef struct _uvcx_bitrate_layers_t
+{
+ guint16 wLayerID;
+ guint32 dwPeakBitrate;
+ guint32 dwAverageBitrate;
+} __attribute__((packed)) uvcx_bitrate_layers_t;
+
+
+typedef struct _uvcx_qp_steps_layers_t
+{
+ guint16 wLayerID;
+ guint8 bFrameType;
+ guint8 bMinQp;
+ guint8 bMaxQp;
+} __attribute__((packed)) uvcx_qp_steps_layers_t;
+
+
+#ifdef _WIN32
+// GUID of the UVC H.264 extension unit: {A29E7641-DE04-47E3-8B2B-F4341AFF003B}
+DEFINE_GUID(GUID_UVCX_H264_XU, 0xA29E7641, 0xDE04, 0x47E3, 0x8B, 0x2B, 0xF4, 0x34, 0x1A, 0xFF, 0x00, 0x3B);
+#else
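+/* Same GUID as the 16-byte little-endian byte layout used on the wire */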
+#define GUID_UVCX_H264_XU \
+ {0x41, 0x76, 0x9e, 0xa2, 0x04, 0xde, 0xe3, 0x47, 0x8b, 0x2b, 0xF4, 0x34, 0x1A, 0xFF, 0x00, 0x3B}
+#endif
+
+#endif /*_UVC_H264_H_*/