author | Youness Alaoui <youness.alaoui@collabora.co.uk> | 2012-09-10 16:09:26 -0400
committer | Olivier Crête <olivier.crete@collabora.com> | 2012-09-10 16:09:26 -0400
commit | 1ba24e1306dd1623bae3400e12e073c9f6fc8d51 (patch)
tree | fe2df82a40468ac40fe25a58fc7479a7e2e3a3d4
parent | 1ef529601b0a9dfd9d278e37dbe141f21b9b8525 (diff)
download | gstreamer-plugins-bad-1ba24e1306dd1623bae3400e12e073c9f6fc8d51.tar.gz
UVC H264 plugin
28 files changed, 6921 insertions, 6 deletions
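The plugin registers two elements, uvch264_mjpgdemux and uvch264_src (see gstuvch264.c in the diff below). As a rough, illustrative sketch only — not part of this commit — the following GStreamer 0.10 snippet shows how an application might drive the new uvch264_src element. The element name and the "device" and "initial-bitrate" properties are taken from the diff, and the vfsrc/vidsrc pad names come from the basecamerabinsrc pad-name macros used in the code; the pipeline layout, the caps values, /dev/video0 and capture.h264 are assumptions for illustration and depend on the camera.

/* Illustrative sketch: preview pad to a fakesink, H.264 stream dumped to a file. */
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GError *error = NULL;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  /* Build the pipeline from a gst-launch style description.  The caps on the
   * vidsrc branch ask the camera's encoder for a 1280x720 H.264 stream. */
  pipeline = gst_parse_launch (
      "uvch264_src device=/dev/video0 initial-bitrate=3000000 name=src "
      "src.vfsrc ! queue ! fakesink "
      "src.vidsrc ! queue ! video/x-h264,width=1280,height=720 ! "
      "filesink location=capture.h264", &error);
  if (error != NULL) {
    g_printerr ("Failed to build pipeline: %s\n", error->message);
    g_clear_error (&error);
    if (pipeline)
      gst_object_unref (pipeline);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Run until an error or end-of-stream message is posted on the bus. */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}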
diff --git a/configure.ac b/configure.ac index 47b93d99c..a468404a2 100644 --- a/configure.ac +++ b/configure.ac @@ -325,7 +325,7 @@ GST_PLUGINS_NONPORTED=" aiff \ gsettings jasper ladspa \ musepack musicbrainz nas neon ofa openal rsvg sdl sndfile spandsp timidity \ directsound directdraw direct3d9 acm wininet \ - wildmidi xvid lv2 teletextdec sndio" + wildmidi xvid lv2 teletextdec sndio uvch264" AC_SUBST(GST_PLUGINS_NONPORTED) dnl these are all the gst plug-ins, compilable without additional libs @@ -689,6 +689,27 @@ AG_GST_CHECK_FEATURE(VCD, [Video CD], vcdsrc, [ AC_CHECK_HEADER(linux/cdrom.h, HAVE_VCD="yes", HAVE_VCD="no") ]) + +dnl *** UVC H264 *** +translit(dnm, m, l) AM_CONDITIONAL(USE_UVCH264, true) +AG_GST_CHECK_FEATURE(UVCH264, [UVC H264], uvch264, [ + AC_CHECK_HEADER(linux/uvcvideo.h, HAVE_UVCH264=yes, HAVE_UVCH264=no) + AG_GST_PKG_CHECK_MODULES(GST_VIDEO, gstreamer-video-0.10 >= 0.10.36) + PKG_CHECK_MODULES(G_UDEV, gudev-1.0 , [ + AC_DEFINE([HAVE_GUDEV], 1, [Define if gudev is installed]) + HAVE_GUDEV="yes" ], + [HAVE_GUDEV="no"]) + PKG_CHECK_MODULES(LIBUSB, libusb-1.0 , [ + AC_DEFINE([HAVE_LIBUSB], 1, [Define if libusb 1.x is installed]) + HAVE_LIBUSB="yes" ], + [HAVE_LIBUSB="no"]) +]) +AC_SUBST(LIBUDEV_CFLAGS) +AC_SUBST(LIBUDEV_LIBS) +AC_SUBST(LIBUSB_CFLAGS) +AC_SUBST(LIBUSB_LIBS) + + dnl *** ext plug-ins *** dnl keep this list sorted alphabetically ! @@ -1905,6 +1926,7 @@ AM_CONDITIONAL(USE_VP8, false) AM_CONDITIONAL(USE_RTMP, false) AM_CONDITIONAL(USE_TELETEXTDEC, false) AM_CONDITIONAL(USE_SNDIO, false) +AM_CONDITIONAL(USE_UVCH264, false) fi dnl of EXT plugins @@ -2079,6 +2101,7 @@ sys/linsys/Makefile sys/osxvideo/Makefile sys/qtwrapper/Makefile sys/shm/Makefile +sys/uvch264/Makefile sys/vcd/Makefile sys/vdpau/Makefile sys/vdpau/gstvdp/Makefile @@ -2097,6 +2120,7 @@ tests/examples/directfb/Makefile tests/examples/mxf/Makefile tests/examples/scaletempo/Makefile tests/examples/opencv/Makefile +tests/examples/uvch264/Makefile tests/icles/Makefile ext/voamrwbenc/Makefile ext/voaacenc/Makefile diff --git a/sys/Makefile.am b/sys/Makefile.am index d1a29b344..d79d22325 100644 --- a/sys/Makefile.am +++ b/sys/Makefile.am @@ -130,9 +130,15 @@ else WINSCREENCAP_DIR= endif -SUBDIRS = $(ACM_DIR) $(APPLE_MEDIA_DIR) $(AVC_DIR) $(D3DVIDEOSINK_DIR) $(DECKLINK_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DIRECTSHOW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(LINSYS_DIR) $(OSX_VIDEO_DIR) $(PVR_DIR) $(QT_DIR) $(SHM_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR) $(WINSCREENCAP_DIR) +if USE_UVCH264 +UVCH264_DIR=uvch264 +else +UVCH264_DIR= +endif + +SUBDIRS = $(ACM_DIR) $(APPLE_MEDIA_DIR) $(AVC_DIR) $(D3DVIDEOSINK_DIR) $(DECKLINK_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DIRECTSHOW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(LINSYS_DIR) $(OSX_VIDEO_DIR) $(PVR_DIR) $(QT_DIR) $(SHM_DIR) $(UVCH264_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR) $(WINSCREENCAP_DIR) DIST_SUBDIRS = acmenc acmmp3dec applemedia avc d3dvideosink decklink directdraw directsound dvb linsys fbdev dshowdecwrapper dshowsrcwrapper dshowvideosink \ - osxvideo pvr2d qtwrapper shm vcd vdpau wasapi wininet winks winscreencap + osxvideo pvr2d qtwrapper shm uvch264 vcd vdpau wasapi wininet winks winscreencap include $(top_srcdir)/common/parallel-subdirs.mak diff --git a/sys/uvch264/Makefile.am b/sys/uvch264/Makefile.am new file mode 100644 index 000000000..8ecff113f --- /dev/null +++ b/sys/uvch264/Makefile.am @@ -0,0 +1,48 @@ +glib_gen_prefix = __gst_uvc_h264 +glib_gen_basename = gstuvch264 + +include $(top_srcdir)/common/gst-glib-gen.mak + +built_sources = 
gstuvch264-marshal.c +built_headers = gstuvch264-marshal.h + +BUILT_SOURCES = $(built_sources) $(built_headers) + +CLEANFILES = $(BUILT_SOURCES) + +EXTRA_DIST = gstuvch264-marshal.list + + +plugin_LTLIBRARIES = libgstuvch264.la + +libgstuvch264_la_SOURCES = gstuvch264.c \ + gstuvch264_mjpgdemux.c \ + gstuvch264_src.c \ + uvc_h264.c + +nodist_libgstuvch264_la_SOURCES = $(built_sources) + +libgstuvch264_la_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) \ + $(GST_PLUGINS_BASE_CFLAGS) \ + $(GST_BASE_CFLAGS) \ + $(GST_VIDEO_CFLAGS) \ + $(GST_CFLAGS) \ + $(G_UDEV_CFLAGS) \ + $(LIBUSB_CFLAGS) \ + -DGST_USE_UNSTABLE_API + +libgstuvch264_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) +libgstuvch264_la_LIBTOOLFLAGS = --tag=disable-static + +libgstuvch264_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \ + $(GST_BASE_LIBS) \ + $(GST_PLUGINS_BASE_LIBS) \ + $(GST_VIDEO_LIBS) \ + $(GST_LIBS) \ + $(G_UDEV_LIBS) \ + $(LIBUSB_LIBS) \ + $(top_builddir)/gst-libs/gst/basecamerabinsrc/libgstbasecamerabinsrc-$(GST_MAJORMINOR).la + +noinst_HEADERS = gstuvch264_mjpgdemux.h \ + gstuvch264_src.h \ + uvc_h264.h diff --git a/sys/uvch264/gstuvch264-marshal.list b/sys/uvch264/gstuvch264-marshal.list new file mode 100644 index 000000000..a9ec0dd26 --- /dev/null +++ b/sys/uvch264/gstuvch264-marshal.list @@ -0,0 +1,3 @@ +BOOLEAN:STRING,POINTER,POINTER,POINTER +BOOLEAN:STRING,POINTER,POINTER +BOOLEAN:STRING,POINTER diff --git a/sys/uvch264/gstuvch264.c b/sys/uvch264/gstuvch264.c new file mode 100644 index 000000000..77ad73dc4 --- /dev/null +++ b/sys/uvch264/gstuvch264.c @@ -0,0 +1,50 @@ +/* GStreamer + * + * uvch264: a plugin for handling UVC compliant H264 encoding cameras + * + * Copyright (C) 2012 Cisco Systems, Inc. + * Author: Youness Alaoui <youness.alaoui@collabora.co.uk> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include <config.h> +#endif + +#include <gst/gst.h> +#include "gstuvch264_mjpgdemux.h" +#include "gstuvch264_src.h" + +static gboolean +plugin_init (GstPlugin * plugin) +{ + if (!gst_element_register (plugin, "uvch264_mjpgdemux", GST_RANK_NONE, + GST_TYPE_UVC_H264_MJPG_DEMUX)) + return FALSE; + + if (!gst_element_register (plugin, "uvch264_src", GST_RANK_NONE, + GST_TYPE_UVC_H264_SRC)) + return FALSE; + + return TRUE; +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "uvch264", + "UVC compliant H264 encoding cameras plugin", + plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) diff --git a/sys/uvch264/gstuvch264_mjpgdemux.c b/sys/uvch264/gstuvch264_mjpgdemux.c new file mode 100644 index 000000000..4bc689981 --- /dev/null +++ b/sys/uvch264/gstuvch264_mjpgdemux.c @@ -0,0 +1,723 @@ +/* GStreamer + * + * uvch264_mjpg_demux: a demuxer for muxed stream in UVC H264 compliant MJPG + * + * Copyright (C) 2012 Cisco Systems, Inc. 
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +/** + * SECTION:element-uvch264-mjpgdemux + * @short_description: UVC H264 compliant MJPG demuxer + * + * Parses a MJPG stream from a UVC H264 compliant encoding camera and extracts + * each muxed stream into separate pads. + * + */ + +#ifdef HAVE_CONFIG_H +#include <config.h> +#endif + +#include <string.h> +#include <linux/uvcvideo.h> +#include <linux/usb/video.h> +#include <sys/ioctl.h> + +#ifndef UVCIOC_GET_LAST_SCR +#include <time.h> + +struct uvc_last_scr_sample +{ + __u32 dev_frequency; + __u32 dev_stc; + __u16 dev_sof; + struct timespec host_ts; + __u16 host_sof; +}; + +#define UVCIOC_GET_LAST_SCR _IOR('u', 0x23, struct uvc_last_scr_sample) +#endif + +#include "gstuvch264_mjpgdemux.h" + +enum +{ + PROP_0, + PROP_DEVICE_FD, + PROP_NUM_CLOCK_SAMPLES +}; + +#define DEFAULT_NUM_CLOCK_SAMPLES 32 + +static GstStaticPadTemplate mjpgsink_pad_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("image/jpeg, " + "width = (int) [ 0, MAX ]," + "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ") + ); + +static GstStaticPadTemplate jpegsrc_pad_template = +GST_STATIC_PAD_TEMPLATE ("jpeg", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("image/jpeg, " + "width = (int) [ 0, MAX ]," + "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ") + ); + +static GstStaticPadTemplate h264src_pad_template = +GST_STATIC_PAD_TEMPLATE ("h264", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/x-h264, " + "width = (int) [ 0, MAX ], " + "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ") + ); + +static GstStaticPadTemplate yuy2src_pad_template = +GST_STATIC_PAD_TEMPLATE ("yuy2", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/x-raw-yuv, " + "format = (fourcc) YUY2, " + "width = (int) [ 0, MAX ], " + "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ") + ); +static GstStaticPadTemplate nv12src_pad_template = +GST_STATIC_PAD_TEMPLATE ("nv12", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/x-raw-yuv, " + "format = (fourcc) NV21, " + "width = (int) [ 0, MAX ], " + "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ") + ); + + +GST_DEBUG_CATEGORY_STATIC (uvc_h264_mjpg_demux_debug); +#define GST_CAT_DEFAULT uvc_h264_mjpg_demux_debug + +typedef struct +{ + guint32 dev_stc; + guint32 dev_sof; + GstClockTime host_ts; + guint32 host_sof; +} GstUvcH264ClockSample; + +struct _GstUvcH264MjpgDemuxPrivate +{ + int device_fd; + int num_clock_samples; + GstUvcH264ClockSample *clock_samples; + int last_sample; + int num_samples; + GstPad *sink_pad; + GstPad *jpeg_pad; + GstPad *h264_pad; + GstPad *yuy2_pad; + GstPad *nv12_pad; + GstCaps *h264_caps; + GstCaps 
*yuy2_caps; + GstCaps *nv12_caps; + guint16 h264_width; + guint16 h264_height; + guint16 yuy2_width; + guint16 yuy2_height; + guint16 nv12_width; + guint16 nv12_height; +}; + +typedef struct +{ + guint16 version; + guint16 header_len; + guint32 type; + guint16 width; + guint16 height; + guint32 frame_interval; + guint16 delay; + guint32 pts; +} __attribute__ ((packed)) AuxiliaryStreamHeader; + +static void gst_uvc_h264_mjpg_demux_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); +static void gst_uvc_h264_mjpg_demux_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec); +static void gst_uvc_h264_mjpg_demux_dispose (GObject * object); +static GstFlowReturn gst_uvc_h264_mjpg_demux_chain (GstPad * pad, + GstBuffer * buffer); +static gboolean gst_uvc_h264_mjpg_demux_sink_setcaps (GstPad * pad, + GstCaps * caps); +static GstCaps *gst_uvc_h264_mjpg_demux_getcaps (GstPad * pad); + +#define _do_init(x) \ + GST_DEBUG_CATEGORY_INIT (uvc_h264_mjpg_demux_debug, \ + "uvch264_mjpgdemux", 0, "UVC H264 MJPG Demuxer"); + +GST_BOILERPLATE_FULL (GstUvcH264MjpgDemux, gst_uvc_h264_mjpg_demux, GstElement, + GST_TYPE_ELEMENT, _do_init); + +static void +gst_uvc_h264_mjpg_demux_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + GstPadTemplate *pt; + + /* do not use gst_element_class_add_static_pad_template to stay compatible + * with gstreamer 0.10.35 */ + pt = gst_static_pad_template_get (&mjpgsink_pad_template); + gst_element_class_add_pad_template (element_class, pt); + gst_object_unref (pt); + pt = gst_static_pad_template_get (&jpegsrc_pad_template); + gst_element_class_add_pad_template (element_class, pt); + gst_object_unref (pt); + pt = gst_static_pad_template_get (&h264src_pad_template); + gst_element_class_add_pad_template (element_class, pt); + gst_object_unref (pt); + pt = gst_static_pad_template_get (&yuy2src_pad_template); + gst_element_class_add_pad_template (element_class, pt); + gst_object_unref (pt); + pt = gst_static_pad_template_get (&nv12src_pad_template); + gst_element_class_add_pad_template (element_class, pt); + gst_object_unref (pt); + + gst_element_class_set_details_simple (element_class, + "UVC H264 MJPG Demuxer", + "Video/Demuxer", + "Demux UVC H264 auxiliary streams from MJPG images", + "Youness Alaoui <youness.alaoui@collabora.co.uk>"); +} + +static void +gst_uvc_h264_mjpg_demux_class_init (GstUvcH264MjpgDemuxClass * klass) +{ + GObjectClass *gobject_class = (GObjectClass *) klass; + + g_type_class_add_private (gobject_class, sizeof (GstUvcH264MjpgDemuxPrivate)); + + gobject_class->set_property = gst_uvc_h264_mjpg_demux_set_property; + gobject_class->get_property = gst_uvc_h264_mjpg_demux_get_property; + gobject_class->dispose = gst_uvc_h264_mjpg_demux_dispose; + + + g_object_class_install_property (gobject_class, PROP_DEVICE_FD, + g_param_spec_int ("device-fd", "device-fd", + "File descriptor of the v4l2 device", + -1, G_MAXINT, -1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_NUM_CLOCK_SAMPLES, + g_param_spec_int ("num-clock-samples", "num-clock-samples", + "Number of clock samples to gather for the PTS synchronization" + " (-1 = unlimited)", + 0, G_MAXINT, DEFAULT_NUM_CLOCK_SAMPLES, + G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS)); +} + +static void +gst_uvc_h264_mjpg_demux_init (GstUvcH264MjpgDemux * self, + GstUvcH264MjpgDemuxClass * g_class) +{ + self->priv = G_TYPE_INSTANCE_GET_PRIVATE 
(self, GST_TYPE_UVC_H264_MJPG_DEMUX, + GstUvcH264MjpgDemuxPrivate); + + + self->priv->device_fd = -1; + + /* create the sink and src pads */ + self->priv->sink_pad = + gst_pad_new_from_static_template (&mjpgsink_pad_template, "sink"); + gst_pad_set_chain_function (self->priv->sink_pad, + GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_chain)); + gst_pad_set_setcaps_function (self->priv->sink_pad, + GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_sink_setcaps)); + gst_pad_set_getcaps_function (self->priv->sink_pad, + GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_getcaps)); + gst_element_add_pad (GST_ELEMENT (self), self->priv->sink_pad); + + /* JPEG */ + self->priv->jpeg_pad = + gst_pad_new_from_static_template (&jpegsrc_pad_template, "jpeg"); + gst_pad_set_getcaps_function (self->priv->jpeg_pad, + GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_getcaps)); + gst_element_add_pad (GST_ELEMENT (self), self->priv->jpeg_pad); + + /* H264 */ + self->priv->h264_pad = + gst_pad_new_from_static_template (&h264src_pad_template, "h264"); + gst_pad_use_fixed_caps (self->priv->h264_pad); + gst_element_add_pad (GST_ELEMENT (self), self->priv->h264_pad); + + /* YUY2 */ + self->priv->yuy2_pad = + gst_pad_new_from_static_template (&yuy2src_pad_template, "yuy2"); + gst_pad_use_fixed_caps (self->priv->yuy2_pad); + gst_element_add_pad (GST_ELEMENT (self), self->priv->yuy2_pad); + + /* NV12 */ + self->priv->nv12_pad = + gst_pad_new_from_static_template (&nv12src_pad_template, "nv12"); + gst_pad_use_fixed_caps (self->priv->nv12_pad); + gst_element_add_pad (GST_ELEMENT (self), self->priv->nv12_pad); + + self->priv->h264_caps = gst_caps_new_simple ("video/x-h264", NULL); + self->priv->yuy2_caps = gst_caps_new_simple ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'), NULL); + self->priv->nv12_caps = gst_caps_new_simple ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('N', 'V', '1', '2'), NULL); + self->priv->h264_width = self->priv->h264_height = 0; + self->priv->yuy2_width = self->priv->yuy2_height = 0; + self->priv->nv12_width = self->priv->nv12_height = 0; +} + +static void +gst_uvc_h264_mjpg_demux_dispose (GObject * object) +{ + GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object); + + if (self->priv->h264_caps) + gst_caps_unref (self->priv->h264_caps); + self->priv->h264_caps = NULL; + if (self->priv->yuy2_caps) + gst_caps_unref (self->priv->yuy2_caps); + self->priv->yuy2_caps = NULL; + if (self->priv->nv12_caps) + gst_caps_unref (self->priv->nv12_caps); + self->priv->nv12_caps = NULL; + if (self->priv->clock_samples) + g_free (self->priv->clock_samples); + self->priv->clock_samples = NULL; + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +static void +gst_uvc_h264_mjpg_demux_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) +{ + GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object); + + switch (prop_id) { + case PROP_DEVICE_FD: + self->priv->device_fd = g_value_get_int (value); + break; + case PROP_NUM_CLOCK_SAMPLES: + self->priv->num_clock_samples = g_value_get_int (value); + if (self->priv->clock_samples) { + if (self->priv->num_clock_samples) { + self->priv->clock_samples = g_realloc_n (self->priv->clock_samples, + self->priv->num_clock_samples, sizeof (GstUvcH264ClockSample)); + if (self->priv->num_samples > self->priv->num_clock_samples) { + self->priv->num_samples = self->priv->num_clock_samples; + if (self->priv->last_sample >= self->priv->num_samples) + self->priv->last_sample = self->priv->num_samples 
- 1; + } + } else { + g_free (self->priv->clock_samples); + self->priv->clock_samples = NULL; + self->priv->last_sample = -1; + self->priv->num_samples = 0; + } + } + if (self->priv->num_clock_samples > 0) { + self->priv->clock_samples = g_malloc0_n (self->priv->num_clock_samples, + sizeof (GstUvcH264ClockSample)); + self->priv->last_sample = -1; + self->priv->num_samples = 0; + } + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec); + break; + } +} + +static void +gst_uvc_h264_mjpg_demux_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) +{ + GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object); + + switch (prop_id) { + case PROP_DEVICE_FD: + g_value_set_int (value, self->priv->device_fd); + break; + case PROP_NUM_CLOCK_SAMPLES: + g_value_set_int (value, self->priv->num_clock_samples); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec); + break; + } +} + + +static gboolean +gst_uvc_h264_mjpg_demux_sink_setcaps (GstPad * pad, GstCaps * caps) +{ + GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (GST_OBJECT_PARENT (pad)); + + return gst_pad_set_caps (self->priv->jpeg_pad, caps); +} + +static GstCaps * +gst_uvc_h264_mjpg_demux_getcaps (GstPad * pad) +{ + GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (GST_OBJECT_PARENT (pad)); + GstCaps *result = NULL; + + if (pad == self->priv->jpeg_pad) + result = gst_pad_peer_get_caps (self->priv->sink_pad); + else if (pad == self->priv->sink_pad) + result = gst_pad_peer_get_caps (self->priv->jpeg_pad); + + /* TODO: intersect with template and fixate caps */ + if (result == NULL) + result = gst_caps_copy (gst_pad_get_pad_template_caps (pad)); + + return result; +} + +static gboolean +_pts_to_timestamp (GstUvcH264MjpgDemux * self, GstBuffer * buf, guint32 pts) +{ + GstUvcH264MjpgDemuxPrivate *priv = self->priv; + GstUvcH264ClockSample *current_sample = NULL; + GstUvcH264ClockSample *oldest_sample = NULL; + guint32 next_sample; + struct uvc_last_scr_sample sample; + guint32 dev_sof; + + if (self->priv->device_fd == -1 || priv->clock_samples == NULL) + return FALSE; + + if (-1 == ioctl (priv->device_fd, UVCIOC_GET_LAST_SCR, &sample)) { + //GST_WARNING_OBJECT (self, " GET_LAST_SCR error"); + return FALSE; + } + + dev_sof = (guint32) (sample.dev_sof + 2048) << 16; + if (priv->num_samples > 0 && + priv->clock_samples[priv->last_sample].dev_sof == dev_sof) { + current_sample = &priv->clock_samples[priv->last_sample]; + } else { + next_sample = (priv->last_sample + 1) % priv->num_clock_samples; + current_sample = &priv->clock_samples[next_sample]; + current_sample->dev_stc = sample.dev_stc; + current_sample->dev_sof = dev_sof; + current_sample->host_ts = sample.host_ts.tv_sec * GST_SECOND + + sample.host_ts.tv_nsec * GST_NSECOND; + current_sample->host_sof = (guint32) (sample.host_sof + 2048) << 16; + + priv->num_samples++; + priv->last_sample = next_sample; + + /* Debug printing */ + GST_DEBUG_OBJECT (self, "device frequency: %u", sample.dev_frequency); + GST_DEBUG_OBJECT (self, "dev_sof: %u", sample.dev_sof); + GST_DEBUG_OBJECT (self, "dev_stc: %u", sample.dev_stc); + GST_DEBUG_OBJECT (self, "host_ts: %lu -- %" GST_TIME_FORMAT, + current_sample->host_ts, GST_TIME_ARGS (current_sample->host_ts)); + GST_DEBUG_OBJECT (self, "host_sof: %u", sample.host_sof); + GST_DEBUG_OBJECT (self, "PTS: %u", pts); + GST_DEBUG_OBJECT (self, "Diff: %u - %f\n", sample.dev_stc - pts, + (gdouble) (sample.dev_stc - pts) / sample.dev_frequency); + } + + if (priv->num_samples < 
priv->num_clock_samples) + return FALSE; + + next_sample = (priv->last_sample + 1) % priv->num_clock_samples; + oldest_sample = &priv->clock_samples[next_sample]; + + /* TODO: Use current_sample and oldest_sample to do the + * double linear regression and calculate a new PTS */ + (void) oldest_sample; + + return TRUE; +} + +static GstFlowReturn +gst_uvc_h264_mjpg_demux_chain (GstPad * pad, GstBuffer * buf) +{ + GstUvcH264MjpgDemux *self; + GstFlowReturn ret = GST_FLOW_OK; + GstBufferList *jpeg_buf = gst_buffer_list_new (); + GstBufferListIterator *jpeg_it = gst_buffer_list_iterate (jpeg_buf); + GstBufferList *aux_buf = NULL; + GstBufferListIterator *aux_it = NULL; + AuxiliaryStreamHeader aux_header = { 0 }; + GstBuffer *sub_buffer = NULL; + guint32 aux_size = 0; + GstPad *aux_pad = NULL; + GstCaps **aux_caps = NULL; + guint last_offset; + guint i; + guchar *data; + guint size; + + self = GST_UVC_H264_MJPG_DEMUX (GST_PAD_PARENT (pad)); + + last_offset = 0; + data = GST_BUFFER_DATA (buf); + size = GST_BUFFER_SIZE (buf); + if (data == NULL || size == 0) { + ret = gst_pad_push (self->priv->jpeg_pad, buf); + goto done; + } + + gst_buffer_list_iterator_add_group (jpeg_it); + for (i = 0; i < size - 1; i++) { + /* Check for APP4 (0xe4) marker in the jpeg */ + if (data[i] == 0xff && data[i + 1] == 0xe4) { + guint16 segment_size; + + /* Sanity check sizes and get segment size */ + if (i + 4 >= size) { + GST_ELEMENT_ERROR (self, STREAM, DEMUX, + ("Not enough data to read marker size"), (NULL)); + ret = GST_FLOW_ERROR; + goto done; + } + segment_size = GUINT16_FROM_BE (*((guint16 *) (data + i + 2))); + + if (i + segment_size + 2 >= size) { + GST_ELEMENT_ERROR (self, STREAM, DEMUX, + ("Not enough data to read marker content"), (NULL)); + ret = GST_FLOW_ERROR; + goto done; + } + GST_DEBUG_OBJECT (self, + "Found APP4 marker (%d). 
JPG: %d-%d - APP4: %d - %d", segment_size, + last_offset, i, i, i + 2 + segment_size); + + /* Add JPEG data between the last offset and this market */ + if (i - last_offset > 0) { + sub_buffer = gst_buffer_create_sub (buf, last_offset, i - last_offset); + gst_buffer_copy_metadata (sub_buffer, buf, GST_BUFFER_COPY_ALL); + gst_buffer_list_iterator_add (jpeg_it, sub_buffer); + } + last_offset = i + 2 + segment_size; + + /* Reset i/segment size to the app4 data (ignore marker header/size) */ + i += 4; + segment_size -= 2; + + /* If this is a new auxiliary stream, initialize everything properly */ + if (aux_buf == NULL) { + if (segment_size < sizeof (aux_header) + sizeof (aux_size)) { + GST_ELEMENT_ERROR (self, STREAM, DEMUX, + ("Not enough data to read aux header"), (NULL)); + ret = GST_FLOW_ERROR; + goto done; + } + + aux_header = *((AuxiliaryStreamHeader *) (data + i)); + /* version should be little endian but it looks more like BE */ + aux_header.version = GUINT16_FROM_BE (aux_header.version); + aux_header.header_len = GUINT16_FROM_LE (aux_header.header_len); + aux_header.width = GUINT16_FROM_LE (aux_header.width); + aux_header.height = GUINT16_FROM_LE (aux_header.height); + aux_header.frame_interval = GUINT32_FROM_LE (aux_header.frame_interval); + aux_header.delay = GUINT16_FROM_LE (aux_header.delay); + aux_header.pts = GUINT32_FROM_LE (aux_header.pts); + GST_DEBUG_OBJECT (self, "New auxiliary stream : v%d - %d bytes - %" + GST_FOURCC_FORMAT " %dx%d -- %d *100ns -- %d ms -- %d", + aux_header.version, aux_header.header_len, + GST_FOURCC_ARGS (aux_header.type), + aux_header.width, aux_header.height, + aux_header.frame_interval, aux_header.delay, aux_header.pts); + aux_size = *((guint32 *) (data + i + aux_header.header_len)); + GST_DEBUG_OBJECT (self, "Auxiliary stream size : %d bytes", aux_size); + + if (aux_size > 0) { + guint16 *width = NULL; + guint16 *height = NULL; + + /* Find the auxiliary stream's pad and caps */ + switch (aux_header.type) { + case GST_MAKE_FOURCC ('H', '2', '6', '4'): + aux_pad = self->priv->h264_pad; + aux_caps = &self->priv->h264_caps; + width = &self->priv->h264_width; + height = &self->priv->h264_height; + break; + case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'): + aux_pad = self->priv->yuy2_pad; + aux_caps = &self->priv->yuy2_caps; + width = &self->priv->yuy2_width; + height = &self->priv->yuy2_height; + break; + case GST_MAKE_FOURCC ('N', 'V', '1', '2'): + aux_pad = self->priv->nv12_pad; + aux_caps = &self->priv->nv12_caps; + width = &self->priv->nv12_width; + height = &self->priv->nv12_height; + break; + default: + GST_ELEMENT_ERROR (self, STREAM, DEMUX, + ("Unknown auxiliary stream format : %" GST_FOURCC_FORMAT, + GST_FOURCC_ARGS (aux_header.type)), (NULL)); + ret = GST_FLOW_ERROR; + break; + } + + if (ret != GST_FLOW_OK) + goto done; + + if (*width != aux_header.width || *height != aux_header.height) { + GstCaps *peercaps = gst_pad_peer_get_caps (aux_pad); + GstStructure *s = NULL; + gint fps_num = 1000000000 / aux_header.frame_interval; + gint fps_den = 100; + + /* TODO: intersect with pad template */ + GST_DEBUG ("peercaps : %" GST_PTR_FORMAT, peercaps); + if (peercaps && !gst_caps_is_any (peercaps)) + s = gst_caps_get_structure (peercaps, 0); + if (s) { + /* TODO: make sure it contains the right format/width/height */ + gst_structure_fixate_field_nearest_fraction (s, "framerate", + fps_num, fps_den); + GST_DEBUG ("Fixated struct : %" GST_PTR_FORMAT, s); + gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den); + } + if (peercaps) + gst_caps_unref 
(peercaps); + + *width = aux_header.width; + *height = aux_header.height; + *aux_caps = gst_caps_make_writable (*aux_caps); + /* FIXME: fps must match the caps and be allowed and represent + our first buffer */ + gst_caps_set_simple (*aux_caps, + "width", G_TYPE_INT, aux_header.width, + "height", G_TYPE_INT, aux_header.height, + "framerate", GST_TYPE_FRACTION, fps_num, fps_den, NULL); + if (!gst_pad_set_caps (aux_pad, *aux_caps)) { + ret = GST_FLOW_NOT_NEGOTIATED; + goto done; + } + } + + /* Create new auxiliary buffer list and adjust i/segment size */ + aux_buf = gst_buffer_list_new (); + aux_it = gst_buffer_list_iterate (aux_buf); + gst_buffer_list_iterator_add_group (aux_it); + } + + i += sizeof (aux_header) + sizeof (aux_size); + segment_size -= sizeof (aux_header) + sizeof (aux_size); + } + + if (segment_size > aux_size) { + GST_ELEMENT_ERROR (self, STREAM, DEMUX, + ("Expected %d auxiliary data, got %d bytes", aux_size, + segment_size), (NULL)); + ret = GST_FLOW_ERROR; + goto done; + } + + if (segment_size > 0) { + sub_buffer = gst_buffer_create_sub (buf, i, segment_size); + GST_BUFFER_DURATION (sub_buffer) = + aux_header.frame_interval * 100 * GST_NSECOND; + gst_buffer_copy_metadata (sub_buffer, buf, GST_BUFFER_COPY_TIMESTAMPS); + gst_buffer_set_caps (sub_buffer, *aux_caps); + + _pts_to_timestamp (self, sub_buffer, aux_header.pts); + + gst_buffer_list_iterator_add (aux_it, sub_buffer); + + aux_size -= segment_size; + + /* Push completed aux data */ + if (aux_size == 0) { + gst_buffer_list_iterator_free (aux_it); + aux_it = NULL; + GST_DEBUG_OBJECT (self, "Pushing %" GST_FOURCC_FORMAT + " auxiliary buffer %" GST_PTR_FORMAT, + GST_FOURCC_ARGS (aux_header.type), *aux_caps); + ret = gst_pad_push_list (aux_pad, aux_buf); + aux_buf = NULL; + if (ret != GST_FLOW_OK) { + GST_WARNING_OBJECT (self, "Error pushing %" GST_FOURCC_FORMAT + " auxiliary data", GST_FOURCC_ARGS (aux_header.type)); + goto done; + } + } + } + + i += segment_size - 1; + } else if (data[i] == 0xff && data[i + 1] == 0xda) { + + /* The APP4 markers must be before the SOS marker, so this is the end */ + GST_DEBUG_OBJECT (self, "Found SOS marker."); + + sub_buffer = gst_buffer_create_sub (buf, last_offset, size - last_offset); + gst_buffer_copy_metadata (sub_buffer, buf, GST_BUFFER_COPY_ALL); + gst_buffer_list_iterator_add (jpeg_it, sub_buffer); + last_offset = size; + break; + } + } + gst_buffer_list_iterator_free (jpeg_it); + jpeg_it = NULL; + + if (aux_buf != NULL) { + GST_ELEMENT_ERROR (self, STREAM, DEMUX, + ("Incomplete auxiliary stream. %d bytes missing", aux_size), (NULL)); + ret = GST_FLOW_ERROR; + goto done; + } + + if (last_offset != size) { + /* this means there was no SOS marker in the jpg, so we assume the JPG was + just a container */ + GST_DEBUG_OBJECT (self, "SOS marker wasn't found. 
MJPG is container only"); + gst_buffer_list_unref (jpeg_buf); + jpeg_buf = NULL; + } else { + ret = gst_pad_push_list (self->priv->jpeg_pad, jpeg_buf); + jpeg_buf = NULL; + } + + if (ret != GST_FLOW_OK) { + GST_WARNING_OBJECT (self, "Error pushing jpeg data"); + goto done; + } + +done: + /* In case of error, unref whatever was left */ + if (aux_it) + gst_buffer_list_iterator_free (aux_it); + if (aux_buf) + gst_buffer_list_unref (aux_buf); + if (jpeg_it) + gst_buffer_list_iterator_free (jpeg_it); + if (jpeg_buf) + gst_buffer_list_unref (jpeg_buf); + + /* We must always unref the input buffer since we never push it out */ + gst_buffer_unref (buf); + + return ret; +} diff --git a/sys/uvch264/gstuvch264_mjpgdemux.h b/sys/uvch264/gstuvch264_mjpgdemux.h new file mode 100644 index 000000000..8c4445201 --- /dev/null +++ b/sys/uvch264/gstuvch264_mjpgdemux.h @@ -0,0 +1,66 @@ +/* GStreamer + * + * uvch264_mjpg_demux: a demuxer for muxed stream in UVC H264 compliant MJPG + * + * Copyright (C) 2012 Cisco Systems, Inc. + * Author: Youness Alaoui <youness.alaoui@collabora.co.uk> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef __GST_UVC_H264_MJPG_DEMUX_H__ +#define __GST_UVC_H264_MJPG_DEMUX_H__ + +#include <gst/gst.h> + + +G_BEGIN_DECLS + +#define GST_TYPE_UVC_H264_MJPG_DEMUX \ + (gst_uvc_h264_mjpg_demux_get_type()) +#define GST_UVC_H264_MJPG_DEMUX(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), \ + GST_TYPE_UVC_H264_MJPG_DEMUX, \ + GstUvcH264MjpgDemux)) +#define GST_UVC_H264_MJPG_DEMUX_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), \ + GST_TYPE_UVC_H264_MJPG_DEMUX, \ + GstUvcH264MjpgDemuxClass)) +#define GST_IS_UVC_H264_MJPG_DEMUX(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), \ + GST_TYPE_UVC_H264_MJPG_DEMUX)) +#define GST_IS_UVC_H264_MJPG_DEMUX_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), \ + GST_TYPE_UVC_H264_MJPG_DEMUX)) + +typedef struct _GstUvcH264MjpgDemux GstUvcH264MjpgDemux; +typedef struct _GstUvcH264MjpgDemuxPrivate GstUvcH264MjpgDemuxPrivate; +typedef struct _GstUvcH264MjpgDemuxClass GstUvcH264MjpgDemuxClass; + +struct _GstUvcH264MjpgDemux { + GstElement element; + GstUvcH264MjpgDemuxPrivate *priv; +}; + +struct _GstUvcH264MjpgDemuxClass { + GstElementClass parent_class; +}; + +GType gst_uvc_h264_mjpg_demux_get_type (void); + +G_END_DECLS + +#endif /* __GST_UVC_H264_MJPG_DEMUX_H__ */ diff --git a/sys/uvch264/gstuvch264_src.c b/sys/uvch264/gstuvch264_src.c new file mode 100644 index 000000000..69555d3e1 --- /dev/null +++ b/sys/uvch264/gstuvch264_src.c @@ -0,0 +1,3180 @@ +/* + * GStreamer + * + * Copyright (C) 2012 Cisco Systems, Inc. 
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + + +/** + * SECTION:element-uvch264-src + * + * A camera bin src element that wraps v4l2src and implements UVC H264 + * Extension Units (XU) to control the H264 encoder in the camera + */ + +#ifdef HAVE_CONFIG_H +# include <config.h> +#endif + +#include <gst/video/video.h> +#include <linux/uvcvideo.h> +#include <linux/usb/video.h> +#include <sys/ioctl.h> +#include <string.h> + +#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB) +#include <gudev/gudev.h> +#include <libusb.h> + +typedef struct +{ + int8_t bLength; + int8_t bDescriptorType; + int8_t bDescriptorSubType; + int8_t bUnitID; + uint8_t guidExtensionCode[16]; +} __attribute__ ((__packed__)) xu_descriptor; + +#define GUID_FORMAT "02X%02X%02X%02X-%02X%02X%02X%02X-"\ + "%02X%02X%02X%02X-%02X%02X%02X%02X" +#define GUID_ARGS(guid) guid[0], guid[1], guid[2], guid[3], \ + guid[4], guid[5], guid[6], guid[7], \ + guid[8], guid[9], guid[10], guid[11], \ + guid[12], guid[13], guid[14], guid[15] + +#define USB_VIDEO_CONTROL 1 +#define USB_VIDEO_CONTROL_INTERFACE 0x24 +#define USB_VIDEO_CONTROL_XU_TYPE 0x06 +#endif + +#include "gstuvch264_src.h" +#include "gstuvch264-marshal.h" + +#ifndef UVCIOC_XU_FIND_UNIT +/* Define the needed structure if <linux/uvcvideo.h> is too old. + * This might fail though if the kernel itself does not support it. 
+ */ +struct uvc_xu_find_unit +{ + __u8 guid[16]; + __u8 unit; +}; +#define UVCIOC_XU_FIND_UNIT _IOWR('u', 0x22, struct uvc_xu_find_unit) +#endif + + +enum +{ + PROP_0, + /* uvch264_src properties */ + PROP_COLORSPACE_NAME, + PROP_JPEG_DECODER_NAME, + PROP_NUM_CLOCK_SAMPLES, + /* v4l2src properties */ + PROP_NUM_BUFFERS, + PROP_DEVICE, + PROP_DEVICE_NAME, + /* Static controls */ + PROP_INITIAL_BITRATE, + PROP_SLICE_UNITS, + PROP_SLICE_MODE, + PROP_IFRAME_PERIOD, + PROP_USAGE_TYPE, + PROP_ENTROPY, + PROP_ENABLE_SEI, + PROP_NUM_REORDER_FRAMES, + PROP_PREVIEW_FLIPPED, + PROP_LEAKY_BUCKET_SIZE, + /* Dynamic controls */ + PROP_RATE_CONTROL, + PROP_FIXED_FRAMERATE, + PROP_MAX_MBPS, /* read-only */ + PROP_LEVEL_IDC, + PROP_PEAK_BITRATE, + PROP_AVERAGE_BITRATE, + PROP_MIN_IFRAME_QP, + PROP_MAX_IFRAME_QP, + PROP_MIN_PFRAME_QP, + PROP_MAX_PFRAME_QP, + PROP_MIN_BFRAME_QP, + PROP_MAX_BFRAME_QP, + PROP_LTR_BUFFER_SIZE, + PROP_LTR_ENCODER_CONTROL, +}; +/* In caps : frame interval (fps), width, height, profile, mux */ +/* Ignored: temporal, spatial, SNR, MVC views, version, reset */ +/* Events: LTR, generate IDR */ + +enum +{ + /* action signals */ + SIGNAL_GET_ENUM_SETTING, + SIGNAL_GET_BOOLEAN_SETTING, + SIGNAL_GET_INT_SETTING, + LAST_SIGNAL +}; + +static guint _signals[LAST_SIGNAL]; + +/* Default values */ +#define DEFAULT_COLORSPACE_NAME "ffmpegcolorspace" +#define DEFAULT_JPEG_DECODER_NAME "jpegdec" +#define DEFAULT_NUM_CLOCK_SAMPLES 0 +#define DEFAULT_NUM_BUFFERS -1 +#define DEFAULT_DEVICE "/dev/video0" +#define DEFAULT_DEVICE_NAME NULL +#define DEFAULT_INITIAL_BITRATE 3000000 +#define DEFAULT_SLICE_UNITS 4 +#define DEFAULT_SLICE_MODE UVC_H264_SLICEMODE_SLICEPERFRAME +#define DEFAULT_IFRAME_PERIOD 10000 +#define DEFAULT_USAGE_TYPE UVC_H264_USAGETYPE_REALTIME +#define DEFAULT_ENTROPY UVC_H264_ENTROPY_CAVLC +#define DEFAULT_ENABLE_SEI FALSE +#define DEFAULT_NUM_REORDER_FRAMES 0 +#define DEFAULT_PREVIEW_FLIPPED FALSE +#define DEFAULT_LEAKY_BUCKET_SIZE 1000 +#define DEFAULT_RATE_CONTROL UVC_H264_RATECONTROL_CBR +#define DEFAULT_FIXED_FRAMERATE FALSE +#define DEFAULT_LEVEL_IDC 40 +#define DEFAULT_PEAK_BITRATE DEFAULT_INITIAL_BITRATE +#define DEFAULT_AVERAGE_BITRATE DEFAULT_INITIAL_BITRATE +#define DEFAULT_MIN_QP 10 +#define DEFAULT_MAX_QP 46 +#define DEFAULT_LTR_BUFFER_SIZE 0 +#define DEFAULT_LTR_ENCODER_CONTROL 0 + +#define NSEC_PER_SEC (G_USEC_PER_SEC * 1000) + + +GST_DEBUG_CATEGORY (uvc_h264_src_debug); +#define GST_CAT_DEFAULT uvc_h264_src_debug + +GST_BOILERPLATE (GstUvcH264Src, gst_uvc_h264_src, + GstBaseCameraSrc, GST_TYPE_BASE_CAMERA_SRC); + +#define GST_UVC_H264_SRC_VF_CAPS_STR \ + GST_VIDEO_CAPS_RGB ";" \ + GST_VIDEO_CAPS_RGB";" \ + GST_VIDEO_CAPS_BGR";" \ + GST_VIDEO_CAPS_RGBx";" \ + GST_VIDEO_CAPS_xRGB";" \ + GST_VIDEO_CAPS_BGRx";" \ + GST_VIDEO_CAPS_xBGR";" \ + GST_VIDEO_CAPS_RGBA";" \ + GST_VIDEO_CAPS_ARGB";" \ + GST_VIDEO_CAPS_BGRA";" \ + GST_VIDEO_CAPS_ABGR";" \ + GST_VIDEO_CAPS_RGB_16";" \ + GST_VIDEO_CAPS_RGB_15";" \ + "video/x-raw-rgb, bpp = (int)8, depth = (int)8, " \ + "width = "GST_VIDEO_SIZE_RANGE" , " \ + "height = " GST_VIDEO_SIZE_RANGE ", " \ + "framerate = "GST_VIDEO_FPS_RANGE ";" \ + GST_VIDEO_CAPS_GRAY8";" \ + GST_VIDEO_CAPS_GRAY16("BIG_ENDIAN")";" \ + GST_VIDEO_CAPS_GRAY16("LITTLE_ENDIAN")";" \ + GST_VIDEO_CAPS_YUV ("{ I420 , NV12 , NV21 , YV12 , YUY2 ," \ + " Y42B , Y444 , YUV9 , YVU9 , Y41B , Y800 , Y8 , GREY ," \ + " Y16 , UYVY , YVYU , IYU1 , v308 , AYUV, A420}") ";" \ + "image/jpeg, " \ + "width = " GST_VIDEO_SIZE_RANGE ", " \ + "height = " GST_VIDEO_SIZE_RANGE ", " \ 
+ "framerate = " GST_VIDEO_FPS_RANGE + +#define GST_UVC_H264_SRC_VID_CAPS_STR \ + GST_UVC_H264_SRC_VF_CAPS_STR ";" \ + "video/x-h264, " \ + "width = " GST_VIDEO_SIZE_RANGE ", " \ + "height = " GST_VIDEO_SIZE_RANGE ", " \ + "framerate = " GST_VIDEO_FPS_RANGE ", " \ + "stream-format = (string) { byte-stream, avc }, " \ + "alignment = (string) { au }, " \ + "profile = (string) { high, main, baseline, constrained-baseline }" + +static GstStaticPadTemplate vfsrc_template = +GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_VIEWFINDER_PAD_NAME, + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_UVC_H264_SRC_VF_CAPS_STR)); + +static GstStaticPadTemplate imgsrc_template = +GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_IMAGE_PAD_NAME, + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS_NONE); + +static GstStaticPadTemplate vidsrc_template = +GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME, + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_UVC_H264_SRC_VID_CAPS_STR)); + + +static void gst_uvc_h264_src_dispose (GObject * object); +static void gst_uvc_h264_src_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); +static void gst_uvc_h264_src_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec); +static gboolean gst_uvc_h264_src_event (GstPad * pad, GstEvent * event); +static gboolean gst_uvc_h264_src_send_event (GstElement * element, + GstEvent * event); +static gboolean gst_uvc_h264_src_construct_pipeline (GstBaseCameraSrc * + bcamsrc); +static gboolean gst_uvc_h264_src_set_mode (GstBaseCameraSrc * bcamsrc, + GstCameraBinMode mode); +static gboolean gst_uvc_h264_src_start_capture (GstBaseCameraSrc * camerasrc); +static void gst_uvc_h264_src_stop_capture (GstBaseCameraSrc * camerasrc); +static GstStateChangeReturn gst_uvc_h264_src_change_state (GstElement * element, + GstStateChange trans); +static gboolean gst_uvc_h264_src_buffer_probe (GstPad * pad, + GstBuffer * buffer, gpointer user_data); +static gboolean gst_uvc_h264_src_event_probe (GstPad * pad, + GstEvent * event, gpointer user_data); +static void gst_uvc_h264_src_pad_linking_cb (GstPad * pad, + GstPad * peer, gpointer user_data); +static GstCaps *gst_uvc_h264_src_getcaps (GstPad * pad); + + +static void v4l2src_prepare_format (GstElement * v4l2src, gint fd, guint fourcc, + guint width, guint height, gpointer user_data); +static void fill_probe_commit (GstUvcH264Src * self, + uvcx_video_config_probe_commit_t * probe, guint32 frame_interval, + guint32 width, guint32 height, guint32 profile, + UvcH264StreamFormat stream_format); +static gboolean xu_query (GstUvcH264Src * self, guint selector, guint query, + guchar * data); + +static void set_rate_control (GstUvcH264Src * self); +static void set_level_idc (GstUvcH264Src * self); +static void set_bitrate (GstUvcH264Src * self); +static void set_qp (GstUvcH264Src * self, gint type); +static void set_ltr (GstUvcH264Src * self); +static void update_rate_control (GstUvcH264Src * self); +static guint32 update_level_idc_and_get_max_mbps (GstUvcH264Src * self); +static void update_bitrate (GstUvcH264Src * self); +static gboolean update_qp (GstUvcH264Src * self, gint type); +static void update_ltr (GstUvcH264Src * self); + +static gboolean gst_uvc_h264_src_get_enum_setting (GstUvcH264Src * self, + gchar * property, gint * mask, gint * default_value); +static gboolean gst_uvc_h264_src_get_boolean_setting (GstUvcH264Src * self, + gchar * property, gboolean * changeable, gboolean * def); +static gboolean 
gst_uvc_h264_src_get_int_setting (GstUvcH264Src * self, + gchar * property, gint * min, gint * def, gint * max); + +static void +gst_uvc_h264_src_base_init (gpointer g_class) +{ + GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class); + GstPadTemplate *pt; + + GST_DEBUG_CATEGORY_INIT (uvc_h264_src_debug, "uvch264_src", + 0, "UVC H264 Compliant camera bin source"); + + gst_element_class_set_details_simple (gstelement_class, + "UVC H264 Source", + "Source/Video", + "UVC H264 Encoding camera source", + "Youness Alaoui <youness.alaoui@collabora.co.uk>"); + + /* Don't use gst_element_class_add_static_pad_template in order to keep + * the plugin compatible with gst 0.10.35 */ + pt = gst_static_pad_template_get (&vidsrc_template); + gst_element_class_add_pad_template (gstelement_class, pt); + gst_object_unref (pt); + + pt = gst_static_pad_template_get (&imgsrc_template); + gst_element_class_add_pad_template (gstelement_class, pt); + gst_object_unref (pt); + + pt = gst_static_pad_template_get (&vfsrc_template); + gst_element_class_add_pad_template (gstelement_class, pt); + gst_object_unref (pt); +} + +static void +gst_uvc_h264_src_class_init (GstUvcH264SrcClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + GstBaseCameraSrcClass *gstbasecamerasrc_class; + + gobject_class = G_OBJECT_CLASS (klass); + gstelement_class = GST_ELEMENT_CLASS (klass); + gstbasecamerasrc_class = GST_BASE_CAMERA_SRC_CLASS (klass); + + gobject_class->dispose = gst_uvc_h264_src_dispose; + gobject_class->set_property = gst_uvc_h264_src_set_property; + gobject_class->get_property = gst_uvc_h264_src_get_property; + + gstelement_class->change_state = gst_uvc_h264_src_change_state; + gstelement_class->send_event = gst_uvc_h264_src_send_event; + + gstbasecamerasrc_class->construct_pipeline = + gst_uvc_h264_src_construct_pipeline; + gstbasecamerasrc_class->set_mode = gst_uvc_h264_src_set_mode; + gstbasecamerasrc_class->start_capture = gst_uvc_h264_src_start_capture; + gstbasecamerasrc_class->stop_capture = gst_uvc_h264_src_stop_capture; + + /* Properties */ + g_object_class_install_property (gobject_class, PROP_COLORSPACE_NAME, + g_param_spec_string ("colorspace-name", "colorspace element name", + "The name of the colorspace element", + DEFAULT_COLORSPACE_NAME, G_PARAM_CONSTRUCT | G_PARAM_READWRITE | + GST_PARAM_MUTABLE_READY | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_JPEG_DECODER_NAME, + g_param_spec_string ("jpeg-decoder-name", "jpeg decoder element name", + "The name of the jpeg decoder element", + DEFAULT_JPEG_DECODER_NAME, G_PARAM_CONSTRUCT | G_PARAM_READWRITE | + GST_PARAM_MUTABLE_READY | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_NUM_CLOCK_SAMPLES, + g_param_spec_int ("num-clock-samples", "num-clock-samples", + "Number of clock samples to gather for the PTS synchronization" + " (-1 = unlimited)", + 0, G_MAXINT, DEFAULT_NUM_CLOCK_SAMPLES, + G_PARAM_READWRITE | G_PARAM_CONSTRUCT | GST_PARAM_MUTABLE_PLAYING | + G_PARAM_STATIC_STRINGS)); + + /* v4l2src proxied properties */ + g_object_class_install_property (gobject_class, PROP_NUM_BUFFERS, + g_param_spec_int ("num-buffers", "num-buffers", + "Number of buffers to output before sending EOS (-1 = unlimited)", + -1, G_MAXINT, DEFAULT_NUM_BUFFERS, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_DEVICE, + g_param_spec_string ("device", "device", + "Device location", + DEFAULT_DEVICE, G_PARAM_READWRITE | 
G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_DEVICE_NAME, + g_param_spec_string ("device-name", "Device name", + "Name of the device", DEFAULT_DEVICE_NAME, + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + + /* Static controls */ + g_object_class_install_property (gobject_class, PROP_INITIAL_BITRATE, + g_param_spec_uint ("initial-bitrate", "Initial bitrate", + "Initial bitrate in bits/second (static control)", + 0, G_MAXUINT, DEFAULT_INITIAL_BITRATE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + g_object_class_install_property (gobject_class, PROP_SLICE_UNITS, + g_param_spec_uint ("slice-units", "Slice units", + "Slice units (static control)", + 0, G_MAXUINT16, DEFAULT_SLICE_UNITS, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + g_object_class_install_property (gobject_class, PROP_SLICE_MODE, + g_param_spec_enum ("slice-mode", "Slice mode", + "Defines the unit of the slice-units property (static control)", + UVC_H264_SLICEMODE_TYPE, + DEFAULT_SLICE_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + g_object_class_install_property (gobject_class, PROP_IFRAME_PERIOD, + g_param_spec_uint ("iframe-period", "I Frame Period", + "Time between IDR frames in milliseconds (static control)", + 0, G_MAXUINT16, DEFAULT_IFRAME_PERIOD, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + g_object_class_install_property (gobject_class, PROP_USAGE_TYPE, + g_param_spec_enum ("usage-type", "Usage type", + "The usage type (static control)", + UVC_H264_USAGETYPE_TYPE, DEFAULT_USAGE_TYPE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + g_object_class_install_property (gobject_class, PROP_ENTROPY, + g_param_spec_enum ("entropy", "Entropy", + "Entropy (static control)", + UVC_H264_ENTROPY_TYPE, DEFAULT_ENTROPY, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + g_object_class_install_property (gobject_class, PROP_ENABLE_SEI, + g_param_spec_boolean ("enable-sei", "Enable SEI", + "Enable SEI picture timing (static control)", + DEFAULT_ENABLE_SEI, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + g_object_class_install_property (gobject_class, PROP_NUM_REORDER_FRAMES, + g_param_spec_uint ("num-reorder-frames", "Number of Reorder frames", + "Number of B frames between the references frames (static control)", + 0, G_MAXUINT8, DEFAULT_NUM_REORDER_FRAMES, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + g_object_class_install_property (gobject_class, PROP_PREVIEW_FLIPPED, + g_param_spec_boolean ("preview-flipped", "Flip preview", + "Horizontal flipped image for non H.264 streams (static control)", + DEFAULT_PREVIEW_FLIPPED, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + g_object_class_install_property (gobject_class, PROP_LEAKY_BUCKET_SIZE, + g_param_spec_uint ("leaky-bucket-size", "Size of the leaky bucket size", + "Size of the leaky bucket size in milliseconds (static control)", + 0, G_MAXUINT16, DEFAULT_LEAKY_BUCKET_SIZE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_READY)); + + /* Dynamic controls */ + g_object_class_install_property (gobject_class, PROP_RATE_CONTROL, + g_param_spec_enum ("rate-control", "Rate control", + "Rate control mode (static & dynamic control)", + UVC_H264_RATECONTROL_TYPE, DEFAULT_RATE_CONTROL, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + 
g_object_class_install_property (gobject_class, PROP_FIXED_FRAMERATE, + g_param_spec_boolean ("fixed-framerate", "Fixed framerate", + "Fixed framerate (static & dynamic control)", + DEFAULT_FIXED_FRAMERATE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_MAX_MBPS, + g_param_spec_uint ("max-mbps", "Max macroblocks/second", + "The number of macroblocks per second for the maximum processing rate", + 0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_LEVEL_IDC, + g_param_spec_uint ("level-idc", "Level IDC", + "Level IDC (dynamic control)", + 0, G_MAXUINT8, DEFAULT_LEVEL_IDC, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_PEAK_BITRATE, + g_param_spec_uint ("peak-bitrate", "Peak bitrate", + "The peak bitrate in bits/second (dynamic control)", + 0, G_MAXUINT, DEFAULT_PEAK_BITRATE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_AVERAGE_BITRATE, + g_param_spec_uint ("average-bitrate", "Average bitrate", + "The average bitrate in bits/second (dynamic control)", + 0, G_MAXUINT, DEFAULT_AVERAGE_BITRATE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_MIN_IFRAME_QP, + g_param_spec_int ("min-iframe-qp", "Minimum I frame QP", + "The minimum Quantization step size for I frames (dynamic control)", + -G_MAXINT8, G_MAXINT8, DEFAULT_MIN_QP, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_MAX_IFRAME_QP, + g_param_spec_int ("max-iframe-qp", "Minimum I frame QP", + "The minimum Quantization step size for I frames (dynamic control)", + -G_MAXINT8, G_MAXINT8, DEFAULT_MAX_QP, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_MIN_PFRAME_QP, + g_param_spec_int ("min-pframe-qp", "Minimum P frame QP", + "The minimum Quantization step size for P frames (dynamic control)", + -G_MAXINT8, G_MAXINT8, DEFAULT_MIN_QP, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_MAX_PFRAME_QP, + g_param_spec_int ("max-pframe-qp", "Minimum P frame QP", + "The minimum Quantization step size for P frames (dynamic control)", + -G_MAXINT8, G_MAXINT8, DEFAULT_MAX_QP, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_MIN_BFRAME_QP, + g_param_spec_int ("min-bframe-qp", "Minimum B frame QP", + "The minimum Quantization step size for B frames (dynamic control)", + -G_MAXINT8, G_MAXINT8, DEFAULT_MIN_QP, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_MAX_BFRAME_QP, + g_param_spec_int ("max-bframe-qp", "Minimum B frame QP", + "The minimum Quantization step size for B frames (dynamic control)", + -G_MAXINT8, G_MAXINT8, DEFAULT_MAX_QP, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_LTR_BUFFER_SIZE, + g_param_spec_int ("ltr-buffer-size", "LTR Buffer size", + "Total number of Long-Term Reference frames (dynamic control)", + 0, G_MAXUINT8, 
DEFAULT_LTR_BUFFER_SIZE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + g_object_class_install_property (gobject_class, PROP_LTR_ENCODER_CONTROL, + g_param_spec_int ("ltr-encoder-control", "LTR frames controled by device", + "Number of LTR frames the device can control (dynamic control)", + 0, G_MAXUINT8, DEFAULT_LTR_ENCODER_CONTROL, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_MUTABLE_PLAYING)); + + _signals[SIGNAL_GET_ENUM_SETTING] = + g_signal_new_class_handler ("get-enum-setting", + G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, + G_CALLBACK (gst_uvc_h264_src_get_enum_setting), + NULL, NULL, __gst_uvc_h264_marshal_BOOLEAN__STRING_POINTER_POINTER, + G_TYPE_BOOLEAN, 3, G_TYPE_STRING, G_TYPE_POINTER, G_TYPE_POINTER, 0); + _signals[SIGNAL_GET_BOOLEAN_SETTING] = + g_signal_new_class_handler ("get-boolean-setting", + G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, + G_CALLBACK (gst_uvc_h264_src_get_boolean_setting), NULL, NULL, + __gst_uvc_h264_marshal_BOOLEAN__STRING_POINTER_POINTER, + G_TYPE_BOOLEAN, 3, G_TYPE_STRING, G_TYPE_POINTER, G_TYPE_POINTER, 0); + _signals[SIGNAL_GET_INT_SETTING] = + g_signal_new_class_handler ("get-int-setting", + G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, + G_CALLBACK (gst_uvc_h264_src_get_int_setting), NULL, NULL, + __gst_uvc_h264_marshal_BOOLEAN__STRING_POINTER_POINTER_POINTER, + G_TYPE_BOOLEAN, 4, G_TYPE_STRING, G_TYPE_POINTER, G_TYPE_POINTER, + G_TYPE_POINTER, 0); + +} + +static void +gst_uvc_h264_src_init (GstUvcH264Src * self, GstUvcH264SrcClass * klass) +{ + self->vfsrc = + gst_ghost_pad_new_no_target (GST_BASE_CAMERA_SRC_VIEWFINDER_PAD_NAME, + GST_PAD_SRC); + gst_pad_set_getcaps_function (self->vfsrc, + GST_DEBUG_FUNCPTR (gst_uvc_h264_src_getcaps)); + gst_element_add_pad (GST_ELEMENT (self), self->vfsrc); + + self->imgsrc = + gst_ghost_pad_new_no_target (GST_BASE_CAMERA_SRC_IMAGE_PAD_NAME, + GST_PAD_SRC); + gst_element_add_pad (GST_ELEMENT (self), self->imgsrc); + + self->vidsrc = + gst_ghost_pad_new_no_target (GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME, + GST_PAD_SRC); + gst_pad_set_getcaps_function (self->vidsrc, + GST_DEBUG_FUNCPTR (gst_uvc_h264_src_getcaps)); + gst_element_add_pad (GST_ELEMENT (self), self->vidsrc); + gst_pad_add_buffer_probe (self->vidsrc, + (GCallback) gst_uvc_h264_src_buffer_probe, self); + gst_pad_add_event_probe (self->vfsrc, + (GCallback) gst_uvc_h264_src_event_probe, self); + gst_pad_add_event_probe (self->vidsrc, + (GCallback) gst_uvc_h264_src_event_probe, self); + + self->srcpad_event_func = GST_PAD_EVENTFUNC (self->vfsrc); + + gst_pad_set_event_function (self->imgsrc, gst_uvc_h264_src_event); + gst_pad_set_event_function (self->vidsrc, gst_uvc_h264_src_event); + gst_pad_set_event_function (self->vfsrc, gst_uvc_h264_src_event); + + g_signal_connect (self->vidsrc, "linked", + (GCallback) gst_uvc_h264_src_pad_linking_cb, self); + g_signal_connect (self->vidsrc, "unlinked", + (GCallback) gst_uvc_h264_src_pad_linking_cb, self); + g_signal_connect (self->vfsrc, "linked", + (GCallback) gst_uvc_h264_src_pad_linking_cb, self); + g_signal_connect (self->vfsrc, "unlinked", + (GCallback) gst_uvc_h264_src_pad_linking_cb, self); + + self->vid_newseg = FALSE; + self->vf_newseg = FALSE; + self->v4l2_fd = -1; + gst_base_camera_src_set_mode (GST_BASE_CAMERA_SRC (self), MODE_VIDEO); + + self->main_format = UVC_H264_SRC_FORMAT_NONE; + self->main_width = 0; + self->main_height = 0; + self->main_frame_interval = 0; + self->main_stream_format = 
UVC_H264_STREAMFORMAT_ANNEXB; + self->main_profile = UVC_H264_PROFILE_CONSTRAINED_BASELINE; + self->secondary_format = UVC_H264_SRC_FORMAT_NONE; + self->secondary_width = 0; + self->secondary_height = 0; + self->secondary_frame_interval = 0; + + /* v4l2src properties */ + self->num_buffers = DEFAULT_NUM_BUFFERS; + self->device = g_strdup (DEFAULT_DEVICE); + + /* Static controls */ + self->initial_bitrate = DEFAULT_INITIAL_BITRATE; + self->slice_units = DEFAULT_SLICE_UNITS; + self->slice_mode = DEFAULT_SLICE_MODE; + self->iframe_period = DEFAULT_IFRAME_PERIOD; + self->usage_type = DEFAULT_USAGE_TYPE; + self->entropy = DEFAULT_ENTROPY; + self->enable_sei = DEFAULT_ENABLE_SEI; + self->num_reorder_frames = DEFAULT_NUM_REORDER_FRAMES; + self->preview_flipped = DEFAULT_PREVIEW_FLIPPED; + self->leaky_bucket_size = DEFAULT_LEAKY_BUCKET_SIZE; + + /* Dynamic controls */ + self->rate_control = DEFAULT_RATE_CONTROL; + self->fixed_framerate = DEFAULT_FIXED_FRAMERATE; + self->level_idc = DEFAULT_LEVEL_IDC; + self->peak_bitrate = DEFAULT_PEAK_BITRATE; + self->average_bitrate = DEFAULT_AVERAGE_BITRATE; + self->min_qp[QP_I_FRAME] = DEFAULT_MIN_QP; + self->max_qp[QP_I_FRAME] = DEFAULT_MAX_QP; + self->min_qp[QP_P_FRAME] = DEFAULT_MIN_QP; + self->max_qp[QP_P_FRAME] = DEFAULT_MAX_QP; + self->min_qp[QP_B_FRAME] = DEFAULT_MIN_QP; + self->max_qp[QP_B_FRAME] = DEFAULT_MAX_QP; + self->ltr_buffer_size = DEFAULT_LTR_BUFFER_SIZE; + self->ltr_encoder_control = DEFAULT_LTR_ENCODER_CONTROL; +} + +static void +gst_uvc_h264_src_dispose (GObject * object) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (object); + +#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB) + if (self->usb_ctx) + libusb_exit (self->usb_ctx); + self->usb_ctx = NULL; +#else + (void) self; +#endif + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +static void +gst_uvc_h264_src_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (object); + + switch (prop_id) { + case PROP_COLORSPACE_NAME: + g_free (self->colorspace_name); + self->colorspace_name = g_value_dup_string (value); + break; + case PROP_JPEG_DECODER_NAME: + g_free (self->jpeg_decoder_name); + self->jpeg_decoder_name = g_value_dup_string (value); + break; + case PROP_NUM_CLOCK_SAMPLES: + self->num_clock_samples = g_value_get_int (value); + if (self->mjpg_demux) + g_object_set (self->mjpg_demux, + "num-clock-samples", self->num_clock_samples, NULL); + break; + /* v4l2 properties */ + case PROP_NUM_BUFFERS: + self->num_buffers = g_value_get_int (value); + if (self->v4l2_src) + g_object_set_property (G_OBJECT (self->v4l2_src), "num-buffers", value); + break; + case PROP_DEVICE: + g_free (self->device); + self->device = g_value_dup_string (value); + if (self->v4l2_src) + g_object_set_property (G_OBJECT (self->v4l2_src), "device", value); + break; + /* Static controls */ + case PROP_INITIAL_BITRATE: + self->initial_bitrate = g_value_get_uint (value); + break; + case PROP_SLICE_UNITS: + self->slice_units = g_value_get_uint (value); + break; + case PROP_SLICE_MODE: + self->slice_mode = g_value_get_enum (value); + break; + case PROP_IFRAME_PERIOD: + self->iframe_period = g_value_get_uint (value); + break; + case PROP_USAGE_TYPE: + self->usage_type = g_value_get_enum (value); + break; + case PROP_ENTROPY: + self->entropy = g_value_get_enum (value); + break; + case PROP_ENABLE_SEI: + self->enable_sei = g_value_get_boolean (value); + break; + case PROP_NUM_REORDER_FRAMES: + self->num_reorder_frames = g_value_get_uint 
(value); + break; + case PROP_PREVIEW_FLIPPED: + self->preview_flipped = g_value_get_boolean (value); + break; + case PROP_LEAKY_BUCKET_SIZE: + self->leaky_bucket_size = g_value_get_uint (value); + break; + + + /* Dynamic controls */ + case PROP_RATE_CONTROL: + self->rate_control = g_value_get_enum (value); + set_rate_control (self); + update_rate_control (self); + break; + case PROP_FIXED_FRAMERATE: + self->fixed_framerate = g_value_get_boolean (value); + set_rate_control (self); + update_rate_control (self); + break; + case PROP_LEVEL_IDC: + self->level_idc = g_value_get_uint (value); + set_level_idc (self); + update_level_idc_and_get_max_mbps (self); + break; + case PROP_PEAK_BITRATE: + self->peak_bitrate = g_value_get_uint (value); + set_bitrate (self); + update_bitrate (self); + break; + case PROP_AVERAGE_BITRATE: + self->average_bitrate = g_value_get_uint (value); + set_bitrate (self); + update_bitrate (self); + break; + case PROP_MIN_IFRAME_QP: + self->min_qp[QP_I_FRAME] = g_value_get_int (value); + set_qp (self, QP_I_FRAME); + update_qp (self, QP_I_FRAME); + break; + case PROP_MAX_IFRAME_QP: + self->max_qp[QP_I_FRAME] = g_value_get_int (value); + set_qp (self, QP_I_FRAME); + update_qp (self, QP_I_FRAME); + break; + case PROP_MIN_PFRAME_QP: + self->min_qp[QP_P_FRAME] = g_value_get_int (value); + set_qp (self, QP_P_FRAME); + update_qp (self, QP_P_FRAME); + break; + case PROP_MAX_PFRAME_QP: + self->max_qp[QP_P_FRAME] = g_value_get_int (value); + set_qp (self, QP_P_FRAME); + update_qp (self, QP_P_FRAME); + break; + case PROP_MIN_BFRAME_QP: + self->min_qp[QP_B_FRAME] = g_value_get_int (value); + set_qp (self, QP_B_FRAME); + update_qp (self, QP_B_FRAME); + break; + case PROP_MAX_BFRAME_QP: + self->max_qp[QP_B_FRAME] = g_value_get_int (value); + set_qp (self, QP_B_FRAME); + update_qp (self, QP_B_FRAME); + break; + case PROP_LTR_BUFFER_SIZE: + self->ltr_buffer_size = g_value_get_int (value); + set_ltr (self); + update_ltr (self); + break; + case PROP_LTR_ENCODER_CONTROL: + self->ltr_encoder_control = g_value_get_int (value); + set_ltr (self); + update_ltr (self); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec); + break; + } +} + +static void +gst_uvc_h264_src_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (object); + uvcx_video_config_probe_commit_t probe; + + switch (prop_id) { + case PROP_INITIAL_BITRATE: + case PROP_SLICE_UNITS: + case PROP_SLICE_MODE: + case PROP_IFRAME_PERIOD: + case PROP_USAGE_TYPE: + case PROP_ENTROPY: + case PROP_ENABLE_SEI: + case PROP_NUM_REORDER_FRAMES: + case PROP_PREVIEW_FLIPPED: + case PROP_LEAKY_BUCKET_SIZE: + fill_probe_commit (self, &probe, 0, 0, 0, 0, 0); + if (GST_STATE (self) >= GST_STATE_PAUSED) { + xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, + (guchar *) & probe); + } + break; + default: + break; + } + + switch (prop_id) { + case PROP_COLORSPACE_NAME: + g_value_set_string (value, self->colorspace_name); + break; + case PROP_JPEG_DECODER_NAME: + g_value_set_string (value, self->jpeg_decoder_name); + break; + case PROP_NUM_CLOCK_SAMPLES: + g_value_set_int (value, self->num_clock_samples); + break; + /* v4l2src properties */ + case PROP_NUM_BUFFERS: + g_value_set_int (value, self->num_buffers); + break; + case PROP_DEVICE: + g_value_set_string (value, self->device); + break; + case PROP_DEVICE_NAME: + if (self->v4l2_src) + g_object_get_property (G_OBJECT (self->v4l2_src), "device-name", value); + else + g_value_set_static_string 
(value, ""); + break; + /* Static controls */ + case PROP_INITIAL_BITRATE: + g_value_set_uint (value, probe.dwBitRate); + break; + case PROP_SLICE_UNITS: + g_value_set_uint (value, probe.wSliceUnits); + break; + case PROP_SLICE_MODE: + g_value_set_enum (value, probe.wSliceMode); + break; + case PROP_IFRAME_PERIOD: + g_value_set_uint (value, probe.wIFramePeriod); + break; + case PROP_USAGE_TYPE: + g_value_set_enum (value, probe.bUsageType); + break; + case PROP_ENTROPY: + g_value_set_enum (value, probe.bEntropyCABAC); + break; + case PROP_ENABLE_SEI: + g_value_set_boolean (value, + (probe.bTimestamp == UVC_H264_TIMESTAMP_SEI_ENABLE)); + break; + case PROP_NUM_REORDER_FRAMES: + g_value_set_uint (value, probe.bNumOfReorderFrames); + break; + case PROP_PREVIEW_FLIPPED: + g_value_set_boolean (value, + (probe.bPreviewFlipped == UVC_H264_PREFLIPPED_HORIZONTAL)); + break; + case PROP_LEAKY_BUCKET_SIZE: + g_value_set_uint (value, probe.wLeakyBucketSize); + break; + + /* Dynamic controls */ + case PROP_RATE_CONTROL: + update_rate_control (self); + g_value_set_enum (value, self->rate_control); + break; + case PROP_FIXED_FRAMERATE: + update_rate_control (self); + g_value_set_boolean (value, self->fixed_framerate); + break; + case PROP_MAX_MBPS: + g_value_set_uint (value, update_level_idc_and_get_max_mbps (self)); + break; + case PROP_LEVEL_IDC: + update_level_idc_and_get_max_mbps (self); + g_value_set_uint (value, self->level_idc); + break; + case PROP_PEAK_BITRATE: + update_bitrate (self); + g_value_set_uint (value, self->peak_bitrate); + break; + case PROP_AVERAGE_BITRATE: + update_bitrate (self); + g_value_set_uint (value, self->average_bitrate); + break; + case PROP_MIN_IFRAME_QP: + update_qp (self, QP_I_FRAME); + g_value_set_int (value, self->min_qp[QP_I_FRAME]); + break; + case PROP_MAX_IFRAME_QP: + update_qp (self, QP_I_FRAME); + g_value_set_int (value, self->max_qp[QP_I_FRAME]); + break; + case PROP_MIN_PFRAME_QP: + update_qp (self, QP_P_FRAME); + g_value_set_int (value, self->min_qp[QP_P_FRAME]); + break; + case PROP_MAX_PFRAME_QP: + update_qp (self, QP_P_FRAME); + g_value_set_int (value, self->max_qp[QP_P_FRAME]); + break; + case PROP_MIN_BFRAME_QP: + update_qp (self, QP_B_FRAME); + g_value_set_int (value, self->min_qp[QP_B_FRAME]); + break; + case PROP_MAX_BFRAME_QP: + update_qp (self, QP_B_FRAME); + g_value_set_int (value, self->max_qp[QP_B_FRAME]); + break; + case PROP_LTR_BUFFER_SIZE: + update_ltr (self); + g_value_set_int (value, self->ltr_buffer_size); + break; + case PROP_LTR_ENCODER_CONTROL: + update_ltr (self); + g_value_set_int (value, self->ltr_encoder_control); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec); + break; + } +} + +/* Set dynamic controls */ +static void +set_rate_control (GstUvcH264Src * self) +{ + uvcx_rate_control_mode_t req; + + if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " RATE_CONTROL GET_CUR error"); + return; + } + + req.bRateControlMode = self->rate_control; + if (self->fixed_framerate) + req.bRateControlMode |= UVC_H264_RATECONTROL_FIXED_FRM_FLG; + + if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_SET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " RATE_CONTROL SET_CUR error"); + return; + } +} + +static void +set_level_idc (GstUvcH264Src * self) +{ + uvcx_video_advance_config_t req; + + if (!xu_query (self, UVCX_VIDEO_ADVANCE_CONFIG, UVC_GET_CUR, + (guchar *) & req)) { + GST_WARNING_OBJECT (self, " VIDEO_ADVANCE_CONFIG GET_CUR error"); + return; + } + + 
req.blevel_idc = self->level_idc; + if (!xu_query (self, UVCX_VIDEO_ADVANCE_CONFIG, UVC_SET_CUR, + (guchar *) & req)) { + GST_WARNING_OBJECT (self, " VIDEO_ADVANCE_CONFIG SET_CUR error"); + return; + } +} + +static void +set_bitrate (GstUvcH264Src * self) +{ + uvcx_bitrate_layers_t req; + + if (!xu_query (self, UVCX_BITRATE_LAYERS, UVC_GET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " BITRATE_LAYERS GET_CUR error"); + return; + } + + req.dwPeakBitrate = self->peak_bitrate; + req.dwAverageBitrate = self->average_bitrate; + if (!xu_query (self, UVCX_BITRATE_LAYERS, UVC_SET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " BITRATE_LAYERS SET_CUR error"); + return; + } +} + +static void +set_qp (GstUvcH264Src * self, gint type) +{ + uvcx_qp_steps_layers_t req; + + req.wLayerID = 0; + switch (type) { + case QP_I_FRAME: + req.bFrameType = UVC_H264_QP_STEPS_I_FRAME_TYPE; + break; + case QP_P_FRAME: + req.bFrameType = UVC_H264_QP_STEPS_P_FRAME_TYPE; + break; + case QP_B_FRAME: + req.bFrameType = UVC_H264_QP_STEPS_B_FRAME_TYPE; + break; + default: + return; + } + req.bMinQp = 0; + req.bMaxQp = 0; + if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_SET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS SET_CUR error"); + return; + } + + if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_GET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS GET_CUR error"); + return; + } + + req.bMinQp = self->min_qp[type]; + req.bMaxQp = self->max_qp[type]; + if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_SET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS SET_CUR error"); + return; + } +} + +static void +set_ltr (GstUvcH264Src * self) +{ + uvcx_ltr_buffer_size_control_t req; + + if (!xu_query (self, UVCX_LTR_BUFFER_SIZE_CONTROL, UVC_GET_CUR, + (guchar *) & req)) { + GST_WARNING_OBJECT (self, " LTR_BUFFER_SIZE GET_CUR error"); + return; + } + + req.bLTRBufferSize = self->ltr_buffer_size; + req.bLTREncoderControl = self->ltr_encoder_control; + if (!xu_query (self, UVCX_LTR_BUFFER_SIZE_CONTROL, UVC_SET_CUR, + (guchar *) & req)) { + GST_WARNING_OBJECT (self, "LTR_BUFFER_SIZE SET_CUR error"); + return; + } +} + +/* Get Dynamic controls */ + +static void +update_rate_control (GstUvcH264Src * self) +{ + uvcx_rate_control_mode_t req; + + if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " RATE_CONTROL GET_CUR error"); + return; + } + + if (self->rate_control != (req.bRateControlMode & + ~UVC_H264_RATECONTROL_FIXED_FRM_FLG)) { + self->rate_control = (req.bRateControlMode & + ~UVC_H264_RATECONTROL_FIXED_FRM_FLG); + g_object_notify (G_OBJECT (self), "rate-control"); + } + if (self->fixed_framerate != ((req.bRateControlMode & + UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0)) { + self->fixed_framerate = ((req.bRateControlMode & + UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0); + g_object_notify (G_OBJECT (self), "fixed-framerate"); + } +} + + +static guint32 +update_level_idc_and_get_max_mbps (GstUvcH264Src * self) +{ + uvcx_video_advance_config_t req; + + if (!xu_query (self, UVCX_VIDEO_ADVANCE_CONFIG, UVC_GET_CUR, + (guchar *) & req)) { + GST_WARNING_OBJECT (self, " VIDEO_ADVANCE_CONFIG GET_CUR error"); + return 0; + } + + if (self->level_idc != req.blevel_idc) { + self->level_idc = req.blevel_idc; + g_object_notify (G_OBJECT (self), "level-idc"); + } + return req.dwMb_max; +} + +static void +update_bitrate (GstUvcH264Src * self) +{ + uvcx_bitrate_layers_t req; + + if (!xu_query (self, 
UVCX_BITRATE_LAYERS, UVC_GET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " BITRATE_LAYERS GET_CUR error"); + return; + } + if (self->peak_bitrate != req.dwPeakBitrate) { + self->peak_bitrate = req.dwPeakBitrate; + g_object_notify (G_OBJECT (self), "peak-bitrate"); + } + if (self->average_bitrate != req.dwAverageBitrate) { + self->average_bitrate = req.dwAverageBitrate; + g_object_notify (G_OBJECT (self), "average-bitrate"); + } +} + +static gboolean +update_qp (GstUvcH264Src * self, gint type) +{ + uvcx_qp_steps_layers_t req; + guint8 frame_type; + + req.wLayerID = 0; + switch (type) { + case QP_I_FRAME: + frame_type = UVC_H264_QP_STEPS_I_FRAME_TYPE; + break; + case QP_P_FRAME: + frame_type = UVC_H264_QP_STEPS_P_FRAME_TYPE; + break; + case QP_B_FRAME: + frame_type = UVC_H264_QP_STEPS_B_FRAME_TYPE; + break; + default: + return FALSE; + } + req.bFrameType = frame_type; + req.bMinQp = 0; + req.bMaxQp = 0; + if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_SET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS SET_CUR error"); + return FALSE; + } + + if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_GET_CUR, (guchar *) & req)) { + GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS GET_CUR error"); + return FALSE; + } + + if (req.bFrameType == frame_type) { + if (self->min_qp[type] != req.bMinQp) { + self->min_qp[type] = req.bMinQp; + switch (type) { + case QP_I_FRAME: + g_object_notify (G_OBJECT (self), "min-iframe-qp"); + break; + case QP_P_FRAME: + g_object_notify (G_OBJECT (self), "min-pframe-qp"); + break; + case QP_B_FRAME: + g_object_notify (G_OBJECT (self), "min-bframe-qp"); + break; + default: + break; + } + } + if (self->max_qp[type] != req.bMaxQp) { + self->max_qp[type] = req.bMaxQp; + switch (type) { + case QP_I_FRAME: + g_object_notify (G_OBJECT (self), "max-iframe-qp"); + break; + case QP_P_FRAME: + g_object_notify (G_OBJECT (self), "max-pframe-qp"); + break; + case QP_B_FRAME: + g_object_notify (G_OBJECT (self), "max-bframe-qp"); + break; + default: + break; + } + } + return TRUE; + } else { + self->min_qp[type] = 0xFF; + self->max_qp[type] = 0xFF; + return FALSE; + } +} + +static void +update_ltr (GstUvcH264Src * self) +{ + uvcx_ltr_buffer_size_control_t req; + + if (!xu_query (self, UVCX_LTR_BUFFER_SIZE_CONTROL, UVC_GET_CUR, + (guchar *) & req)) { + GST_WARNING_OBJECT (self, " LTR_BUFFER_SIZE GET_CUR error"); + return; + } + + if (self->ltr_buffer_size != req.bLTRBufferSize) { + self->ltr_buffer_size = req.bLTRBufferSize; + g_object_notify (G_OBJECT (self), "ltr-buffer-size"); + } + if (self->ltr_encoder_control != req.bLTREncoderControl) { + self->ltr_encoder_control = req.bLTREncoderControl; + g_object_notify (G_OBJECT (self), "ltr-encoder-control"); + } +} + +#define STORE_MIN_DEF_MAX(type) \ + *(type *)min = *((type *) (min_p + offset)); \ + *(type *)def = *((type *) (def_p + offset)); \ + *(type *)max = *((type *) (max_p + offset)); + +static gboolean +probe_setting (GstUvcH264Src * self, uvcx_control_selector_t selector, + guint offset, gint size, gpointer min, gpointer def, gpointer max) +{ + guchar *min_p, *def_p, *max_p; + gboolean ret = FALSE; + __u16 len; + + if (!xu_query (self, selector, UVC_GET_LEN, (guchar *) & len)) { + GST_WARNING_OBJECT (self, "probe_setting GET_LEN error"); + return FALSE; + } + min_p = g_malloc0 (len); + def_p = g_malloc0 (len); + max_p = g_malloc0 (len); + + if (!xu_query (self, selector, UVC_GET_MIN, min_p)) { + GST_WARNING_OBJECT (self, "probe_setting GET_MIN error"); + goto end; + } + if (!xu_query (self, selector, 
UVC_GET_DEF, def_p)) { + GST_WARNING_OBJECT (self, "probe_setting GET_DEF error"); + goto end; + } + if (!xu_query (self, selector, UVC_GET_MAX, max_p)) { + GST_WARNING_OBJECT (self, "probe_setting GET_MAX error"); + goto end; + } + + switch (size) { + case -1: + STORE_MIN_DEF_MAX (gint8); + ret = TRUE; + break; + case 1: + STORE_MIN_DEF_MAX (guint8); + ret = TRUE; + break; + case -2: + STORE_MIN_DEF_MAX (gint16); + ret = TRUE; + break; + case 2: + STORE_MIN_DEF_MAX (guint16); + ret = TRUE; + break; + case -4: + STORE_MIN_DEF_MAX (gint32); + ret = TRUE; + break; + case 4: + STORE_MIN_DEF_MAX (guint32); + ret = TRUE; + break; + default: + break; + } + +end: + g_free (min_p); + g_free (def_p); + g_free (max_p); + + return ret; +} + +static gboolean +test_enum_setting (GstUvcH264Src * self, guint offset, guint size, + guint16 value) +{ + uvcx_video_config_probe_commit_t cur; + uvcx_video_config_probe_commit_t req; + guchar *req_p = (guchar *) & req; + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, (guchar *) & cur)) { + GST_WARNING_OBJECT (self, " GET_CUR error"); + return FALSE; + } + + req = cur; + + if (size == 1) + *((guint8 *) (req_p + offset)) = (guint8) value; + else + *((guint16 *) (req_p + offset)) = value; + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, req_p)) { + GST_WARNING_OBJECT (self, " SET_CUR error"); + return FALSE; + } + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, req_p)) { + GST_WARNING_OBJECT (self, " GET_CUR error"); + return FALSE; + } + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, (guchar *) & cur)) { + GST_WARNING_OBJECT (self, " SET_CUR error"); + return FALSE; + } + + if (size == 1) + return *((guint8 *) (req_p + offset)) == (guint8) value; + else + return *((guint16 *) (req_p + offset)) == value; +} + +static gboolean +gst_uvc_h264_src_get_enum_setting (GstUvcH264Src * self, gchar * property, + gint * mask, gint * default_value) +{ + guint8 min, def, max; + guint8 en; + gboolean ret = FALSE; + + if (g_strcmp0 (property, "slice-mode") == 0) { + guint16 min16, def16, max16; + guint16 en16; + + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, wSliceMode), 2, + &min16, &def16, &max16); + if (ret) { + *default_value = def16; + *mask = 0; + for (en16 = min16; en16 <= max16; en16++) { + if (test_enum_setting (self, offsetof (uvcx_video_config_probe_commit_t, + wSliceMode), 2, en16)) + *mask |= (1 << en16); + } + } + } else if (g_strcmp0 (property, "usage-type") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, bUsageType), 1, + &min, &def, &max); + if (ret) { + *default_value = def; + *mask = 0; + for (en = min; en <= max; en++) { + if (test_enum_setting (self, offsetof (uvcx_video_config_probe_commit_t, + bUsageType), 1, en)) + *mask |= (1 << en); + } + } + } else if (g_strcmp0 (property, "entropy") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, bEntropyCABAC), 1, + &min, &def, &max); + if (ret) { + *mask = (1 << min) | (1 << max); + *default_value = def; + } + } else if (g_strcmp0 (property, "rate-control") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, bRateControlMode), 1, + &min, &def, &max); + if (ret) { + uvcx_rate_control_mode_t cur; + + *default_value = def; + *mask = 0; + + xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR, (guchar *) & cur); + + for (en = min; en <= max; 
en++) { + uvcx_rate_control_mode_t req = { 0, en }; + + if (xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_SET_CUR, + (guchar *) & req) && + xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR, + (guchar *) & req) && req.bRateControlMode == en) + *mask |= (1 << en); + } + xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_SET_CUR, (guchar *) & cur); + } + } + + return ret; +} + +static gboolean +gst_uvc_h264_src_get_boolean_setting (GstUvcH264Src * self, gchar * property, + gboolean * changeable, gboolean * default_value) +{ + guint8 min, def, max; + gboolean ret = FALSE; + + if (g_strcmp0 (property, "enable-sei") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, bTimestamp), 1, + &min, &def, &max); + *changeable = (min != max); + *default_value = (def != 0); + } else if (g_strcmp0 (property, "preview-flipped") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, bPreviewFlipped), 1, + &min, &def, &max); + *changeable = (min != max); + *default_value = (def != 0); + } else if (g_strcmp0 (property, "fixed-framerate") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, bRateControlMode), 1, + &min, &def, &max); + *changeable = ((max & UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0); + *default_value = ((def & UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0); + } + + return ret; +} + +static gboolean +gst_uvc_h264_src_get_int_setting (GstUvcH264Src * self, gchar * property, + gint * min, gint * def, gint * max) +{ + guint32 min32, def32, max32; + guint16 min16, def16, max16; + guint8 min8, def8, max8; + gint8 smin8, sdef8, smax8; + gboolean ret = FALSE; + + GST_DEBUG_OBJECT (self, "Probing int property %s", property); + if (g_strcmp0 (property, "initial-bitrate") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, dwBitRate), 4, + &min32, &def32, &max32); + *min = min32; + *def = def32; + *max = max32; + } else if (g_strcmp0 (property, "slice-units") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, wSliceUnits), 2, + &min16, &def16, &max16); + *min = min16; + *def = def16; + *max = max16; + } else if (g_strcmp0 (property, "iframe-period") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, wIFramePeriod), 2, + &min16, &def16, &max16); + *min = min16; + *def = def16; + *max = max16; + } else if (g_strcmp0 (property, "num-reorder-frames") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, bNumOfReorderFrames), 1, + &min8, &def8, &max8); + *min = min8; + *def = def8; + *max = max8; + } else if (g_strcmp0 (property, "leaky-bucket-size") == 0) { + ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE, + offsetof (uvcx_video_config_probe_commit_t, wLeakyBucketSize), 2, + &min16, &def16, &max16); + *min = min16; + *def = def16; + *max = max16; + } else if (g_strcmp0 (property, "level-idc") == 0) { + ret = probe_setting (self, UVCX_VIDEO_ADVANCE_CONFIG, + offsetof (uvcx_video_advance_config_t, blevel_idc), 1, + &min8, &def8, &max8); + *min = min8; + *def = def8; + *max = max8; + } else if (g_strcmp0 (property, "max-mbps") == 0) { + ret = probe_setting (self, UVCX_VIDEO_ADVANCE_CONFIG, + offsetof (uvcx_video_advance_config_t, dwMb_max), 4, + &min32, &def32, &max32); + *min = min32; + *def = def32; + *max = max32; + } else if (g_strcmp0 
(property, "peak-bitrate") == 0) { + ret = probe_setting (self, UVCX_BITRATE_LAYERS, + offsetof (uvcx_bitrate_layers_t, dwPeakBitrate), 4, + &min32, &def32, &max32); + *min = min32; + *def = def32; + *max = max32; + } else if (g_strcmp0 (property, "average-bitrate") == 0) { + ret = probe_setting (self, UVCX_BITRATE_LAYERS, + offsetof (uvcx_bitrate_layers_t, dwAverageBitrate), 4, + &min32, &def32, &max32); + *min = min32; + *def = def32; + *max = max32; + } else if (g_strcmp0 (property, "min-iframe-qp") == 0) { + if (update_qp (self, QP_I_FRAME)) + ret = probe_setting (self, UVCX_QP_STEPS_LAYERS, + offsetof (uvcx_qp_steps_layers_t, bMinQp), 1, &smin8, &sdef8, &smax8); + *min = smin8; + *def = sdef8; + *max = smax8; + } else if (g_strcmp0 (property, "max-iframe-qp") == 0) { + if (update_qp (self, QP_I_FRAME)) + ret = probe_setting (self, UVCX_QP_STEPS_LAYERS, + offsetof (uvcx_qp_steps_layers_t, bMaxQp), 1, &smin8, &sdef8, &smax8); + *min = smin8; + *def = sdef8; + *max = smax8; + } else if (g_strcmp0 (property, "min-pframe-qp") == 0) { + if (update_qp (self, QP_P_FRAME)) + ret = probe_setting (self, UVCX_QP_STEPS_LAYERS, + offsetof (uvcx_qp_steps_layers_t, bMinQp), 1, &smin8, &sdef8, &smax8); + *min = smin8; + *def = sdef8; + *max = smax8; + } else if (g_strcmp0 (property, "max-pframe-qp") == 0) { + if (update_qp (self, QP_P_FRAME)) + ret = probe_setting (self, UVCX_QP_STEPS_LAYERS, + offsetof (uvcx_qp_steps_layers_t, bMaxQp), 1, &smin8, &sdef8, &smax8); + *min = smin8; + *def = sdef8; + *max = smax8; + } else if (g_strcmp0 (property, "min-bframe-qp") == 0) { + if (update_qp (self, QP_B_FRAME)) + ret = probe_setting (self, UVCX_QP_STEPS_LAYERS, + offsetof (uvcx_qp_steps_layers_t, bMinQp), 1, &smin8, &sdef8, &smax8); + *min = smin8; + *def = sdef8; + *max = smax8; + } else if (g_strcmp0 (property, "max-bframe-qp") == 0) { + if (update_qp (self, QP_B_FRAME)) + ret = probe_setting (self, UVCX_QP_STEPS_LAYERS, + offsetof (uvcx_qp_steps_layers_t, bMaxQp), 1, &smin8, &sdef8, &smax8); + *min = smin8; + *def = sdef8; + *max = smax8; + } else if (g_strcmp0 (property, "ltr-buffer-size") == 0) { + ret = probe_setting (self, UVCX_LTR_BUFFER_SIZE_CONTROL, + offsetof (uvcx_ltr_buffer_size_control_t, bLTRBufferSize), 1, + &min8, &def8, &max8); + *min = min8; + *def = def8; + *max = max8; + } else if (g_strcmp0 (property, "ltr-encoder-control") == 0) { + ret = probe_setting (self, UVCX_LTR_BUFFER_SIZE_CONTROL, + offsetof (uvcx_ltr_buffer_size_control_t, bLTREncoderControl), 1, + &min8, &def8, &max8); + *min = min8; + *def = def8; + *max = max8; + } + + return ret; +} + +static gboolean +gst_uvc_h264_src_event_probe (GstPad * pad, GstEvent * event, + gpointer user_data) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (user_data); + gboolean ret = TRUE; + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_EOS: + ret = !self->reconfiguring; + break; + case GST_EVENT_NEWSEGMENT: + if (pad == self->vidsrc) { + ret = !self->vid_newseg; + self->vid_newseg = TRUE; + } else if (pad == self->vfsrc) { + ret = !self->vf_newseg; + self->vf_newseg = TRUE; + } + break; + default: + break; + } + + return ret; +} + +static gboolean +gst_uvc_h264_src_buffer_probe (GstPad * pad, GstBuffer * buffer, + gpointer user_data) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (user_data); + + /* TODO: Check the NALU type and make sure it is a keyframe */ + if (self->key_unit_event) { + GstClockTime ts, running_time, stream_time; + gboolean all_headers; + guint count; + GstEvent *downstream; + + if 
(gst_video_event_parse_upstream_force_key_unit (self->key_unit_event, + &ts, &all_headers, &count)) { + if (!GST_CLOCK_TIME_IS_VALID (ts)) { + ts = GST_BUFFER_TIMESTAMP (buffer); + } + running_time = gst_segment_to_running_time (&self->segment, + GST_FORMAT_TIME, ts); + + stream_time = gst_segment_to_stream_time (&self->segment, + GST_FORMAT_TIME, ts); + + GST_DEBUG_OBJECT (self, "Sending downstream force-key-unit : %d - %d ts=%" + GST_TIME_FORMAT " running time =%" GST_TIME_FORMAT " stream=%" + GST_TIME_FORMAT, all_headers, count, GST_TIME_ARGS (ts), + GST_TIME_ARGS (running_time), GST_TIME_ARGS (stream_time)); + downstream = gst_video_event_new_downstream_force_key_unit (ts, + running_time, stream_time, all_headers, count); + gst_pad_push_event (self->vidsrc, downstream); + gst_event_replace (&self->key_unit_event, NULL); + } + } + return TRUE; +} + +static gboolean +gst_uvc_h264_src_parse_event (GstUvcH264Src * self, GstPad * pad, + GstEvent * event) +{ + const GstStructure *s = gst_event_get_structure (event); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_CUSTOM_UPSTREAM: + if (pad == self->vidsrc && self->main_format == UVC_H264_SRC_FORMAT_H264) { + if (gst_video_event_is_force_key_unit (event)) { + uvcx_picture_type_control_t req = { 0, 0 }; + GstClockTime ts; + gboolean all_headers; + + if (gst_video_event_parse_upstream_force_key_unit (event, + &ts, &all_headers, NULL)) { + GST_INFO_OBJECT (self, "Received upstream force-key-unit : %d %" + GST_TIME_FORMAT, all_headers, GST_TIME_ARGS (ts)); + /* TODO: wait until 'ts' time is reached */ + if (all_headers) + req.wPicType = UVC_H264_PICTYPE_IDR_WITH_PPS_SPS; + else + req.wPicType = UVC_H264_PICTYPE_IDR; + + if (!xu_query (self, UVCX_PICTURE_TYPE_CONTROL, UVC_SET_CUR, + (guchar *) & req)) { + GST_WARNING_OBJECT (self, " PICTURE_TYPE_CONTROL SET_CUR error"); + } else { + gst_event_replace (&self->key_unit_event, event); + gst_event_unref (event); + + return TRUE; + } + } + } else if (s && + gst_structure_has_name (s, "uvc-h264-ltr-picture-control")) { + guint put_at, encode_using; + + if (gst_structure_get_uint (s, "put-at", &put_at) && + gst_structure_get_uint (s, "encode-using", &encode_using)) { + uvcx_ltr_picture_control req = { 0, put_at, encode_using }; + + if (!xu_query (self, UVCX_LTR_PICTURE_CONTROL, UVC_SET_CUR, + (guchar *) & req)) { + GST_WARNING_OBJECT (self, " LTR PICTURE_CONTROL SET_CUR error"); + } else { + gst_event_unref (event); + + return TRUE; + } + } + return TRUE; + } else if (s && gst_structure_has_name (s, "uvc-h264-bitrate-control")) { + guint average, peak; + + if (gst_structure_get_uint (s, "average-bitrate", &average) && + gst_structure_get_uint (s, "peak-bitrate", &peak)) { + self->average_bitrate = average; + self->peak_bitrate = peak; + set_bitrate (self); + update_bitrate (self); + + gst_event_unref (event); + + return TRUE; + } + } else if (s && gst_structure_has_name (s, "uvc-h264-qp-control")) { + gint min_qp, max_qp; + gboolean valid_event = FALSE; + + if (gst_structure_get_int (s, "min-iframe-qp", &min_qp) && + gst_structure_get_int (s, "max-iframe-qp", &max_qp)) { + self->min_qp[QP_I_FRAME] = min_qp; + self->max_qp[QP_I_FRAME] = max_qp; + set_qp (self, QP_I_FRAME); + update_qp (self, QP_I_FRAME); + valid_event = TRUE; + } + if (gst_structure_get_int (s, "min-pframe-qp", &min_qp) && + gst_structure_get_int (s, "max-pframe-qp", &max_qp)) { + self->min_qp[QP_P_FRAME] = min_qp; + self->max_qp[QP_P_FRAME] = max_qp; + set_qp (self, QP_P_FRAME); + update_qp (self, QP_P_FRAME); + valid_event = 
TRUE; + } + if (gst_structure_get_int (s, "min-bframe-qp", &min_qp) && + gst_structure_get_int (s, "max-bframe-qp", &max_qp)) { + self->min_qp[QP_B_FRAME] = min_qp; + self->max_qp[QP_B_FRAME] = max_qp; + set_qp (self, QP_B_FRAME); + update_qp (self, QP_B_FRAME); + valid_event = TRUE; + } + + if (valid_event) { + gst_event_unref (event); + + return TRUE; + } + } else if (s && gst_structure_has_name (s, "uvc-h264-rate-control")) { + UvcH264RateControl rate; + gboolean fixed_framerate; + + if (gst_structure_get_enum (s, "rate-control", + UVC_H264_RATECONTROL_TYPE, (gint *) & rate) && + gst_structure_get_boolean (s, "fixed-framerate", + &fixed_framerate)) { + self->rate_control = rate; + self->fixed_framerate = fixed_framerate; + set_rate_control (self); + update_rate_control (self); + + gst_event_unref (event); + + return TRUE; + } + } else if (s && gst_structure_has_name (s, "uvc-h264-level-idc")) { + guint level_idc; + + if (gst_structure_get_uint (s, "level-idc", &level_idc)) { + self->level_idc = level_idc; + set_level_idc (self); + update_level_idc_and_get_max_mbps (self); + + gst_event_unref (event); + } + } + } + if (s && gst_structure_has_name (s, "renegotiate")) { + GST_DEBUG_OBJECT (self, "Received renegotiate on %s", + GST_PAD_NAME (pad)); + /* TODO: Do not reconstruct pipeline twice if we receive + the event on both pads */ + if (GST_STATE (self) >= GST_STATE_READY) { + /* TODO: diff the caps */ + gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self)); + } + return TRUE; + } + break; + default: + break; + } + + return FALSE; +} + +static gboolean +gst_uvc_h264_src_send_event (GstElement * element, GstEvent * event) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (element); + + if (gst_uvc_h264_src_parse_event (self, self->vidsrc, event)) + return TRUE; + + return GST_ELEMENT_CLASS (parent_class)->send_event (element, event); +} + +static gboolean +gst_uvc_h264_src_event (GstPad * pad, GstEvent * event) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (GST_PAD_PARENT (pad)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_NEWSEGMENT: + if (!self->vid_newseg && pad == self->vidsrc) { + gboolean update; + gdouble rate, applied_rate; + GstFormat format; + gint64 start, stop, position; + + gst_event_parse_new_segment_full (event, &update, &rate, + &applied_rate, &format, &start, &stop, &position); + gst_segment_set_newsegment (&self->segment, update, rate, format, + start, stop, position); + } + break; + case GST_EVENT_FLUSH_STOP: + if (pad == self->vidsrc) { + gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED); + self->vid_newseg = FALSE; + } + if (pad == self->vfsrc) + self->vf_newseg = FALSE; + break; + default: + if (gst_uvc_h264_src_parse_event (self, pad, event)) + return TRUE; + break; + } + return self->srcpad_event_func (pad, event); +} + +static guint8 +xu_get_id (GstUvcH264Src * self) +{ + struct uvc_xu_find_unit xu; + static const __u8 guid[16] = GUID_UVCX_H264_XU; + + if (self->v4l2_fd == -1) { + GST_WARNING_OBJECT (self, "Can't query XU with fd = -1"); + return 0; + } + + memcpy (xu.guid, guid, 16); + xu.unit = 0; + + if (-1 == ioctl (self->v4l2_fd, UVCIOC_XU_FIND_UNIT, &xu)) { +#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB) + /* Fallback on libusb */ + GUdevClient *client; + GUdevDevice *udevice; + GUdevDevice *parent; + guint64 busnum; + guint64 devnum; + libusb_device **device_list = NULL; + libusb_device *device = NULL; + ssize_t cnt; + int i, j, k; + + GST_DEBUG_OBJECT (self, "XU_FIND_UNIT ioctl failed. 
Fallback on libusb"); + + if (self->usb_ctx == NULL) + libusb_init (&self->usb_ctx); + + client = g_udev_client_new (NULL); + if (client) { + udevice = g_udev_client_query_by_device_file (client, self->device); + if (udevice) { + parent = g_udev_device_get_parent_with_subsystem (udevice, "usb", + "usb_device"); + if (parent) { + busnum = g_udev_device_get_sysfs_attr_as_uint64 (parent, "busnum"); + devnum = g_udev_device_get_sysfs_attr_as_uint64 (parent, "devnum"); + + cnt = libusb_get_device_list (self->usb_ctx, &device_list); + for (i = 0; i < cnt; i++) { + if (busnum == libusb_get_bus_number (device_list[i]) && + devnum == libusb_get_device_address (device_list[i])) { + device = libusb_ref_device (device_list[i]); + break; + } + } + libusb_free_device_list (device_list, 1); + g_object_unref (parent); + } + g_object_unref (udevice); + } + g_object_unref (client); + } + + if (device) { + struct libusb_device_descriptor desc; + + if (libusb_get_device_descriptor (device, &desc) == 0) { + for (i = 0; i < desc.bNumConfigurations; ++i) { + struct libusb_config_descriptor *config = NULL; + + if (libusb_get_config_descriptor (device, i, &config) == 0) { + for (j = 0; j < config->bNumInterfaces; j++) { + for (k = 0; k < config->interface[j].num_altsetting; k++) { + const struct libusb_interface_descriptor *interface; + const guint8 *ptr = NULL; + + interface = &config->interface[j].altsetting[k]; + if (interface->bInterfaceClass != LIBUSB_CLASS_VIDEO || + interface->bInterfaceSubClass != USB_VIDEO_CONTROL) + continue; + ptr = interface->extra; + while (ptr - interface->extra + + sizeof (xu_descriptor) < interface->extra_length) { + xu_descriptor *desc = (xu_descriptor *) ptr; + + GST_DEBUG_OBJECT (self, "Found VideoControl interface with " + "unit id %d : %" GUID_FORMAT, desc->bUnitID, + GUID_ARGS (desc->guidExtensionCode)); + if (desc->bDescriptorType == USB_VIDEO_CONTROL_INTERFACE && + desc->bDescriptorSubType == USB_VIDEO_CONTROL_XU_TYPE && + memcmp (desc->guidExtensionCode, guid, 16) == 0) { + guint8 unit_id = desc->bUnitID; + + GST_DEBUG_OBJECT (self, "Found H264 XU unit : %d", unit_id); + + libusb_unref_device (device); + return unit_id; + } + ptr += desc->bLength; + } + } + } + } + } + } + libusb_unref_device (device); + } +#else + GST_WARNING_OBJECT (self, "XU_FIND_UNIT ioctl failed"); +#endif + return 0; + } + + return xu.unit; +} + +static gboolean +xu_query (GstUvcH264Src * self, guint selector, guint query, guchar * data) +{ + struct uvc_xu_control_query xu; + __u16 len; + + if (self->v4l2_fd == -1) { + GST_WARNING_OBJECT (self, "Can't query XU with fd = -1"); + return FALSE; + } + + xu.unit = self->h264_unit_id; + xu.selector = selector; + + xu.query = UVC_GET_LEN; + xu.size = sizeof (len); + xu.data = (unsigned char *) &len; + if (-1 == ioctl (self->v4l2_fd, UVCIOC_CTRL_QUERY, &xu)) { + GST_WARNING_OBJECT (self, "PROBE GET_LEN error"); + return FALSE; + } + + if (query == UVC_GET_LEN) { + *((__u16 *) data) = len; + } else { + xu.query = query; + xu.size = len; + xu.data = data; + if (-1 == ioctl (self->v4l2_fd, UVCIOC_CTRL_QUERY, &xu)) { + return FALSE; + } + } + + return TRUE; +} + +static void +fill_probe_commit (GstUvcH264Src * self, + uvcx_video_config_probe_commit_t * probe, guint32 frame_interval, + guint32 width, guint32 height, guint32 profile, + UvcH264StreamFormat stream_format) +{ + probe->dwFrameInterval = frame_interval; + probe->dwBitRate = self->initial_bitrate; + probe->wWidth = width; + probe->wHeight = height; + probe->wSliceUnits = self->slice_units; + 
probe->wSliceMode = self->slice_mode; + probe->wProfile = profile; + probe->wIFramePeriod = self->iframe_period; + probe->bUsageType = self->usage_type; + probe->bRateControlMode = self->rate_control; + if (self->fixed_framerate) + probe->bRateControlMode |= UVC_H264_RATECONTROL_FIXED_FRM_FLG; + probe->bStreamFormat = stream_format; + probe->bEntropyCABAC = self->entropy; + probe->bTimestamp = self->enable_sei ? + UVC_H264_TIMESTAMP_SEI_ENABLE : UVC_H264_TIMESTAMP_SEI_DISABLE; + probe->bNumOfReorderFrames = self->num_reorder_frames; + probe->bPreviewFlipped = self->preview_flipped ? + UVC_H264_PREFLIPPED_HORIZONTAL : UVC_H264_PREFLIPPED_DISABLE; + probe->wLeakyBucketSize = self->leaky_bucket_size; +} + +static void +print_probe_commit (GstUvcH264Src * self, + uvcx_video_config_probe_commit_t * probe) +{ + GST_DEBUG_OBJECT (self, " Frame interval : %d *100ns", + probe->dwFrameInterval); + GST_DEBUG_OBJECT (self, " Bit rate : %d", probe->dwBitRate); + GST_DEBUG_OBJECT (self, " Hints : %X", probe->bmHints); + GST_DEBUG_OBJECT (self, " Configuration index : %d", + probe->wConfigurationIndex); + GST_DEBUG_OBJECT (self, " Width : %d", probe->wWidth); + GST_DEBUG_OBJECT (self, " Height : %d", probe->wHeight); + GST_DEBUG_OBJECT (self, " Slice units : %d", probe->wSliceUnits); + GST_DEBUG_OBJECT (self, " Slice mode : %X", probe->wSliceMode); + GST_DEBUG_OBJECT (self, " Profile : %X", probe->wProfile); + GST_DEBUG_OBJECT (self, " IFrame Period : %d ms", probe->wIFramePeriod); + GST_DEBUG_OBJECT (self, " Estimated video delay : %d ms", + probe->wEstimatedVideoDelay); + GST_DEBUG_OBJECT (self, " Estimated max config delay : %d ms", + probe->wEstimatedMaxConfigDelay); + GST_DEBUG_OBJECT (self, " Usage type : %X", probe->bUsageType); + GST_DEBUG_OBJECT (self, " Rate control mode : %X", probe->bRateControlMode); + GST_DEBUG_OBJECT (self, " Temporal scale mode : %X", + probe->bTemporalScaleMode); + GST_DEBUG_OBJECT (self, " Spatial scale mode : %X", + probe->bSpatialScaleMode); + GST_DEBUG_OBJECT (self, " SNR scale mode : %X", probe->bSNRScaleMode); + GST_DEBUG_OBJECT (self, " Stream mux option : %X", probe->bStreamMuxOption); + GST_DEBUG_OBJECT (self, " Stream Format : %X", probe->bStreamFormat); + GST_DEBUG_OBJECT (self, " Entropy CABAC : %X", probe->bEntropyCABAC); + GST_DEBUG_OBJECT (self, " Timestamp : %X", probe->bTimestamp); + GST_DEBUG_OBJECT (self, " Num of reorder frames : %d", + probe->bNumOfReorderFrames); + GST_DEBUG_OBJECT (self, " Preview flipped : %X", probe->bPreviewFlipped); + GST_DEBUG_OBJECT (self, " View : %d", probe->bView); + GST_DEBUG_OBJECT (self, " Stream ID : %X", probe->bStreamID); + GST_DEBUG_OBJECT (self, " Spatial layer ratio : %f", + ((probe->bSpatialLayerRatio & 0xF0) >> 4) + + ((float) (probe->bSpatialLayerRatio & 0x0F)) / 16); + GST_DEBUG_OBJECT (self, " Leaky bucket size : %d ms", + probe->wLeakyBucketSize); +} + +static void +configure_h264 (GstUvcH264Src * self, gint fd) +{ + uvcx_video_config_probe_commit_t probe; + + /* Set the secondary format first, so the last SET_CUR will be for the + * H264 format. This way, we can still get the static control values with + * a GET_CUR. Otherwise all static properties will return 0 because that's + * what the GET_CUR of the raw format returns. 
+ */ + if (self->secondary_format == UVC_H264_SRC_FORMAT_RAW) { + memset (&probe, 0, sizeof (probe)); + probe.dwFrameInterval = self->secondary_frame_interval; + probe.wWidth = self->secondary_width; + probe.wHeight = self->secondary_height; + probe.bStreamMuxOption = 5; + + GST_DEBUG_OBJECT (self, "RAW PROBE SET_CUR : "); + print_probe_commit (self, &probe); + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE SET_CUR error"); + return; + } + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE GET_CUR error"); + return; + } + GST_DEBUG_OBJECT (self, "RAW PROBE GET_CUR : "); + print_probe_commit (self, &probe); + + if (!xu_query (self, UVCX_VIDEO_CONFIG_COMMIT, UVC_SET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "COMMIT SET_CUR error"); + return; + } + } + /* Print MIN/MAX/DEF probe values for debugging purposes */ + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_MIN, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE GET_MIN error"); + return; + } + GST_DEBUG_OBJECT (self, "PROBE GET_MIN : "); + print_probe_commit (self, &probe); + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_MAX, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE GET_MAX error"); + return; + } + GST_DEBUG_OBJECT (self, "PROBE GET_MAX : "); + print_probe_commit (self, &probe); + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_DEF, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE GET_DEF error"); + return; + } + GST_DEBUG_OBJECT (self, "PROBE GET_DEF : "); + print_probe_commit (self, &probe); + + fill_probe_commit (self, &probe, self->main_frame_interval, + self->main_width, self->main_height, self->main_profile, + self->main_stream_format); + if (self->secondary_format != UVC_H264_SRC_FORMAT_NONE) + probe.bStreamMuxOption = 3; + else + probe.bStreamMuxOption = 0; + probe.bmHints = UVC_H264_BMHINTS_RESOLUTION | UVC_H264_BMHINTS_PROFILE | + UVC_H264_BMHINTS_FRAME_INTERVAL; + + GST_DEBUG_OBJECT (self, "PROBE SET_CUR : "); + print_probe_commit (self, &probe); + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE SET_CUR error"); + return; + } + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE GET_CUR error"); + return; + } + GST_DEBUG_OBJECT (self, "PROBE GET_CUR : "); + print_probe_commit (self, &probe); + + /* Must validate the settings accepted by the encoder */ + if (!xu_query (self, UVCX_VIDEO_CONFIG_COMMIT, UVC_SET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "COMMIT SET_CUR error"); + return; + } +} + +static void +v4l2src_prepare_format (GstElement * v4l2src, gint fd, guint fourcc, + guint width, guint height, gpointer user_data) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (user_data); + + GST_DEBUG_OBJECT (self, "v4l2src prepare-format with FCC %" GST_FOURCC_FORMAT, + GST_FOURCC_ARGS (fourcc)); + + if (self->main_format == UVC_H264_SRC_FORMAT_H264) { + /* TODO: update static controls and g_object_notify those that changed */ + configure_h264 (self, fd); + + /* TODO: update dynamic controls on READY state */ + /* Configure dynamic controls */ + set_rate_control (self); + update_rate_control (self); + set_level_idc (self); + update_level_idc_and_get_max_mbps (self); + set_bitrate (self); + update_bitrate (self); + set_qp (self, QP_I_FRAME); + update_qp (self, 
QP_I_FRAME); + set_qp (self, QP_P_FRAME); + update_qp (self, QP_P_FRAME); + set_qp (self, QP_B_FRAME); + update_qp (self, QP_B_FRAME); + set_ltr (self); + update_ltr (self); + } +} + +static gboolean +_extract_caps_info (GstStructure * structure, guint16 * width, guint16 * height, + guint32 * frame_interval) +{ + gint w, h, fps_n, fps_d; + gboolean ret = TRUE; + + ret &= gst_structure_get_int (structure, "width", &w); + ret &= gst_structure_get_int (structure, "height", &h); + ret &= gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d); + + if (ret) { + *width = w; + *height = h; + /* Interval is in 100ns */ + *frame_interval = GST_TIME_AS_NSECONDS ((fps_d * GST_SECOND) / fps_n) / 100; + } + + return ret; +} + +static guint16 +_extract_profile (GstStructure * structure) +{ + const gchar *profile_str; + guint16 profile; + + profile = UVC_H264_PROFILE_HIGH; + profile_str = gst_structure_get_string (structure, "profile"); + if (profile_str) { + if (!strcmp (profile_str, "constrained-baseline")) { + profile = UVC_H264_PROFILE_CONSTRAINED_BASELINE; + } else if (!strcmp (profile_str, "baseline")) { + profile = UVC_H264_PROFILE_BASELINE; + } else if (!strcmp (profile_str, "main")) { + profile = UVC_H264_PROFILE_MAIN; + } else if (!strcmp (profile_str, "high")) { + profile = UVC_H264_PROFILE_HIGH; + } + } + return profile; +} + +static UvcH264StreamFormat +_extract_stream_format (GstStructure * structure) +{ + const gchar *stream_format; + + stream_format = gst_structure_get_string (structure, "stream-format"); + if (stream_format) { + if (!strcmp (stream_format, "avc")) + return UVC_H264_STREAMFORMAT_NAL; + else if (!strcmp (stream_format, "byte-stream")) + return UVC_H264_STREAMFORMAT_ANNEXB; + } + return UVC_H264_STREAMFORMAT_ANNEXB; +} + +static GstCaps * +_transform_caps (GstUvcH264Src * self, GstCaps * caps, const gchar * name) +{ + GstElement *el = gst_element_factory_make (name, NULL); + GstElement *cf = gst_element_factory_make ("capsfilter", NULL); + GstPad *sink; + + if (!el || !cf || !gst_bin_add (GST_BIN (self), el)) { + if (el) + gst_object_unref (el); + if (cf) + gst_object_unref (cf); + goto done; + } + if (!gst_bin_add (GST_BIN (self), cf)) { + gst_object_unref (cf); + gst_bin_remove (GST_BIN (self), el); + goto done; + } + if (!gst_element_link (el, cf)) + goto error_remove; + + sink = gst_element_get_static_pad (el, "sink"); + if (!sink) + goto error_remove; + g_object_set (cf, "caps", caps, NULL); + + caps = gst_pad_get_caps (sink); + gst_object_unref (sink); + +error_remove: + gst_bin_remove (GST_BIN (self), cf); + gst_bin_remove (GST_BIN (self), el); + +done: + return caps; +} + +static GstCaps * +gst_uvc_h264_src_transform_caps (GstUvcH264Src * self, GstCaps * caps) +{ + GstCaps *h264 = gst_caps_new_simple ("video/x-h264", NULL); + GstCaps *jpg = gst_caps_new_simple ("image/jpeg", NULL); + GstCaps *h264_caps = gst_caps_intersect (h264, caps); + GstCaps *jpg_caps = gst_caps_intersect (jpg, caps); + + /* TODO: Keep caps order after transformation */ + caps = _transform_caps (self, caps, self->colorspace_name); + + if (!gst_caps_is_empty (h264_caps)) { + GstCaps *temp = gst_caps_union (caps, h264_caps); + gst_caps_unref (caps); + caps = temp; + } + if (!gst_caps_is_empty (jpg_caps)) { + GstCaps *temp = gst_caps_union (caps, jpg_caps); + gst_caps_unref (caps); + caps = temp; + } + + if (h264_caps) + gst_caps_unref (h264_caps); + if (jpg_caps) + gst_caps_unref (jpg_caps); + gst_caps_unref (h264); + gst_caps_unref (jpg); + + + return caps; +} + +static GstCaps * 
+gst_uvc_h264_src_fixate_caps (GstUvcH264Src * self, GstPad * v4l_pad, + GstCaps * v4l_caps, GstCaps * peer_caps, gboolean primary) +{ + GstCaps *caps = NULL; + GstCaps *icaps = NULL; + GstCaps *tcaps = NULL; + int i; + + if (v4l_caps == NULL || gst_caps_is_any (v4l_caps)) { + GST_DEBUG_OBJECT (self, "v4l caps are invalid. not fixating"); + return NULL; + } + + tcaps = gst_caps_intersect_full (peer_caps, v4l_caps, + GST_CAPS_INTERSECT_FIRST); + GST_DEBUG_OBJECT (self, "intersect: %" GST_PTR_FORMAT, tcaps); + icaps = gst_caps_normalize (tcaps); + gst_caps_unref (tcaps); + + /* Prefer the first caps we are compatible with that the peer proposed */ + for (i = 0; i < gst_caps_get_size (icaps); i++) { + /* get intersection */ + GstCaps *ipcaps = gst_caps_copy_nth (icaps, i); + GstStructure *s = gst_caps_get_structure (ipcaps, 0); + + GST_DEBUG_OBJECT (self, "Testing %s: %" GST_PTR_FORMAT, + primary ? "primary" : "secondary", ipcaps); + if (primary && gst_structure_has_name (s, "video/x-h264")) { + uvcx_video_config_probe_commit_t probe; + guint16 width; + guint16 height; + guint32 interval; + guint16 profile; + UvcH264StreamFormat stream_format; + + if (_extract_caps_info (s, &width, &height, &interval)) { + profile = _extract_profile (s); + stream_format = _extract_stream_format (s); + fill_probe_commit (self, &probe, interval, width, height, + profile, stream_format); + probe.bmHints = UVC_H264_BMHINTS_RESOLUTION | + UVC_H264_BMHINTS_PROFILE | UVC_H264_BMHINTS_FRAME_INTERVAL; + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE SET_CUR error"); + return NULL; + } + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE GET_CUR error"); + return NULL; + } + GST_DEBUG_OBJECT (self, "Probe gives us %d==%d, %d==%d, %d==%d", + probe.wWidth, width, probe.wHeight, height, + probe.bStreamFormat, stream_format); + if (probe.wWidth == width && probe.wHeight == height && + probe.bStreamFormat == stream_format) { + caps = ipcaps; + break; + } + } + } else if (!primary && self->main_format == UVC_H264_SRC_FORMAT_H264) { + uvcx_video_config_probe_commit_t probe; + guint16 width; + guint16 height; + guint32 interval; + + if (_extract_caps_info (s, &width, &height, &interval)) { + if (gst_structure_has_name (s, "video/x-raw-yuv")) { + guint32 fcc = 0; + guint8 mux = 0; + + if (gst_structure_get_fourcc (s, "format", &fcc)) { + if (fcc == GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')) + mux = 4; + else if (fcc == GST_MAKE_FOURCC ('N', 'V', '1', '2')) + mux = 8; + } + if (mux != 0) { + memset (&probe, 0, sizeof (probe)); + probe.dwFrameInterval = interval; + probe.wWidth = width; + probe.wHeight = height; + probe.bStreamMuxOption = mux | 1; + probe.bmHints = UVC_H264_BMHINTS_RESOLUTION | + UVC_H264_BMHINTS_PROFILE | UVC_H264_BMHINTS_FRAME_INTERVAL; + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE SET_CUR error"); + return NULL; + } + + if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, + (guchar *) & probe)) { + GST_WARNING_OBJECT (self, "PROBE GET_CUR error"); + return NULL; + } + GST_DEBUG_OBJECT (self, "Probe gives us %d==%d, %d==%d, %d~=%d", + probe.wWidth, width, probe.wHeight, height, + probe.bStreamMuxOption, mux); + if (probe.wWidth == width && probe.wHeight == height && + (probe.bStreamMuxOption & mux) != 0) { + caps = ipcaps; + break; + } + } + } else if (gst_structure_has_name (s, "image/jpeg")) 
{ + /* HACK ALERT: No way of figuring this one out but it seems the + * camera doesn't allow for h264 muxing and jpeg resolution higher + * than 640x480 so we shouldn't allow it */ + if (width <= 640 && height <= 480) { + caps = ipcaps; + break; + } + } + } + } else { + caps = ipcaps; + break; + } + gst_caps_unref (ipcaps); + } + + if (caps) { + caps = gst_caps_make_writable (caps); + gst_caps_truncate (caps); + + /* now fixate */ + if (!gst_caps_is_empty (caps)) { + gst_pad_fixate_caps (v4l_pad, caps); + GST_DEBUG_OBJECT (self, "fixated to: %" GST_PTR_FORMAT, caps); + } + + if (gst_caps_is_empty (caps) || gst_caps_is_any (caps)) { + gst_caps_unref (caps); + caps = NULL; + } + } + + return caps; +} + +static void +gst_uvc_h264_src_destroy_pipeline (GstUvcH264Src * self, gboolean v4l2src) +{ + GstIterator *iter = NULL; + gboolean done; + + if (v4l2src && self->v4l2_src) { + gst_bin_remove (GST_BIN (self), self->v4l2_src); + gst_element_set_state (self->v4l2_src, GST_STATE_NULL); + gst_object_unref (self->v4l2_src); + self->v4l2_src = NULL; + self->v4l2_fd = -1; + self->h264_unit_id = 0; + } + if (self->mjpg_demux) { + gst_bin_remove (GST_BIN (self), self->mjpg_demux); + gst_element_set_state (self->mjpg_demux, GST_STATE_NULL); + gst_object_unref (self->mjpg_demux); + self->mjpg_demux = NULL; + } + if (self->jpeg_dec) { + gst_bin_remove (GST_BIN (self), self->jpeg_dec); + gst_element_set_state (self->jpeg_dec, GST_STATE_NULL); + gst_object_unref (self->jpeg_dec); + self->jpeg_dec = NULL; + } + if (self->vid_colorspace) { + gst_bin_remove (GST_BIN (self), self->vid_colorspace); + gst_element_set_state (self->vid_colorspace, GST_STATE_NULL); + gst_object_unref (self->vid_colorspace); + self->vid_colorspace = NULL; + } + if (self->vf_colorspace) { + gst_bin_remove (GST_BIN (self), self->vf_colorspace); + gst_element_set_state (self->vf_colorspace, GST_STATE_NULL); + gst_object_unref (self->vf_colorspace); + self->vf_colorspace = NULL; + } + iter = gst_bin_iterate_elements (GST_BIN (self)); + done = FALSE; + while (!done) { + GstElement *item = NULL; + + switch (gst_iterator_next (iter, (gpointer *) & item)) { + case GST_ITERATOR_OK: + if (item != self->v4l2_src) { + gst_bin_remove (GST_BIN (self), item); + gst_element_set_state (item, GST_STATE_NULL); + } + gst_object_unref (item); + break; + case GST_ITERATOR_RESYNC: + gst_iterator_resync (iter); + break; + case GST_ITERATOR_ERROR: + done = TRUE; + break; + case GST_ITERATOR_DONE: + done = TRUE; + break; + } + } + gst_iterator_free (iter); +} + +static gboolean +ensure_v4l2src (GstUvcH264Src * self) +{ + gchar *device = NULL; + GstClock *v4l2_clock = NULL; + + if (self->v4l2_src == NULL) { + /* Create v4l2 source and set it up */ + self->v4l2_src = gst_element_factory_make ("v4l2src", NULL); + if (!self->v4l2_src || !gst_bin_add (GST_BIN (self), self->v4l2_src)) + goto error; + gst_object_ref (self->v4l2_src); + g_signal_connect (self->v4l2_src, "prepare-format", + (GCallback) v4l2src_prepare_format, self); + } + + g_object_get (self->v4l2_src, "device", &device, NULL); + g_object_set (self->v4l2_src, + "device", self->device, "num-buffers", self->num_buffers, NULL); + + v4l2_clock = gst_element_get_clock (self->v4l2_src); + + /* Set to NULL if the device changed */ + if (g_strcmp0 (device, self->device)) + gst_element_set_state (self->v4l2_src, GST_STATE_NULL); + g_free (device); + + if (gst_element_set_state (self->v4l2_src, GST_STATE_READY) != + GST_STATE_CHANGE_SUCCESS) { + GST_DEBUG_OBJECT (self, "Unable to set v4l2src to READY state"); + 
goto error_remove; + } + + /* Set/Update the fd and unit id after we go to READY */ + g_object_get (self->v4l2_src, "device-fd", &self->v4l2_fd, NULL); + self->h264_unit_id = xu_get_id (self); + + if (self->h264_unit_id == 0) { + GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS, + ("Device is not a valid UVC H264 camera"), (NULL)); + goto error_remove; + } + + /* going to state READY makes v4l2src lose its reference to the clock */ + if (v4l2_clock) { + gst_element_set_clock (self->v4l2_src, v4l2_clock); + gst_element_set_base_time (self->v4l2_src, + gst_element_get_base_time (GST_ELEMENT (self))); + gst_object_unref (v4l2_clock); + } + + return TRUE; + +error_remove: + gst_element_set_state (self->v4l2_src, GST_STATE_NULL); + gst_bin_remove (GST_BIN (self), self->v4l2_src); + +error: + if (self->v4l2_src) + gst_object_unref (self->v4l2_src); + self->v4l2_src = NULL; + self->v4l2_fd = -1; + self->h264_unit_id = 0; + + return FALSE; +} + +static gboolean +gst_uvc_h264_src_construct_pipeline (GstBaseCameraSrc * bcamsrc) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (bcamsrc); + GstIterator *iter = NULL; + gboolean iter_done = FALSE; + GstPad *vf_pad = NULL; + GstCaps *vf_caps = NULL; + GstStructure *vf_struct = NULL; + GstPad *vid_pad = NULL; + GstCaps *vid_caps = NULL; + GstStructure *vid_struct = NULL; + GstCaps *src_caps = NULL; + GstPad *v4l_pad = NULL; + GstCaps *v4l_caps = NULL; + gboolean jpg2raw = FALSE; + + enum + { + RAW_NONE, ENCODED_NONE, NONE_RAW, NONE_ENCODED, + H264_JPG, H264_RAW, H264_JPG2RAW, NONE_NONE, + RAW_RAW, ENCODED_ENCODED, + } type; + + GST_DEBUG_OBJECT (self, "Construct pipeline"); + self->reconfiguring = TRUE; + + if (self->v4l2_src) { + uvcx_encoder_reset req = { 0 }; + + if (!xu_query (self, UVCX_ENCODER_RESET, UVC_SET_CUR, (guchar *) & req)) + GST_WARNING_OBJECT (self, " UVCX_ENCODER_RESET SET_CUR error"); + } + + if (!ensure_v4l2src (self)) + goto error; + + gst_uvc_h264_src_destroy_pipeline (self, FALSE); + + /* Potentially unlink v4l2src to the ghost pads */ + gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc), NULL); + gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), NULL); + + vf_caps = gst_pad_peer_get_caps (self->vfsrc); + vid_caps = gst_pad_peer_get_caps (self->vidsrc); + + GST_DEBUG_OBJECT (self, "vfsrc caps : %" GST_PTR_FORMAT, vf_caps); + GST_DEBUG_OBJECT (self, "vidsrc caps : %" GST_PTR_FORMAT, vid_caps); + if (!self->started) { + GST_DEBUG_OBJECT (self, "video not started. 
Ignoring vidsrc caps"); + if (vid_caps) + gst_caps_unref (vid_caps); + vid_caps = NULL; + } + + v4l_pad = gst_element_get_static_pad (self->v4l2_src, "src"); + v4l_caps = gst_pad_get_caps (v4l_pad); + GST_DEBUG_OBJECT (self, "v4l2src caps : %" GST_PTR_FORMAT, v4l_caps); + if (vid_caps) { + GstCaps *trans_caps = gst_uvc_h264_src_transform_caps (self, vid_caps); + + gst_caps_unref (vid_caps); + vid_caps = gst_uvc_h264_src_fixate_caps (self, v4l_pad, v4l_caps, + trans_caps, TRUE); + gst_caps_unref (trans_caps); + + if (vid_caps) { + vid_struct = gst_caps_get_structure (vid_caps, 0); + } else { + GST_WARNING_OBJECT (self, "Could not negotiate vidsrc caps format"); + gst_object_unref (v4l_pad); + gst_caps_unref (v4l_caps); + goto error_remove; + } + } + GST_DEBUG_OBJECT (self, "Fixated vidsrc caps : %" GST_PTR_FORMAT, vid_caps); + + if (vid_caps && gst_structure_has_name (vid_struct, "video/x-h264")) { + self->main_format = UVC_H264_SRC_FORMAT_H264; + if (!_extract_caps_info (vid_struct, &self->main_width, + &self->main_height, &self->main_frame_interval)) { + gst_object_unref (v4l_pad); + gst_caps_unref (v4l_caps); + goto error_remove; + } + + self->main_stream_format = _extract_stream_format (vid_struct); + self->main_profile = _extract_profile (vid_struct); + } else { + self->main_format = UVC_H264_SRC_FORMAT_NONE; + } + + if (vf_caps) { + GstCaps *trans_caps = gst_uvc_h264_src_transform_caps (self, vf_caps); + + gst_caps_unref (vf_caps); + vf_caps = gst_uvc_h264_src_fixate_caps (self, v4l_pad, v4l_caps, + trans_caps, FALSE); + + /* If we couldn't find a suitable vf cap, try the jpeg2raw pipeline */ + if (!vf_caps && self->main_format == UVC_H264_SRC_FORMAT_H264) { + GstCaps *jpg_caps; + + jpg2raw = TRUE; + jpg_caps = _transform_caps (self, trans_caps, self->jpeg_decoder_name); + + vf_caps = gst_uvc_h264_src_fixate_caps (self, v4l_pad, v4l_caps, + jpg_caps, FALSE); + gst_caps_unref (jpg_caps); + } + gst_caps_unref (trans_caps); + if (vf_caps) { + vf_struct = gst_caps_get_structure (vf_caps, 0); + } else { + GST_WARNING_OBJECT (self, "Could not negotiate vfsrc caps format"); + gst_object_unref (v4l_pad); + gst_caps_unref (v4l_caps); + goto error_remove; + } + } + GST_DEBUG_OBJECT (self, "Fixated vfsrc caps : %" GST_PTR_FORMAT, vf_caps); + gst_object_unref (v4l_pad); + gst_caps_unref (v4l_caps); + + if (vf_caps && vid_caps && + !gst_structure_has_name (vid_struct, "video/x-h264")) { + /* Allow for vfsrc+vidsrc to both be raw or jpeg */ + if (gst_structure_has_name (vid_struct, "image/jpeg") && + gst_structure_has_name (vf_struct, "image/jpeg")) { + self->main_format = UVC_H264_SRC_FORMAT_JPG; + self->secondary_format = UVC_H264_SRC_FORMAT_JPG; + type = ENCODED_ENCODED; + } else if (!gst_structure_has_name (vid_struct, "image/jpeg") && + !gst_structure_has_name (vf_struct, "image/jpeg")) { + self->main_format = UVC_H264_SRC_FORMAT_RAW; + self->secondary_format = UVC_H264_SRC_FORMAT_RAW; + type = RAW_RAW; + } else { + goto error_remove; + } + } else if (vf_caps && vid_caps) { + guint32 smallest_frame_interval; + + if (!_extract_caps_info (vf_struct, &self->secondary_width, + &self->secondary_height, &self->secondary_frame_interval)) + goto error_remove; + + if (jpg2raw == FALSE && gst_structure_has_name (vf_struct, "image/jpeg")) { + type = H264_JPG; + self->secondary_format = UVC_H264_SRC_FORMAT_JPG; + } else { + if (jpg2raw) { + type = H264_JPG2RAW; + self->secondary_format = UVC_H264_SRC_FORMAT_JPG; + } else { + type = H264_RAW; + self->secondary_format = UVC_H264_SRC_FORMAT_RAW; + } + } + 
smallest_frame_interval = MIN (self->main_frame_interval, + self->secondary_frame_interval); + /* Just to avoid a potential division by zero, set interval to 30 fps */ + if (smallest_frame_interval == 0) + smallest_frame_interval = 333333; + + /* Frame interval is in 100ns units */ + src_caps = gst_caps_new_simple ("image/jpeg", + "width", G_TYPE_INT, self->secondary_width, + "height", G_TYPE_INT, self->secondary_height, + "framerate", GST_TYPE_FRACTION, + NSEC_PER_SEC / smallest_frame_interval, 100, NULL); + } else if (vf_caps || vid_caps) { + self->secondary_format = UVC_H264_SRC_FORMAT_NONE; + if (vid_struct && gst_structure_has_name (vid_struct, "video/x-h264")) { + type = ENCODED_NONE; + } else if (vid_struct && gst_structure_has_name (vid_struct, "image/jpeg")) { + type = ENCODED_NONE; + self->main_format = UVC_H264_SRC_FORMAT_JPG; + } else if (vf_struct && gst_structure_has_name (vf_struct, "image/jpeg")) { + type = NONE_ENCODED; + self->secondary_format = UVC_H264_SRC_FORMAT_JPG; + } else if (vid_struct) { + type = RAW_NONE; + self->main_format = UVC_H264_SRC_FORMAT_RAW; + } else if (vf_struct) { + type = NONE_RAW; + self->secondary_format = UVC_H264_SRC_FORMAT_RAW; + } else { + g_assert_not_reached (); + } + } else { + type = NONE_NONE; + self->main_format = UVC_H264_SRC_FORMAT_NONE; + self->secondary_format = UVC_H264_SRC_FORMAT_NONE; + } + + switch (type) { + case NONE_NONE: + GST_DEBUG_OBJECT (self, "None+None"); + vf_pad = gst_element_get_static_pad (self->v4l2_src, "src"); + break; + case RAW_NONE: + GST_DEBUG_OBJECT (self, "Raw+None"); + self->vid_colorspace = gst_element_factory_make (self->colorspace_name, + NULL); + if (!self->vid_colorspace || + !gst_bin_add (GST_BIN (self), self->vid_colorspace)) + goto error_remove; + gst_object_ref (self->vid_colorspace); + if (!gst_element_link (self->v4l2_src, self->vid_colorspace)) + goto error_remove_all; + vid_pad = gst_element_get_static_pad (self->vid_colorspace, "src"); + break; + case NONE_RAW: + GST_DEBUG_OBJECT (self, "None+Raw"); + self->vf_colorspace = gst_element_factory_make (self->colorspace_name, + NULL); + if (!self->vf_colorspace || + !gst_bin_add (GST_BIN (self), self->vf_colorspace)) + goto error_remove; + gst_object_ref (self->vf_colorspace); + if (!gst_element_link (self->v4l2_src, self->vf_colorspace)) + goto error_remove_all; + vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src"); + break; + case ENCODED_NONE: + GST_DEBUG_OBJECT (self, "Encoded+None"); + vid_pad = gst_element_get_static_pad (self->v4l2_src, "src"); + break; + case NONE_ENCODED: + GST_DEBUG_OBJECT (self, "None+Encoded"); + vf_pad = gst_element_get_static_pad (self->v4l2_src, "src"); + break; + case H264_JPG: + GST_DEBUG_OBJECT (self, "H264+JPG"); + self->mjpg_demux = gst_element_factory_make ("uvch264_mjpgdemux", NULL); + if (!self->mjpg_demux || !gst_bin_add (GST_BIN (self), self->mjpg_demux)) + goto error_remove; + gst_object_ref (self->mjpg_demux); + g_object_set (self->mjpg_demux, "device-fd", self->v4l2_fd, + "num-clock-samples", self->num_clock_samples, NULL); + if (!gst_element_link_filtered (self->v4l2_src, self->mjpg_demux, + src_caps)) + goto error_remove_all; + vid_pad = gst_element_get_static_pad (self->mjpg_demux, "h264"); + vf_pad = gst_element_get_static_pad (self->mjpg_demux, "jpeg"); + break; + case H264_RAW: + GST_DEBUG_OBJECT (self, "H264+Raw"); + self->mjpg_demux = gst_element_factory_make ("uvch264_mjpgdemux", NULL); + self->vf_colorspace = gst_element_factory_make (self->colorspace_name, + NULL); + if 
(!self->mjpg_demux || !self->vf_colorspace) + goto error_remove; + if (!gst_bin_add (GST_BIN (self), self->mjpg_demux)) + goto error_remove; + gst_object_ref (self->mjpg_demux); + g_object_set (self->mjpg_demux, "device-fd", self->v4l2_fd, + "num-clock-samples", self->num_clock_samples, NULL); + if (!gst_bin_add (GST_BIN (self), self->vf_colorspace)) { + gst_object_unref (self->vf_colorspace); + self->vf_colorspace = NULL; + goto error_remove_all; + } + gst_object_ref (self->vf_colorspace); + if (!gst_element_link_filtered (self->v4l2_src, self->mjpg_demux, + src_caps)) + goto error_remove_all; + if (!gst_element_link_pads (self->mjpg_demux, "yuy2", + self->vf_colorspace, "sink")) + goto error_remove_all; + vid_pad = gst_element_get_static_pad (self->mjpg_demux, "h264"); + vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src"); + break; + case H264_JPG2RAW: + GST_DEBUG_OBJECT (self, "H264+Raw(jpegdec)"); + self->mjpg_demux = gst_element_factory_make ("uvch264_mjpgdemux", NULL); + self->jpeg_dec = gst_element_factory_make (self->jpeg_decoder_name, NULL); + self->vf_colorspace = gst_element_factory_make (self->colorspace_name, + NULL); + if (!self->mjpg_demux || !self->jpeg_dec || !self->vf_colorspace) + goto error_remove; + if (!gst_bin_add (GST_BIN (self), self->mjpg_demux)) + goto error_remove; + gst_object_ref (self->mjpg_demux); + g_object_set (self->mjpg_demux, "device-fd", self->v4l2_fd, + "num-clock-samples", self->num_clock_samples, NULL); + if (!gst_bin_add (GST_BIN (self), self->jpeg_dec)) { + gst_object_unref (self->jpeg_dec); + self->jpeg_dec = NULL; + gst_object_unref (self->vf_colorspace); + self->vf_colorspace = NULL; + goto error_remove_all; + } + gst_object_ref (self->jpeg_dec); + if (!gst_bin_add (GST_BIN (self), self->vf_colorspace)) { + gst_object_unref (self->vf_colorspace); + self->vf_colorspace = NULL; + goto error_remove_all; + } + gst_object_ref (self->vf_colorspace); + if (!gst_element_link_filtered (self->v4l2_src, self->mjpg_demux, + src_caps)) + goto error_remove_all; + if (!gst_element_link_pads (self->mjpg_demux, "jpeg", self->jpeg_dec, + "sink")) + goto error_remove_all; + if (!gst_element_link (self->jpeg_dec, self->vf_colorspace)) + goto error_remove_all; + vid_pad = gst_element_get_static_pad (self->mjpg_demux, "h264"); + vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src"); + break; + case RAW_RAW: + { + GstElement *tee = NULL; + + GST_DEBUG_OBJECT (self, "Raw+Raw"); + tee = gst_element_factory_make ("tee", NULL); + if (!tee || !gst_bin_add (GST_BIN (self), tee)) { + if (tee) + gst_object_unref (tee); + goto error_remove; + } + self->vf_colorspace = gst_element_factory_make (self->colorspace_name, + NULL); + self->vid_colorspace = gst_element_factory_make (self->colorspace_name, + NULL); + if (!self->vf_colorspace || !self->vid_colorspace) + goto error_remove; + if (!gst_bin_add (GST_BIN (self), self->vf_colorspace)) + goto error_remove; + gst_object_ref (self->vf_colorspace); + if (!gst_bin_add (GST_BIN (self), self->vid_colorspace)) { + gst_object_unref (self->vid_colorspace); + self->vid_colorspace = NULL; + goto error_remove_all; + } + gst_object_ref (self->vid_colorspace); + if (!gst_element_link (self->v4l2_src, tee)) + goto error_remove_all; + if (!gst_element_link (tee, self->vf_colorspace)) + goto error_remove_all; + if (!gst_element_link (tee, self->vid_colorspace)) + goto error_remove_all; + vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src"); + vid_pad = gst_element_get_static_pad (self->vid_colorspace, 
"src"); + } + break; + case ENCODED_ENCODED: + { + GstElement *tee = NULL; + + GST_DEBUG_OBJECT (self, "Encoded+Encoded"); + tee = gst_element_factory_make ("tee", NULL); + if (!tee || !gst_bin_add (GST_BIN (self), tee)) { + if (tee) + gst_object_unref (tee); + goto error_remove; + } + if (!gst_element_link (self->v4l2_src, tee)) + goto error_remove_all; + vf_pad = gst_element_get_request_pad (tee, "src%d"); + vid_pad = gst_element_get_request_pad (tee, "src%d"); + } + break; + } + + if (!gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc), vid_pad) || + !gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad)) + goto error_remove_all; + if (vid_pad) + gst_object_unref (vid_pad); + if (vf_pad) + gst_object_unref (vf_pad); + vid_pad = vf_pad = NULL; + + if (vf_caps) + gst_caps_unref (vf_caps); + if (vid_caps) + gst_caps_unref (vid_caps); + if (src_caps) + gst_caps_unref (src_caps); + vf_caps = vid_caps = src_caps = NULL; + + /* Sync children states, in sink to source order */ + if (self->vid_colorspace && + !gst_element_sync_state_with_parent (self->vid_colorspace)) + goto error_remove_all; + if (self->vf_colorspace && + !gst_element_sync_state_with_parent (self->vf_colorspace)) + goto error_remove_all; + if (self->jpeg_dec && !gst_element_sync_state_with_parent (self->jpeg_dec)) + goto error_remove_all; + if (self->mjpg_demux && + !gst_element_sync_state_with_parent (self->mjpg_demux)) + goto error_remove_all; + if (self->v4l2_src && !gst_element_sync_state_with_parent (self->v4l2_src)) + goto error_remove_all; + + /* Sync any remaining children states with bin's state */ + iter = gst_bin_iterate_elements (GST_BIN (self)); + iter_done = FALSE; + while (!iter_done) { + GstElement *item = NULL; + + switch (gst_iterator_next (iter, (gpointer *) & item)) { + case GST_ITERATOR_OK: + if (!gst_element_sync_state_with_parent (item)) { + gst_object_unref (item); + gst_iterator_free (iter); + goto error_remove_all; + } + gst_object_unref (item); + break; + case GST_ITERATOR_RESYNC: + gst_iterator_resync (iter); + break; + case GST_ITERATOR_ERROR: + iter_done = TRUE; + break; + case GST_ITERATOR_DONE: + iter_done = TRUE; + break; + } + } + gst_iterator_free (iter); + + self->reconfiguring = FALSE; + return TRUE; + +error_remove_all: + gst_uvc_h264_src_destroy_pipeline (self, FALSE); +error_remove: + gst_element_set_state (self->v4l2_src, GST_STATE_NULL); + gst_bin_remove (GST_BIN (self), self->v4l2_src); + +error: + if (self->v4l2_src) + gst_object_unref (self->v4l2_src); + self->v4l2_src = NULL; + self->v4l2_fd = -1; + self->h264_unit_id = 0; + + if (self->mjpg_demux) + gst_object_unref (self->mjpg_demux); + self->mjpg_demux = NULL; + if (self->jpeg_dec) + gst_object_unref (self->jpeg_dec); + self->jpeg_dec = NULL; + if (self->vid_colorspace) + gst_object_unref (self->vid_colorspace); + self->vid_colorspace = NULL; + if (self->vf_colorspace) + gst_object_unref (self->vf_colorspace); + self->vf_colorspace = NULL; + + if (src_caps) + gst_caps_unref (src_caps); + + if (vf_caps) + gst_caps_unref (vf_caps); + if (vid_caps) + gst_caps_unref (vid_caps); + + if (vid_pad) + gst_object_unref (vid_pad); + if (vf_pad) + gst_object_unref (vf_pad); + + self->reconfiguring = FALSE; + return FALSE; +} + +static GstCaps * +gst_uvc_h264_src_getcaps (GstPad * pad) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (GST_OBJECT_PARENT (pad)); + GstCaps *template = NULL; + GstCaps *result = NULL; + + if (pad == self->vfsrc) + template = gst_static_pad_template_get_caps (&vfsrc_template); + else if (pad == 
self->vidsrc) + template = gst_static_pad_template_get_caps (&vidsrc_template); + else + template = gst_caps_new_empty (); + + if (self->v4l2_src) { + GstPad *v4l_pad = gst_element_get_static_pad (self->v4l2_src, "src"); + GstCaps *v4l_caps = gst_pad_get_caps (v4l_pad); + GstCaps *new_caps = gst_uvc_h264_src_transform_caps (self, v4l_caps); + + result = gst_caps_intersect (new_caps, template); + gst_object_unref (v4l_pad); + gst_caps_unref (v4l_caps); + gst_caps_unref (new_caps); + gst_caps_unref (template); + } else { + result = template; + } + + return result; +} + +static gboolean +gst_uvc_h264_src_set_mode (GstBaseCameraSrc * bcamsrc, GstCameraBinMode mode) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (bcamsrc); + + GST_DEBUG_OBJECT (self, "set mode to %d", mode); + + return (mode == MODE_VIDEO); +} + +static gboolean +gst_uvc_h264_src_start_capture (GstBaseCameraSrc * camerasrc) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (camerasrc); + gboolean ret = TRUE; + + GST_DEBUG_OBJECT (self, "start capture"); + + if (!self->started) { + self->started = TRUE; + if (GST_STATE (self) >= GST_STATE_READY) { + ret = gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self)); + if (!ret) { + GST_DEBUG_OBJECT (self, "Could not start capture"); + self->started = FALSE; + gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self)); + } + } + } + + return ret; +} + +static void +gst_uvc_h264_src_stop_capture (GstBaseCameraSrc * camerasrc) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (camerasrc); + + GST_DEBUG_OBJECT (self, "stop capture"); + + if (self->started) { + self->started = FALSE; + if (GST_STATE (self) >= GST_STATE_READY) + gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self)); + gst_base_camera_src_finish_capture (camerasrc); + } +} + +static void +gst_uvc_h264_src_pad_linking_cb (GstPad * pad, + GstPad * peer, gpointer user_data) +{ + GstUvcH264Src *self = GST_UVC_H264_SRC (user_data); + gchar *pad_name = gst_pad_get_name (pad); + + GST_DEBUG_OBJECT (self, "Pad %s was (un)linked. Renegotiating", pad_name); + g_free (pad_name); + if (GST_STATE (self) >= GST_STATE_READY) + gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self)); +} + + +static GstStateChangeReturn +gst_uvc_h264_src_change_state (GstElement * element, GstStateChange trans) +{ + GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; + GstUvcH264Src *self = GST_UVC_H264_SRC (element); + + switch (trans) { + case GST_STATE_CHANGE_NULL_TO_READY: + if (!ensure_v4l2src (self)) { + ret = GST_STATE_CHANGE_FAILURE; + goto end; + } + gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED); + break; + case GST_STATE_CHANGE_READY_TO_PAUSED: + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + if (!self->v4l2_src) + gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self)); + break; + default: + break; + } + + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, trans); + + if (ret == GST_STATE_CHANGE_FAILURE) + goto end; + + switch (trans) { + case GST_STATE_CHANGE_PAUSED_TO_READY: + self->vid_newseg = FALSE; + self->vf_newseg = FALSE; + break; + case GST_STATE_CHANGE_READY_TO_NULL: + gst_uvc_h264_src_destroy_pipeline (self, TRUE); + break; + default: + break; + } + + +end: + return ret; +} diff --git a/sys/uvch264/gstuvch264_src.h b/sys/uvch264/gstuvch264_src.h new file mode 100644 index 000000000..3eb846bc0 --- /dev/null +++ b/sys/uvch264/gstuvch264_src.h @@ -0,0 +1,166 @@ +/* + * GStreamer + * + * Copyright (C) 2012 Cisco Systems, Inc. 
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + + +#ifndef __GST_UVC_H264_SRC_H__ +#define __GST_UVC_H264_SRC_H__ + +#ifdef HAVE_CONFIG_H +# include <config.h> +#endif + +#include <gst/gst.h> +#include <gst/basecamerabinsrc/gstbasecamerasrc.h> +#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB) +#include <libusb.h> +#endif + +#include "uvc_h264.h" + +G_BEGIN_DECLS +#define GST_TYPE_UVC_H264_SRC \ + (gst_uvc_h264_src_get_type()) +#define GST_UVC_H264_SRC(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_UVC_H264_SRC, GstUvcH264Src)) +#define GST_UVC_H264_SRC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_UVC_H264_SRC, GstUvcH264SrcClass)) +#define GST_IS_UVC_H264_SRC(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_UVC_H264_SRC)) +#define GST_IS_UVC_H264_SRC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_UVC_H264_SRC)) + GType gst_uvc_h264_src_get_type (void); + +typedef struct _GstUvcH264Src GstUvcH264Src; +typedef struct _GstUvcH264SrcClass GstUvcH264SrcClass; + +enum GstVideoRecordingStatus { + GST_VIDEO_RECORDING_STATUS_DONE, + GST_VIDEO_RECORDING_STATUS_STARTING, + GST_VIDEO_RECORDING_STATUS_RUNNING, + GST_VIDEO_RECORDING_STATUS_FINISHING +}; + +enum { + QP_I_FRAME = 0, + QP_P_FRAME, + QP_B_FRAME, + QP_FRAMES +}; + +typedef enum { + UVC_H264_SRC_FORMAT_NONE, + UVC_H264_SRC_FORMAT_JPG, + UVC_H264_SRC_FORMAT_H264, + UVC_H264_SRC_FORMAT_RAW +} GstUvcH264SrcFormat; + +/** + * GstUcH264Src: + * + */ +struct _GstUvcH264Src +{ + GstBaseCameraSrc parent; + + GstPad *vfsrc; + GstPad *imgsrc; + GstPad *vidsrc; + + /* source elements */ + GstElement *v4l2_src; + GstElement *mjpg_demux; + GstElement *jpeg_dec; + GstElement *vid_colorspace; + GstElement *vf_colorspace; + + GstUvcH264SrcFormat main_format; + guint16 main_width; + guint16 main_height; + guint32 main_frame_interval; + UvcH264StreamFormat main_stream_format; + guint16 main_profile; + GstUvcH264SrcFormat secondary_format; + guint16 secondary_width; + guint16 secondary_height; + guint32 secondary_frame_interval; + + int v4l2_fd; + guint8 h264_unit_id; +#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB) + libusb_context *usb_ctx; +#endif + + GstPadEventFunction srcpad_event_func; + GstEvent *key_unit_event; + GstSegment segment; + + gboolean started; + + /* When restarting the source */ + gboolean reconfiguring; + gboolean vid_newseg; + gboolean vf_newseg; + + gchar *colorspace_name; + gchar *jpeg_decoder_name; + int num_clock_samples; + + /* v4l2src proxied properties */ + guint32 num_buffers; + gchar *device; + + /* Static controls */ + guint32 initial_bitrate; + guint16 slice_units; + UvcH264SliceMode slice_mode; + guint16 iframe_period; + UvcH264UsageType usage_type; + UvcH264Entropy entropy; + gboolean enable_sei; + guint8 
num_reorder_frames; + gboolean preview_flipped; + guint16 leaky_bucket_size; + + /* Dynamic controls */ + UvcH264RateControl rate_control; + gboolean fixed_framerate; + guint8 level_idc; + guint32 peak_bitrate; + guint32 average_bitrate; + gint8 min_qp[QP_FRAMES]; + gint8 max_qp[QP_FRAMES]; + guint8 ltr_buffer_size; + guint8 ltr_encoder_control; +}; + + +/** + * GstUvcH264SrcClass: + * + */ +struct _GstUvcH264SrcClass +{ + GstBaseCameraSrcClass parent; +}; + + +#endif /* __GST_UVC_H264_SRC_H__ */ diff --git a/sys/uvch264/uvc_h264.c b/sys/uvch264/uvc_h264.c new file mode 100644 index 000000000..1c26ae437 --- /dev/null +++ b/sys/uvch264/uvc_h264.c @@ -0,0 +1,122 @@ +/* + * GStreamer + * + * Copyright (C) 2012 Cisco Systems, Inc. + * Author: Youness Alaoui <youness.alaoui@collabora.co.uk> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + + +#ifdef HAVE_CONFIG_H +# include <config.h> +#endif + +#include "uvc_h264.h" + +GType +uvc_h264_slicemode_get_type (void) +{ + static GType type = 0; + + static const GEnumValue types[] = { + {UVC_H264_SLICEMODE_IGNORED, "Ignored", "ignored"}, + {UVC_H264_SLICEMODE_BITSPERSLICE, "Bits per slice", "bits/slice"}, + {UVC_H264_SLICEMODE_MBSPERSLICE, "MBs per Slice", "MBs/slice"}, + {UVC_H264_SLICEMODE_SLICEPERFRAME, "Slice Per Frame", "slice/frame"}, + {0, NULL, NULL} + }; + + if (!type) { + type = g_enum_register_static ("UvcH264SliceMode", types); + } + return type; +} + +GType +uvc_h264_usagetype_get_type (void) +{ + static GType type = 0; + + static const GEnumValue types[] = { + {UVC_H264_USAGETYPE_REALTIME, "Realtime (video conferencing)", "realtime"}, + {UVC_H264_USAGETYPE_BROADCAST, "Broadcast", "broadcast"}, + {UVC_H264_USAGETYPE_STORAGE, "Storage", "storage"}, + {UVC_H264_USAGETYPE_UCCONFIG_0, "UCConfig 0", "ucconfig0"}, + {UVC_H264_USAGETYPE_UCCONFIG_1, "UCConfig 1", "ucconfig1"}, + {UVC_H264_USAGETYPE_UCCONFIG_2Q, "UCConfig 2Q", "ucconfig2q"}, + {UVC_H264_USAGETYPE_UCCONFIG_2S, "UCConfig 2S", "ucconfig2s"}, + {UVC_H264_USAGETYPE_UCCONFIG_3, "UCConfig 3", "ucconfig3"}, + {0, NULL, NULL} + }; + + if (!type) { + type = g_enum_register_static ("UvcH264UsageType", types); + } + return type; +} + +GType +uvc_h264_ratecontrol_get_type (void) +{ + static GType type = 0; + + static const GEnumValue types[] = { + {UVC_H264_RATECONTROL_CBR, "Constant bit rate", "cbr"}, + {UVC_H264_RATECONTROL_VBR, "Variable bit rate", "vbr"}, + {UVC_H264_RATECONTROL_CONST_QP, "Constant QP", "qp"}, + {0, NULL, NULL} + }; + + if (!type) { + type = g_enum_register_static ("UvcH264RateControl", types); + } + return type; +} + +GType +uvc_h264_streamformat_get_type (void) +{ + static GType type = 0; + + static const GEnumValue types[] = { + {UVC_H264_STREAMFORMAT_ANNEXB, "Byte stream format (Annex B)", "byte"}, + {UVC_H264_STREAMFORMAT_NAL, "NAL stream format", "nal"}, + {0, NULL, 
NULL} + }; + + if (!type) { + type = g_enum_register_static ("UvcH264StreamFormat", types); + } + return type; +} + +GType +uvc_h264_entropy_get_type (void) +{ + static GType type = 0; + + static const GEnumValue types[] = { + {UVC_H264_ENTROPY_CAVLC, "CAVLC", "cavlc"}, + {UVC_H264_ENTROPY_CABAC, "CABAC", "cabac"}, + {0, NULL, NULL} + }; + + if (!type) { + type = g_enum_register_static ("UvcH264Entropy", types); + } + return type; +} diff --git a/sys/uvch264/uvc_h264.h b/sys/uvch264/uvc_h264.h new file mode 100644 index 000000000..d27104ecf --- /dev/null +++ b/sys/uvch264/uvc_h264.h @@ -0,0 +1,335 @@ +/* + * uvc_h264.h - Definitions of the UVC H.264 Payload specification Version 1.0 + * + * Copyright (c) 2011 USB Implementers Forum, Inc. + * + * Modification into glib-like header by : + * Copyright (C) 2012 Cisco Systems, Inc. + * Author: Youness Alaoui <youness.alaoui@collabora.co.uk> + * + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. 
+ */ + +#ifndef _UVC_H264_H_ +#define _UVC_H264_H_ + +/* Header File for the little-endian platform */ + +#include <glib.h> +#include <glib-object.h> + +/* bmHints defines */ + +#define UVC_H264_BMHINTS_RESOLUTION (0x0001) +#define UVC_H264_BMHINTS_PROFILE (0x0002) +#define UVC_H264_BMHINTS_RATECONTROL (0x0004) +#define UVC_H264_BMHINTS_USAGE (0x0008) +#define UVC_H264_BMHINTS_SLICEMODE (0x0010) +#define UVC_H264_BMHINTS_SLICEUNITS (0x0020) +#define UVC_H264_BMHINTS_MVCVIEW (0x0040) +#define UVC_H264_BMHINTS_TEMPORAL (0x0080) +#define UVC_H264_BMHINTS_SNR (0x0100) +#define UVC_H264_BMHINTS_SPATIAL (0x0200) +#define UVC_H264_BMHINTS_SPATIAL_RATIO (0x0400) +#define UVC_H264_BMHINTS_FRAME_INTERVAL (0x0800) +#define UVC_H264_BMHINTS_LEAKY_BKT_SIZE (0x1000) +#define UVC_H264_BMHINTS_BITRATE (0x2000) +#define UVC_H264_BMHINTS_ENTROPY (0x4000) +#define UVC_H264_BMHINTS_IFRAMEPERIOD (0x8000) + + +#define UVC_H264_QP_STEPS_I_FRAME_TYPE (0x01) +#define UVC_H264_QP_STEPS_P_FRAME_TYPE (0x02) +#define UVC_H264_QP_STEPS_B_FRAME_TYPE (0x04) +#define UVC_H264_QP_STEPS_ALL_FRAME_TYPES (UVC_H264_QP_STEPS_I_FRAME_TYPE | \ + UVC_H264_QP_STEPS_P_FRAME_TYPE | UVC_H264_QP_STEPS_B_FRAME_TYPE) + +/* wSliceMode defines */ + +typedef enum +{ + UVC_H264_SLICEMODE_IGNORED = 0x0000, + UVC_H264_SLICEMODE_BITSPERSLICE = 0x0001, + UVC_H264_SLICEMODE_MBSPERSLICE = 0x0002, + UVC_H264_SLICEMODE_SLICEPERFRAME = 0x0003 +} UvcH264SliceMode; + +#define UVC_H264_SLICEMODE_TYPE (uvc_h264_slicemode_get_type()) + +GType uvc_h264_slicemode_get_type (void); + +/* bUsageType defines */ + +typedef enum { + UVC_H264_USAGETYPE_REALTIME = 0x01, + UVC_H264_USAGETYPE_BROADCAST = 0x02, + UVC_H264_USAGETYPE_STORAGE = 0x03, + UVC_H264_USAGETYPE_UCCONFIG_0 = 0x04, + UVC_H264_USAGETYPE_UCCONFIG_1 = 0x05, + UVC_H264_USAGETYPE_UCCONFIG_2Q = 0x06, + UVC_H264_USAGETYPE_UCCONFIG_2S = 0x07, + UVC_H264_USAGETYPE_UCCONFIG_3 = 0x08, +} UvcH264UsageType; + +#define UVC_H264_USAGETYPE_TYPE (uvc_h264_usagetype_get_type()) + +GType uvc_h264_usagetype_get_type (void); + +/* bRateControlMode defines */ + +typedef enum { + UVC_H264_RATECONTROL_CBR = 0x01, + UVC_H264_RATECONTROL_VBR = 0x02, + UVC_H264_RATECONTROL_CONST_QP = 0x03, +} UvcH264RateControl; + +#define UVC_H264_RATECONTROL_FIXED_FRM_FLG (0x10) + +#define UVC_H264_RATECONTROL_TYPE (uvc_h264_ratecontrol_get_type()) + +GType uvc_h264_ratecontrol_get_type (void); + +/* bStreamFormat defines */ + +typedef enum { + UVC_H264_STREAMFORMAT_ANNEXB = 0x00, + UVC_H264_STREAMFORMAT_NAL = 0x01, +} UvcH264StreamFormat; + +#define UVC_H264_STREAMFORMAT_TYPE (uvc_h264_streamformat_get_type()) + +GType uvc_h264_streamformat_get_type (void); + +/* bEntropyCABAC defines */ + +typedef enum { + UVC_H264_ENTROPY_CAVLC = 0x00, + UVC_H264_ENTROPY_CABAC = 0x01, +} UvcH264Entropy; + +#define UVC_H264_ENTROPY_TYPE (uvc_h264_entropy_get_type()) + +GType uvc_h264_entropy_get_type (void); + +/* bProfile defines */ +#define UVC_H264_PROFILE_CONSTRAINED_BASELINE 0x4240 +#define UVC_H264_PROFILE_BASELINE 0x4200 +#define UVC_H264_PROFILE_MAIN 0x4D00 +#define UVC_H264_PROFILE_HIGH 0x6400 + +/* bTimingstamp defines */ + +#define UVC_H264_TIMESTAMP_SEI_DISABLE (0x00) +#define UVC_H264_TIMESTAMP_SEI_ENABLE (0x01) + +/* bPreviewFlipped defines */ + +#define UVC_H264_PREFLIPPED_DISABLE (0x00) +#define UVC_H264_PREFLIPPED_HORIZONTAL (0x01) + +/* wPicType defines */ +#define UVC_H264_PICTYPE_I_FRAME (0x00) +#define UVC_H264_PICTYPE_IDR (0x01) +#define UVC_H264_PICTYPE_IDR_WITH_PPS_SPS (0x02) + + +/* wLayerID Macro */ + +/* wLayerID + 
|------------+------------+------------+----------------+------------| + | Reserved | StreamID | QualityID | DependencyID | TemporalID | + | (3 bits) | (3 bits) | (3 bits) | (4 bits) | (3 bits) | + |------------+------------+------------+----------------+------------| + |15 13|12 10|9 7|6 3|2 0| + |------------+------------+------------+----------------+------------| +*/ + +#define xLayerID(stream_id, quality_id, dependency_id, temporal_id) \ + ((((stream_id) & 7) << 10) | \ + (((quality_id) & 7) << 7) | \ + (((dependency_id) & 15) << 3) | \ + ((temporal_id) & 7)) + +/* id extraction from wLayerID */ + +#define xStream_id(layer_id) (((layer_id) >> 10) & 7) +#define xQuality_id(layer_id) (((layer_id) >> 7) & 7) +#define xDependency_id(layer_id) (((layer_id) >> 3) & 15) +#define xTemporal_id(layer_id) ((layer_id)&7) + +/* UVC H.264 control selectors */ + +typedef enum _uvcx_control_selector_t +{ + UVCX_VIDEO_CONFIG_PROBE = 0x01, + UVCX_VIDEO_CONFIG_COMMIT = 0x02, + UVCX_RATE_CONTROL_MODE = 0x03, + UVCX_TEMPORAL_SCALE_MODE = 0x04, + UVCX_SPATIAL_SCALE_MODE = 0x05, + UVCX_SNR_SCALE_MODE = 0x06, + UVCX_LTR_BUFFER_SIZE_CONTROL = 0x07, + UVCX_LTR_PICTURE_CONTROL = 0x08, + UVCX_PICTURE_TYPE_CONTROL = 0x09, + UVCX_VERSION = 0x0A, + UVCX_ENCODER_RESET = 0x0B, + UVCX_FRAMERATE_CONFIG = 0x0C, + UVCX_VIDEO_ADVANCE_CONFIG = 0x0D, + UVCX_BITRATE_LAYERS = 0x0E, + UVCX_QP_STEPS_LAYERS = 0x0F, +} uvcx_control_selector_t; + + +typedef struct _uvcx_video_config_probe_commit_t +{ + guint32 dwFrameInterval; + guint32 dwBitRate; + guint16 bmHints; + guint16 wConfigurationIndex; + guint16 wWidth; + guint16 wHeight; + guint16 wSliceUnits; + guint16 wSliceMode; + guint16 wProfile; + guint16 wIFramePeriod; + guint16 wEstimatedVideoDelay; + guint16 wEstimatedMaxConfigDelay; + guint8 bUsageType; + guint8 bRateControlMode; + guint8 bTemporalScaleMode; + guint8 bSpatialScaleMode; + guint8 bSNRScaleMode; + guint8 bStreamMuxOption; + guint8 bStreamFormat; + guint8 bEntropyCABAC; + guint8 bTimestamp; + guint8 bNumOfReorderFrames; + guint8 bPreviewFlipped; + guint8 bView; + guint8 bReserved1; + guint8 bReserved2; + guint8 bStreamID; + guint8 bSpatialLayerRatio; + guint16 wLeakyBucketSize; +} __attribute__((packed)) uvcx_video_config_probe_commit_t; + + +typedef struct _uvcx_rate_control_mode_t +{ + guint16 wLayerID; + guint8 bRateControlMode; +} __attribute__((packed)) uvcx_rate_control_mode_t; + + +typedef struct _uvcx_temporal_scale_mode_t +{ + guint16 wLayerID; + guint8 bTemporalScaleMode; +} __attribute__((packed)) uvcx_temporal_scale_mode_t; + + +typedef struct _uvcx_spatial_scale_mode_t +{ + guint16 wLayerID; + guint8 bSpatialScaleMode; +} __attribute__((packed)) uvcx_spatial_scale_mode_t; + + +typedef struct _uvcx_snr_scale_mode_t +{ + guint16 wLayerID; + guint8 bSNRScaleMode; + guint8 bMGSSublayerMode; +} __attribute__((packed)) uvcx_snr_scale_mode_t; + + +typedef struct _uvcx_ltr_buffer_size_control_t +{ + guint16 wLayerID; + guint8 bLTRBufferSize; + guint8 bLTREncoderControl; +} __attribute__((packed)) uvcx_ltr_buffer_size_control_t; + +typedef struct _uvcx_ltr_picture_control +{ + guint16 wLayerID; + guint8 bPutAtPositionInLTRBuffer; + guint8 bEncodeUsingLTR; +} __attribute__((packed)) uvcx_ltr_picture_control; + + +typedef struct _uvcx_picture_type_control_t +{ + guint16 wLayerID; + guint16 wPicType; +} __attribute__((packed)) uvcx_picture_type_control_t; + + +typedef struct _uvcx_version_t +{ + guint16 wVersion; +} __attribute__((packed)) uvcx_version_t; + + +typedef struct _uvcx_encoder_reset +{ + guint16 
wLayerID; +} __attribute__((packed)) uvcx_encoder_reset; + + +typedef struct _uvcx_framerate_config_t +{ + guint16 wLayerID; + guint32 dwFrameInterval; +} __attribute__((packed)) uvcx_framerate_config_t; + + +typedef struct _uvcx_video_advance_config_t +{ + guint16 wLayerID; + guint32 dwMb_max; + guint8 blevel_idc; + guint8 bReserved; +} __attribute__((packed)) uvcx_video_advance_config_t; + + +typedef struct _uvcx_bitrate_layers_t +{ + guint16 wLayerID; + guint32 dwPeakBitrate; + guint32 dwAverageBitrate; +} __attribute__((packed)) uvcx_bitrate_layers_t; + + +typedef struct _uvcx_qp_steps_layers_t +{ + guint16 wLayerID; + guint8 bFrameType; + guint8 bMinQp; + guint8 bMaxQp; +} __attribute__((packed)) uvcx_qp_steps_layers_t; + + +#ifdef _WIN32 +// GUID of the UVC H.264 extension unit: {A29E7641-DE04-47E3-8B2B-F4341AFF003B} +DEFINE_GUID(GUID_UVCX_H264_XU, 0xA29E7641, 0xDE04, 0x47E3, 0x8B, 0x2B, 0xF4, 0x34, 0x1A, 0xFF, 0x00, 0x3B); +#else +#define GUID_UVCX_H264_XU \ + {0x41, 0x76, 0x9e, 0xa2, 0x04, 0xde, 0xe3, 0x47, 0x8b, 0x2b, 0xF4, 0x34, 0x1A, 0xFF, 0x00, 0x3B} +#endif + +#endif /*_UVC_H264_H_*/ diff --git a/tests/check/Makefile.am b/tests/check/Makefile.am index 84cee4d25..57adac24d 100644 --- a/tests/check/Makefile.am +++ b/tests/check/Makefile.am @@ -156,6 +156,20 @@ else check_curl = endif +if USE_UVCH264 +check_uvch264=elements/uvch264demux +else +check_uvch264= +endif +uvch264_dist_data = elements/uvch264demux_data/valid_h264_jpg.mjpg \ + elements/uvch264demux_data/valid_h264_jpg.jpg \ + elements/uvch264demux_data/valid_h264_jpg.h264 \ + elements/uvch264demux_data/valid_h264_yuy2.mjpg \ + elements/uvch264demux_data/valid_h264_yuy2.h264 \ + elements/uvch264demux_data/valid_h264_yuy2.yuy2 + + + VALGRIND_TO_FIX = \ elements/mpeg2enc \ elements/mplex \ @@ -209,6 +223,7 @@ check_PROGRAMS = \ elements/rtpmux \ libs/mpegvideoparser \ libs/h264parser \ + $(check_uvch264) \ libs/vc1parser \ $(check_schro) \ $(check_vp8) \ @@ -332,8 +347,10 @@ elements_assrender_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_API_VERSION elements_mpegtsmux_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(AM_CFLAGS) elements_mpegtsmux_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_API_VERSION) $(GST_BASE_LIBS) $(LDADD) +elements_uvch264demux_CFLAGS = -DUVCH264DEMUX_DATADIR="$(srcdir)/elements/uvch264demux_data" \ + $(AM_CFLAGS) -EXTRA_DIST = gst-plugins-bad.supp +EXTRA_DIST = gst-plugins-bad.supp $(uvch264_dist_data) orc_bayer_CFLAGS = $(ORC_CFLAGS) orc_bayer_LDADD = $(ORC_LIBS) -lorc-test-0.4 diff --git a/tests/check/elements/.gitignore b/tests/check/elements/.gitignore index b72807696..48604ef9a 100644 --- a/tests/check/elements/.gitignore +++ b/tests/check/elements/.gitignore @@ -44,6 +44,7 @@ schroenc spectrum timidity y4menc +uvch264demux videorecordingbin viewfinderbin voaacenc diff --git a/tests/check/elements/uvch264demux.c b/tests/check/elements/uvch264demux.c new file mode 100644 index 000000000..a633c694a --- /dev/null +++ b/tests/check/elements/uvch264demux.c @@ -0,0 +1,696 @@ +/* GStreamer + * + * unit test for uvch264_demux + * + * Copyright (C) <2012> Collabora Ltd. + * Author: Youness Alaoui <youness.alaoui@collabora.co.uk> + * Copyright (C) <2008> Sebastian Dröge <sebastian.droege@collabora.co.uk> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#include <gst/check/gstcheck.h> +#include <string.h> + +static GstElement *demux; +static GstPad *mjpg_pad, *h264_pad, *yuy2_pad, *nv12_pad, *jpg_pad; +static gboolean have_h264_eos, have_yuy2_eos, have_nv12_eos, have_jpg_eos; +static GstBuffer *buffer_h264, *buffer_yuy2, *buffer_nv12, *buffer_jpg; +static GError *gerror; +static gchar *error_debug; + +static GstStaticPadTemplate mjpg_template = +GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS, + GST_STATIC_CAPS ("image/jpeg, width=640, height=480, framerate=15/1")); + +static GstStaticPadTemplate sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, + GST_STATIC_CAPS_ANY); + +#define STRINGIFY_(x) #x +#define STRINGIFY(x) STRINGIFY_ (x) +#define DATADIR STRINGIFY (UVCH264DEMUX_DATADIR) +#define VALID_H264_JPG_MJPG_FILENAME DATADIR "/valid_h264_jpg.mjpg" +#define VALID_H264_JPG_JPG_FILENAME DATADIR "/valid_h264_jpg.jpg" +#define VALID_H264_JPG_H264_FILENAME DATADIR "/valid_h264_jpg.h264" +#define VALID_H264_YUY2_MJPG_FILENAME DATADIR "/valid_h264_yuy2.mjpg" +#define VALID_H264_YUY2_YUY2_FILENAME DATADIR "/valid_h264_yuy2.yuy2" +#define VALID_H264_YUY2_H264_FILENAME DATADIR "/valid_h264_yuy2.h264" + +#define _sink_chain_func(type) \ +static GstFlowReturn \ + _sink_##type##_chain (GstPad * pad, GstBuffer * buffer) \ +{ \ + fail_unless (GST_BUFFER_CAPS (buffer) != NULL); \ + \ + buffer_##type = buffer; \ + \ + return GST_FLOW_OK; \ +} + +#define _sink_event_func(type) \ +static gboolean \ + _sink_##type##_event (GstPad * pad, GstEvent * event) \ +{ \ + if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) \ + have_##type##_eos = TRUE; \ + \ + gst_event_unref (event); \ + \ + return TRUE; \ +} + + +_sink_chain_func (h264); +_sink_chain_func (yuy2); +_sink_chain_func (nv12); +_sink_chain_func (jpg); + +_sink_event_func (h264); +_sink_event_func (yuy2); +_sink_event_func (nv12); +_sink_event_func (jpg); + + +static GstBusSyncReply +_bus_sync_handler (GstBus * bus, GstMessage * message, gpointer data) +{ + if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) { + fail_unless (gerror == NULL && error_debug == NULL); + fail_unless (GST_MESSAGE_SRC (message) == GST_OBJECT (demux)); + gst_message_parse_error (message, &gerror, &error_debug); + } + return GST_BUS_PASS; +} + +static void +_teardown_test (void) +{ + GstBus *bus; + gst_element_set_state (demux, GST_STATE_NULL); + + bus = GST_ELEMENT_BUS (demux); + gst_bus_set_flushing (bus, TRUE); + gst_object_unref (bus); + + gst_pad_set_active (mjpg_pad, FALSE); + gst_object_unref (mjpg_pad); + if (h264_pad) { + gst_pad_set_active (h264_pad, FALSE); + gst_object_unref (h264_pad); + } + if (yuy2_pad) { + gst_pad_set_active (yuy2_pad, FALSE); + gst_object_unref (yuy2_pad); + } + if (nv12_pad) { + gst_pad_set_active (nv12_pad, FALSE); + gst_object_unref (nv12_pad); + } + if (jpg_pad) { + gst_pad_set_active (jpg_pad, FALSE); + gst_object_unref (jpg_pad); + } + if (gerror) { + g_error_free (gerror); + gerror = NULL; + } + if (error_debug) { + g_free (error_debug); + error_debug = NULL; + } + 
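+  /* Finally drop the demuxer itself and reset the globals for the next test */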
+ gst_object_unref (demux); + mjpg_pad = h264_pad = yuy2_pad = nv12_pad = jpg_pad = NULL; + demux = NULL; +} + +static void +_setup_test (gboolean link_h264, gboolean link_yuy2, gboolean link_nv12, + gboolean link_jpg) +{ + GstBus *bus = gst_bus_new (); + GstPad *sinkpad, *h264pad, *yuy2pad, *nv12pad, *jpgpad; + + have_h264_eos = have_yuy2_eos = have_nv12_eos = have_jpg_eos = FALSE; + buffer_h264 = buffer_yuy2 = buffer_nv12 = buffer_jpg = NULL; + + demux = gst_element_factory_make ("uvch264_mjpgdemux", NULL); + fail_unless (demux != NULL); + + gst_element_set_bus (demux, bus); + gst_bus_set_sync_handler (bus, _bus_sync_handler, NULL); + + mjpg_pad = gst_pad_new_from_static_template (&mjpg_template, "src"); + fail_unless (mjpg_pad != NULL); + sinkpad = gst_element_get_static_pad (demux, "sink"); + fail_unless (sinkpad != NULL); + fail_unless (gst_pad_link (mjpg_pad, sinkpad) == GST_PAD_LINK_OK); + gst_object_unref (sinkpad); + + gst_pad_set_active (mjpg_pad, TRUE); + + if (link_h264) { + h264pad = gst_element_get_static_pad (demux, "h264"); + fail_unless (h264pad != NULL); + + h264_pad = gst_pad_new_from_static_template (&sink_template, "h264"); + fail_unless (h264_pad != NULL); + gst_pad_set_chain_function (h264_pad, _sink_h264_chain); + gst_pad_set_event_function (h264_pad, _sink_h264_event); + + fail_unless (gst_pad_link (h264pad, h264_pad) == GST_PAD_LINK_OK); + gst_object_unref (h264pad); + + gst_pad_set_active (h264_pad, TRUE); + } + if (link_yuy2) { + yuy2pad = gst_element_get_static_pad (demux, "yuy2"); + fail_unless (yuy2pad != NULL); + + yuy2_pad = gst_pad_new_from_static_template (&sink_template, "yuy2"); + fail_unless (yuy2_pad != NULL); + gst_pad_set_chain_function (yuy2_pad, _sink_yuy2_chain); + gst_pad_set_event_function (yuy2_pad, _sink_yuy2_event); + + fail_unless (gst_pad_link (yuy2pad, yuy2_pad) == GST_PAD_LINK_OK); + gst_object_unref (yuy2pad); + + gst_pad_set_active (yuy2_pad, TRUE); + } + if (link_nv12) { + nv12pad = gst_element_get_static_pad (demux, "nv12"); + fail_unless (nv12pad != NULL); + + nv12_pad = gst_pad_new_from_static_template (&sink_template, "nv12"); + fail_unless (nv12_pad != NULL); + gst_pad_set_chain_function (nv12_pad, _sink_nv12_chain); + gst_pad_set_event_function (nv12_pad, _sink_nv12_event); + + fail_unless (gst_pad_link (nv12pad, nv12_pad) == GST_PAD_LINK_OK); + gst_object_unref (nv12pad); + gst_pad_set_active (nv12_pad, TRUE); + } + if (link_jpg) { + jpgpad = gst_element_get_static_pad (demux, "jpeg"); + fail_unless (jpgpad != NULL); + + jpg_pad = gst_pad_new_from_static_template (&sink_template, "jpeg"); + fail_unless (jpg_pad != NULL); + gst_pad_set_chain_function (jpg_pad, _sink_jpg_chain); + gst_pad_set_event_function (jpg_pad, _sink_jpg_event); + + fail_unless (gst_pad_link (jpgpad, jpg_pad) == GST_PAD_LINK_OK); + gst_object_unref (jpgpad); + + gst_pad_set_active (jpg_pad, TRUE); + } + + gst_element_set_state (demux, GST_STATE_PLAYING); +} + +static GstBuffer * +_buffer_from_file (const gchar * filename) +{ + GstBuffer *buffer = gst_buffer_new (); + gchar *contents = NULL; + gsize length = 0; + + fail_unless (g_file_get_contents (filename, &contents, &length, NULL)); + + GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) contents; + GST_BUFFER_DATA (buffer) = (guint8 *) contents; + GST_BUFFER_SIZE (buffer) = length; + GST_BUFFER_OFFSET (buffer) = 0; + + return buffer; +} + +GST_START_TEST (test_valid_h264_jpg) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstCaps *h264_caps; + GstBuffer *buffer; + gchar 
*h264_data, *jpg_data; + gsize h264_size, jpg_size; + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + h264_caps = gst_caps_new_simple ("video/x-h264", + "width", G_TYPE_INT, 640, "height", G_TYPE_INT, 480, + "framerate", GST_TYPE_FRACTION, 15, 1, NULL); + buffer = _buffer_from_file (VALID_H264_JPG_MJPG_FILENAME); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (g_file_get_contents (VALID_H264_JPG_H264_FILENAME, + &h264_data, &h264_size, NULL)); + fail_unless (g_file_get_contents (VALID_H264_JPG_JPG_FILENAME, + &jpg_data, &jpg_size, NULL)); + + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos); + fail_unless (have_yuy2_eos); + fail_unless (have_nv12_eos); + fail_unless (have_jpg_eos); + fail_unless (buffer_h264 != NULL); + fail_unless (buffer_jpg != NULL); + fail_unless (buffer_nv12 == NULL); + fail_unless (buffer_yuy2 == NULL); + fail_unless (gerror == NULL && error_debug == NULL); + fail_unless (gst_caps_is_always_compatible (GST_BUFFER_CAPS (buffer_h264), + h264_caps)); + fail_unless (gst_caps_is_always_compatible (GST_BUFFER_CAPS (buffer_jpg), + mjpg_caps)); + fail_unless (GST_BUFFER_SIZE (buffer_h264) == h264_size); + fail_unless (GST_BUFFER_SIZE (buffer_jpg) == jpg_size); + fail_unless (memcmp (GST_BUFFER_DATA (buffer_h264), h264_data, + h264_size) == 0); + fail_unless (memcmp (GST_BUFFER_DATA (buffer_jpg), jpg_data, jpg_size) == 0); + + gst_caps_unref (mjpg_caps); + gst_caps_unref (h264_caps); + g_free (h264_data); + g_free (jpg_data); + gst_buffer_unref (buffer_h264); + gst_buffer_unref (buffer_jpg); + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_valid_h264_yuy2) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstCaps *h264_caps; + GstCaps *yuy2_caps; + GstBuffer *buffer; + gchar *h264_data, *yuy2_data; + gsize h264_size, yuy2_size; + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + h264_caps = gst_caps_new_simple ("video/x-h264", + "width", G_TYPE_INT, 640, "height", G_TYPE_INT, 480, + "framerate", GST_TYPE_FRACTION, 15, 1, NULL); + yuy2_caps = gst_caps_new_simple ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'), + "width", G_TYPE_INT, 160, "height", G_TYPE_INT, 90, + "framerate", GST_TYPE_FRACTION, 15, 1, NULL); + buffer = _buffer_from_file (VALID_H264_YUY2_MJPG_FILENAME); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (g_file_get_contents (VALID_H264_YUY2_H264_FILENAME, + &h264_data, &h264_size, NULL)); + fail_unless (g_file_get_contents (VALID_H264_YUY2_YUY2_FILENAME, + &yuy2_data, &yuy2_size, NULL)); + + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos); + fail_unless (have_yuy2_eos); + fail_unless (have_nv12_eos); + fail_unless (have_jpg_eos); + fail_unless (buffer_h264 != NULL); + fail_unless (buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL); + fail_unless (buffer_yuy2 != NULL); + fail_unless (gerror == NULL && error_debug == NULL); + fail_unless (gst_caps_is_always_compatible (GST_BUFFER_CAPS (buffer_h264), + h264_caps)); + fail_unless (gst_caps_is_always_compatible (GST_BUFFER_CAPS (buffer_yuy2), + yuy2_caps)); + fail_unless (GST_BUFFER_SIZE (buffer_h264) == h264_size); + fail_unless (GST_BUFFER_SIZE (buffer_yuy2) == yuy2_size); + fail_unless (memcmp (GST_BUFFER_DATA (buffer_h264), h264_data, + h264_size) == 0); + fail_unless (memcmp (GST_BUFFER_DATA 
(buffer_yuy2), yuy2_data, + yuy2_size) == 0); + + gst_caps_unref (mjpg_caps); + gst_caps_unref (yuy2_caps); + gst_caps_unref (h264_caps); + g_free (h264_data); + g_free (yuy2_data); + gst_buffer_unref (buffer_h264); + gst_buffer_unref (buffer_yuy2); + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_no_data) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new (); + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 == NULL && buffer_jpg != NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + fail_unless (gerror == NULL && error_debug == NULL); + + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_data_zero) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new_and_alloc (1024); + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + memset (GST_BUFFER_DATA (buffer), 0, 1024); + GST_BUFFER_SIZE (buffer) = 1024; + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 == NULL && buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_no_marker_size) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new_and_alloc (1024); + const guchar data[] = { + 0xff, 0xd8, 0xff, 0xe4, 0x00 + }; + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data)); + GST_BUFFER_SIZE (buffer) = sizeof (data); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 == NULL && buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + fail_unless (gerror != NULL); + fail_unless (gerror->domain == GST_STREAM_ERROR); + fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX); + fail_unless (memcmp (gerror->message, + "Not enough data to read marker size", + strlen (gerror->message)) == 0); + + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_not_enough_data) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new_and_alloc (1024); + const guchar data[] = { + 0xff, 0xd8, 0xff, 0xe4, 0x00, 0xff, 0x00, 0x00 + }; + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data)); + GST_BUFFER_SIZE (buffer) = sizeof (data); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 == NULL && buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + fail_unless (gerror != NULL); + 
fail_unless (gerror->domain == GST_STREAM_ERROR); + fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX); + fail_unless (memcmp (gerror->message, + "Not enough data to read marker content", + strlen (gerror->message)) == 0); + + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_no_aux_header) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new_and_alloc (1024); + const guchar data[] = { + 0xff, 0xd8, 0xff, 0xe4, 0x00, 0x02, 0x00, 0x00, + 0xff, 0xd9 + }; + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data)); + GST_BUFFER_SIZE (buffer) = sizeof (data); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 == NULL && buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + fail_unless (gerror != NULL); + fail_unless (gerror->domain == GST_STREAM_ERROR); + fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX); + fail_unless (memcmp (gerror->message, + "Not enough data to read aux header", strlen (gerror->message)) == 0); + + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_empty_aux_data) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new_and_alloc (1024); + const guchar data[] = { + 0xff, 0xd8, 0xff, 0xe4, 0x00, 0x1C, 0x00, 0x01, + 0x16, 0x00, 0x48, 0x32, 0x36, 0x34, 0x80, 0x07, + 0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00, + 0x40, 0x62, 0xcb, 0x0a, 0x00, 0x00, 0x00, 0x00, + 0xff, 0xd9 + }; + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data)); + GST_BUFFER_SIZE (buffer) = sizeof (data); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 == NULL && buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + fail_unless (gerror == NULL); + + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_unknown_fcc) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new_and_alloc (1024); + const guchar data[] = { + 0xff, 0xd8, 0xff, 0xe4, 0x00, 0x2C, 0x00, 0x01, + 0x16, 0x00, 0x48, 0x30, 0x30, 0x30, 0x80, 0x07, + 0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00, + 0x40, 0x62, 0xcb, 0x0a, 0x10, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xff, 0xd9 + }; + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data)); + GST_BUFFER_SIZE (buffer) = sizeof (data); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 == NULL && buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + fail_unless (gerror != NULL); + fail_unless (gerror->domain == GST_STREAM_ERROR); + fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX); + 
fail_unless (memcmp (gerror->message, + "Unknown auxiliary stream format : H000", + strlen (gerror->message)) == 0); + + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_not_enough_aux_data) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new_and_alloc (1024); + const guchar data[] = { + 0xff, 0xd8, 0xff, 0xe4, 0x00, 0x1C, 0x00, 0x01, + 0x16, 0x00, 0x48, 0x32, 0x36, 0x34, 0x80, 0x07, + 0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00, + 0x40, 0x62, 0xcb, 0x0a, 0x10, 0x00, 0x00, 0x00, + 0xff, 0xd9 + }; + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data)); + GST_BUFFER_SIZE (buffer) = sizeof (data); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 == NULL && buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + fail_unless (gerror != NULL); + fail_unless (gerror->domain == GST_STREAM_ERROR); + fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX); + fail_unless (memcmp (gerror->message, + "Incomplete auxiliary stream. 16 bytes missing", + strlen (gerror->message)) == 0); + + _teardown_test (); +} + +GST_END_TEST; + +GST_START_TEST (test_too_much_aux_data) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new_and_alloc (1024); + const guchar data[] = { + 0xff, 0xd8, 0xff, 0xe4, 0x00, 0x3C, 0x00, 0x01, + 0x16, 0x00, 0x48, 0x32, 0x36, 0x34, 0x80, 0x07, + 0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00, + 0x40, 0x62, 0xcb, 0x0a, 0x10, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xff, 0xd9 + }; + + _setup_test (TRUE, TRUE, TRUE, TRUE); + + memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data)); + GST_BUFFER_SIZE (buffer) = sizeof (data); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 == NULL && buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + fail_unless (gerror != NULL); + fail_unless (gerror->domain == GST_STREAM_ERROR); + fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX); + fail_unless (memcmp (gerror->message, + "Expected 16 auxiliary data, got 32 bytes", + strlen (gerror->message)) == 0); + + _teardown_test (); +} + +GST_END_TEST; + + +GST_START_TEST (test_no_sos_marker) +{ + GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template); + GstBuffer *buffer = gst_buffer_new_and_alloc (1024); + const guchar data[] = { + 0xff, 0xd8, 0xff, 0xe4, 0x00, 0x2C, 0x00, 0x01, + 0x16, 0x00, 0x48, 0x32, 0x36, 0x34, 0x80, 0x07, + 0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00, + 0x40, 0x62, 0xcb, 0x0a, 0x10, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xff, 0xd9 + }; + const guchar h264_data[] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 + }; + + _setup_test (TRUE, TRUE, TRUE, 
TRUE); + + memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data)); + GST_BUFFER_SIZE (buffer) = sizeof (data); + gst_buffer_set_caps (buffer, mjpg_caps); + fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK); + fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ())); + + fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos); + fail_unless (buffer_h264 != NULL && buffer_jpg == NULL); + fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL); + fail_unless (gerror == NULL); + fail_unless (GST_BUFFER_SIZE (buffer_h264) == sizeof (h264_data)); + fail_unless (memcmp (GST_BUFFER_DATA (buffer_h264), h264_data, + sizeof (h264_data)) == 0); + + _teardown_test (); +} + +GST_END_TEST; + +static Suite * +uvch264demux_suite (void) +{ + Suite *s = suite_create ("uvch264demux"); + TCase *tc_chain = tcase_create ("general"); + + suite_add_tcase (s, tc_chain); + tcase_set_timeout (tc_chain, 180); + tcase_add_test (tc_chain, test_valid_h264_jpg); + tcase_add_test (tc_chain, test_valid_h264_yuy2); + tcase_add_test (tc_chain, test_no_data); + tcase_add_test (tc_chain, test_data_zero); + tcase_add_test (tc_chain, test_no_marker_size); + tcase_add_test (tc_chain, test_not_enough_data); + tcase_add_test (tc_chain, test_no_aux_header); + tcase_add_test (tc_chain, test_empty_aux_data); + tcase_add_test (tc_chain, test_unknown_fcc); + tcase_add_test (tc_chain, test_no_sos_marker); + tcase_add_test (tc_chain, test_not_enough_aux_data); + tcase_add_test (tc_chain, test_too_much_aux_data); + + return s; +} + +GST_CHECK_MAIN (uvch264demux); diff --git a/tests/check/elements/uvch264demux_data/valid_h264_jpg.h264 b/tests/check/elements/uvch264demux_data/valid_h264_jpg.h264 Binary files differ new file mode 100644 index 000000000..f57e002d9 --- /dev/null +++ b/tests/check/elements/uvch264demux_data/valid_h264_jpg.h264 diff --git a/tests/check/elements/uvch264demux_data/valid_h264_jpg.jpg b/tests/check/elements/uvch264demux_data/valid_h264_jpg.jpg Binary files differ new file mode 100644 index 000000000..e23067b90 --- /dev/null +++ b/tests/check/elements/uvch264demux_data/valid_h264_jpg.jpg diff --git a/tests/check/elements/uvch264demux_data/valid_h264_jpg.mjpg b/tests/check/elements/uvch264demux_data/valid_h264_jpg.mjpg Binary files differ new file mode 100644 index 000000000..f36b514a8 --- /dev/null +++ b/tests/check/elements/uvch264demux_data/valid_h264_jpg.mjpg diff --git a/tests/check/elements/uvch264demux_data/valid_h264_yuy2.h264 b/tests/check/elements/uvch264demux_data/valid_h264_yuy2.h264 Binary files differ new file mode 100644 index 000000000..ae68b8b6a --- /dev/null +++ b/tests/check/elements/uvch264demux_data/valid_h264_yuy2.h264 diff --git a/tests/check/elements/uvch264demux_data/valid_h264_yuy2.mjpg b/tests/check/elements/uvch264demux_data/valid_h264_yuy2.mjpg Binary files differ new file mode 100644 index 000000000..c59e7088f --- /dev/null +++ b/tests/check/elements/uvch264demux_data/valid_h264_yuy2.mjpg diff --git a/tests/check/elements/uvch264demux_data/valid_h264_yuy2.yuy2 b/tests/check/elements/uvch264demux_data/valid_h264_yuy2.yuy2 new file mode 100644 index 000000000..b62889135 --- /dev/null +++ b/tests/check/elements/uvch264demux_data/valid_h264_yuy2.yuy2 @@ -0,0 +1 @@ + [raw YUY2 reference frame: binary payload not representable as text, omitted]
\ No newline at end of file diff --git a/tests/examples/Makefile.am b/tests/examples/Makefile.am index eef73e6c9..d166a6605 100644 --- a/tests/examples/Makefile.am +++ b/tests/examples/Makefile.am @@ -1,5 +1,11 @@ +if USE_UVCH264 +UVCH264_DIR=uvch264 +else +UVCH264_DIR= +endif + if HAVE_GTK -GTK_EXAMPLES=mxf scaletempo camerabin2 +GTK_EXAMPLES=mxf scaletempo camerabin2 $(UVCH264_DIR) else GTK_EXAMPLES= endif @@ -13,6 +19,6 @@ endif OPENCV_EXAMPLES=opencv SUBDIRS= $(DIRECTFB_DIR) $(GTK_EXAMPLES) $(OPENCV_EXAMPLES) -DIST_SUBDIRS= camerabin2 directfb mxf scaletempo opencv +DIST_SUBDIRS= camerabin2 directfb mxf scaletempo opencv uvch264 include $(top_srcdir)/common/parallel-subdirs.mak diff --git a/tests/examples/uvch264/Makefile.am b/tests/examples/uvch264/Makefile.am new file mode 100644 index 000000000..e02b88821 --- /dev/null +++ b/tests/examples/uvch264/Makefile.am @@ -0,0 +1,36 @@ +TEST_UVCH264_GLADE_FILES = window.glade \ + boolean_property.glade \ + enum_property.glade \ + int_property.glade + +if HAVE_GTK + +TEST_UVCH264_EXAMPLES = test-uvch264 + +test_uvch264_SOURCES = test-uvch264.c +test_uvch264_CFLAGS = \ + $(GST_PLUGINS_BAD_CFLAGS) \ + $(GST_PLUGINS_BASE_CFLAGS) \ + $(GST_VIDEO_CFLAGS) \ + $(GST_CFLAGS) \ + $(GTK_CFLAGS) \ + $(GMODULE_EXPORT_CFLAGS) \ + -DGST_USE_UNSTABLE_API +test_uvch264_LDADD = \ + $(GST_PLUGINS_BASE_LIBS) \ + $(GST_VIDEO_LIBS) \ + $(GST_LIBS) \ + -lgstinterfaces-@GST_MAJORMINOR@ \ + $(GTK_LIBS) \ + $(GMODULE_EXPORT_LIBS) + +noinst_DATA = $(TEST_UVCH264_GLADE_FILES) + +else +TEST_UVCH264_EXAMPLES = +endif + +noinst_PROGRAMS = $(TEST_UVCH264_EXAMPLES) + +EXTRA_DIST = $(TEST_UVCH264_GLADE_FILES) + diff --git a/tests/examples/uvch264/boolean_property.glade b/tests/examples/uvch264/boolean_property.glade new file mode 100644 index 000000000..d391a58c6 --- /dev/null +++ b/tests/examples/uvch264/boolean_property.glade @@ -0,0 +1,94 @@ +<?xml version="1.0" encoding="UTF-8"?> +<interface> + <!-- interface-requires gtk+ 3.0 --> + <object class="GtkHBox" id="boolean-property"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <child> + <object class="GtkLabel" id="label"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="width_chars">18</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">0</property> + </packing> + </child> + <child> + <object class="GtkToggleButton" id="value"> + <property name="label" translatable="yes"> Disabled </property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="toggled" handler="on_button_toggled" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">1</property> + </packing> + </child> + <child> + <object class="GtkButton" id="get"> + <property name="label" translatable="yes">Get</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_get_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property 
name="position">2</property> + </packing> + </child> + <child> + <object class="GtkButton" id="set"> + <property name="label" translatable="yes">Set</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_set_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">3</property> + </packing> + </child> + <child> + <object class="GtkLabel" id="label66"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Default</property> + <property name="width_chars">8</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">4</property> + </packing> + </child> + <child> + <object class="GtkToggleButton" id="default"> + <property name="label" translatable="yes"> Disabled </property> + <property name="visible">True</property> + <property name="sensitive">False</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="toggled" handler="on_button_toggled" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">5</property> + </packing> + </child> + </object> +</interface> diff --git a/tests/examples/uvch264/enum_property.glade b/tests/examples/uvch264/enum_property.glade new file mode 100644 index 000000000..0dfb9747f --- /dev/null +++ b/tests/examples/uvch264/enum_property.glade @@ -0,0 +1,88 @@ +<?xml version="1.0" encoding="UTF-8"?> +<interface> + <!-- interface-requires gtk+ 3.0 --> + <object class="GtkHBox" id="enum-property"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <child> + <object class="GtkLabel" id="label"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="width_chars">18</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">0</property> + </packing> + </child> + <child> + <object class="GtkComboBoxText" id="value"> + <property name="visible">True</property> + <property name="can_focus">False</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">1</property> + </packing> + </child> + <child> + <object class="GtkButton" id="get"> + <property name="label" translatable="yes">Get</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_get_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">2</property> + </packing> + </child> + <child> + <object class="GtkButton" id="set"> + <property name="label" translatable="yes">Set</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property 
name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_set_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">3</property> + </packing> + </child> + <child> + <object class="GtkLabel" id="label72"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Default</property> + <property name="width_chars">8</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">4</property> + </packing> + </child> + <child> + <object class="GtkEntry" id="default"> + <property name="visible">True</property> + <property name="sensitive">False</property> + <property name="can_focus">True</property> + <property name="invisible_char">●</property> + <property name="invisible_char_set">True</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">5</property> + </packing> + </child> + </object> +</interface> diff --git a/tests/examples/uvch264/enum_property_gtk2.glade b/tests/examples/uvch264/enum_property_gtk2.glade new file mode 100644 index 000000000..487efbe5c --- /dev/null +++ b/tests/examples/uvch264/enum_property_gtk2.glade @@ -0,0 +1,88 @@ +<?xml version="1.0" encoding="UTF-8"?> +<interface> + <!-- interface-requires gtk+ 3.0 --> + <object class="GtkHBox" id="enum-property"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <child> + <object class="GtkLabel" id="label"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="width_chars">18</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">0</property> + </packing> + </child> + <child> + <object class="GtkComboBox" id="value"> + <property name="visible">True</property> + <property name="can_focus">False</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">1</property> + </packing> + </child> + <child> + <object class="GtkButton" id="get"> + <property name="label" translatable="yes">Get</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_get_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">2</property> + </packing> + </child> + <child> + <object class="GtkButton" id="set"> + <property name="label" translatable="yes">Set</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_set_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">3</property> + </packing> + </child> + <child> + <object class="GtkLabel" id="label72"> + <property 
name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Default</property> + <property name="width_chars">8</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">4</property> + </packing> + </child> + <child> + <object class="GtkEntry" id="default"> + <property name="visible">True</property> + <property name="sensitive">False</property> + <property name="can_focus">True</property> + <property name="invisible_char">●</property> + <property name="invisible_char_set">True</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">5</property> + </packing> + </child> + </object> +</interface> diff --git a/tests/examples/uvch264/int_property.glade b/tests/examples/uvch264/int_property.glade new file mode 100644 index 000000000..422ce1cdc --- /dev/null +++ b/tests/examples/uvch264/int_property.glade @@ -0,0 +1,147 @@ +<?xml version="1.0" encoding="UTF-8"?> +<interface> + <!-- interface-requires gtk+ 3.0 --> + <object class="GtkHBox" id="int-property"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <child> + <object class="GtkLabel" id="label"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="width_chars">18</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">0</property> + </packing> + </child> + <child> + <object class="GtkEntry" id="value"> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="invisible_char">●</property> + <property name="width_chars">10</property> + <property name="invisible_char_set">True</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">1</property> + </packing> + </child> + <child> + <object class="GtkButton" id="get"> + <property name="label" translatable="yes">Get</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_get_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">2</property> + </packing> + </child> + <child> + <object class="GtkButton" id="set"> + <property name="label" translatable="yes">Set</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_set_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">3</property> + </packing> + </child> + <child> + <object class="GtkLabel" id="label2"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Minimum</property> + <property name="width_chars">8</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> 
+ <property name="position">4</property> + </packing> + </child> + <child> + <object class="GtkEntry" id="minimum"> + <property name="visible">True</property> + <property name="sensitive">False</property> + <property name="can_focus">True</property> + <property name="invisible_char">●</property> + <property name="width_chars">10</property> + <property name="invisible_char_set">True</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">5</property> + </packing> + </child> + <child> + <object class="GtkLabel" id="label3"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Default</property> + <property name="width_chars">8</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">6</property> + </packing> + </child> + <child> + <object class="GtkEntry" id="default"> + <property name="visible">True</property> + <property name="sensitive">False</property> + <property name="can_focus">True</property> + <property name="invisible_char">●</property> + <property name="width_chars">10</property> + <property name="invisible_char_set">True</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">7</property> + </packing> + </child> + <child> + <object class="GtkLabel" id="label4"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Maximum</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">8</property> + </packing> + </child> + <child> + <object class="GtkEntry" id="maximum"> + <property name="visible">True</property> + <property name="sensitive">False</property> + <property name="can_focus">True</property> + <property name="invisible_char">●</property> + <property name="width_chars">10</property> + <property name="invisible_char_set">True</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">9</property> + </packing> + </child> + </object> +</interface> diff --git a/tests/examples/uvch264/test-uvch264.c b/tests/examples/uvch264/test-uvch264.c new file mode 100644 index 000000000..78d8ac110 --- /dev/null +++ b/tests/examples/uvch264/test-uvch264.c @@ -0,0 +1,673 @@ +#include <gst/gst.h> +#include <gtk/gtk.h> +#include <gdk/gdkx.h> +#include <gst/interfaces/xoverlay.h> +#include <gst/video/video.h> + +#define WINDOW_GLADE "window.glade" +#define INT_PROPERTY_GLADE "int_property.glade" +#define ENUM_PROPERTY_GLADE "enum_property.glade" +#define BOOL_PROPERTY_GLADE "boolean_property.glade" + +#define PROPERTY_TO_VBOX \ + properties[i].dynamic ? 
GTK_BOX (dynamic_vbox) : GTK_BOX (static_vbox) + +#define GET_WIDGET(object, type, name) \ + type (gtk_builder_get_object ((object)->builder, name)) + +#define GET_PROP_WIDGET(type, name) GET_WIDGET (&(properties[i]), type, name) + +static guint h264_xid, preview_xid; + +typedef struct +{ + GtkBuilder *builder; + GstElement *src; + enum + { NONE, INT, ENUM, BOOL } type; + const gchar *property_name; + gboolean readonly; + gboolean dynamic; +} Prop; + +typedef struct +{ + GtkBuilder *builder; + GstElement *bin; + GstElement *src; + GstElement *identity; + GstElement *vid_capsfilter; + GstElement *vf_capsfilter; +} Main; + +Prop properties[] = { + {NULL, NULL, INT, "initial-bitrate", FALSE, FALSE}, + {NULL, NULL, INT, "slice-units", FALSE, FALSE}, + {NULL, NULL, ENUM, "slice-mode", FALSE, FALSE}, + {NULL, NULL, INT, "iframe-period", FALSE, FALSE}, + {NULL, NULL, ENUM, "usage-type", FALSE, FALSE}, + {NULL, NULL, ENUM, "entropy", FALSE, FALSE}, + {NULL, NULL, BOOL, "enable-sei", FALSE, FALSE}, + {NULL, NULL, INT, "num-reorder-frames", FALSE, FALSE}, + {NULL, NULL, BOOL, "preview-flipped", FALSE, FALSE}, + {NULL, NULL, INT, "leaky-bucket-size", FALSE, FALSE}, + {NULL, NULL, INT, "num-clock-samples", FALSE, TRUE}, + {NULL, NULL, ENUM, "rate-control", FALSE, TRUE}, + {NULL, NULL, BOOL, "fixed-framerate", FALSE, TRUE}, + {NULL, NULL, INT, "max-mbps", TRUE, TRUE}, + {NULL, NULL, INT, "level-idc", FALSE, TRUE}, + {NULL, NULL, INT, "peak-bitrate", FALSE, TRUE}, + {NULL, NULL, INT, "average-bitrate", FALSE, TRUE}, + {NULL, NULL, INT, "min-iframe-qp", FALSE, TRUE}, + {NULL, NULL, INT, "max-iframe-qp", FALSE, TRUE}, + {NULL, NULL, INT, "min-pframe-qp", FALSE, TRUE}, + {NULL, NULL, INT, "max-pframe-qp", FALSE, TRUE}, + {NULL, NULL, INT, "min-bframe-qp", FALSE, TRUE}, + {NULL, NULL, INT, "max-bframe-qp", FALSE, TRUE}, + {NULL, NULL, INT, "ltr-buffer-size", FALSE, TRUE}, + {NULL, NULL, INT, "ltr-encoder-control", FALSE, TRUE}, +}; + +static void set_drop_probability (Main * self); +static void get_all_properties (void); +static void probe_all_properties (gboolean playing); + +/* Callbacks */ +void on_button_toggled (GtkToggleButton * button, gpointer user_data); +void on_get_button_clicked (GtkButton * button, gpointer user_data); +void on_set_button_clicked (GtkButton * button, gpointer user_data); +void on_button_ready_clicked (GtkButton * button, gpointer user_data); +void on_button_null_clicked (GtkButton * button, gpointer user_data); +void on_button_playing_clicked (GtkButton * button, gpointer user_data); +void on_iframe_button_clicked (GtkButton * button, gpointer user_data); +void on_renegotiate_button_clicked (GtkButton * button, gpointer user_data); +void on_start_capture_button_clicked (GtkButton * button, gpointer user_data); +void on_stop_capture_button_clicked (GtkButton * button, gpointer user_data); +void on_window_destroyed (GtkWindow * window, gpointer user_data); + +static GstEvent * +new_upstream_force_key_unit (GstClockTime running_time, + gboolean all_headers, guint count) +{ + GstEvent *force_key_unit_event; + GstStructure *s; + + s = gst_structure_new ("GstForceKeyUnit", + "running-time", GST_TYPE_CLOCK_TIME, running_time, + "all-headers", G_TYPE_BOOLEAN, all_headers, + "count", G_TYPE_UINT, count, NULL); + force_key_unit_event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, s); + + return force_key_unit_event; +} + +void +on_get_button_clicked (GtkButton * button, gpointer user_data) +{ + Prop *property = user_data; + + switch (property->type) { + case INT: + { + gchar *val; + 
gint val_int; + g_object_get (property->src, property->property_name, &val_int, NULL); + val = g_strdup_printf ("%d", val_int); + gtk_entry_set_text (GET_WIDGET (property, GTK_ENTRY, "value"), val); + g_free (val); + } + break; + case ENUM: + { + GParamSpec *param; + gint val; + + g_object_get (property->src, property->property_name, &val, NULL); + param = g_object_class_find_property (G_OBJECT_GET_CLASS (property->src), + property->property_name); + if (G_IS_PARAM_SPEC_ENUM (param)) { + GEnumValue *values; + guint i = 0; + + values = G_ENUM_CLASS (g_type_class_ref (param->value_type))->values; + + while (values[i].value_name) { + if (values[i].value == val) { + gtk_combo_box_set_active (GET_WIDGET (property, + (GtkComboBox *), "value"), i); + break; + } + i++; + } + } + } + break; + case BOOL: + { + gboolean val; + + g_object_get (property->src, property->property_name, &val, NULL); + gtk_toggle_button_set_active (GET_WIDGET (property, + (GtkToggleButton *), "value"), val); + } + break; + case NONE: + default: + break; + } +} + +void +on_set_button_clicked (GtkButton * button, gpointer user_data) +{ + Prop *property = user_data; + + switch (property->type) { + case INT: + { + int val_int; + const gchar *val; + + val = gtk_entry_get_text (GET_WIDGET (property, GTK_ENTRY, "value")); + val_int = (int) g_ascii_strtoll (val, NULL, 0); + g_object_set (property->src, property->property_name, val_int, NULL); + } + break; + case ENUM: + { + GParamSpec *param; + + param = g_object_class_find_property (G_OBJECT_GET_CLASS (property->src), + property->property_name); + if (G_IS_PARAM_SPEC_ENUM (param)) { + GEnumValue *values; + guint val = 0; + + values = G_ENUM_CLASS (g_type_class_ref (param->value_type))->values; + + val = gtk_combo_box_get_active (GET_WIDGET (property, + (GtkComboBox *), "value")); + g_object_set (property->src, property->property_name, + values[val].value, NULL); + } + } + break; + case BOOL: + { + gboolean val; + + val = gtk_toggle_button_get_active (GET_WIDGET (property, + (GtkToggleButton *), "value")); + g_object_set (property->src, property->property_name, val, NULL); + } + break; + case NONE: + default: + break; + } + get_all_properties (); +} + +void +on_button_toggled (GtkToggleButton * button, gpointer user_data) +{ + if (gtk_toggle_button_get_active (button)) + gtk_button_set_label (GTK_BUTTON (button), " Enabled "); + else + gtk_button_set_label (GTK_BUTTON (button), " Disabled "); +} + +static gboolean +set_caps (Main * self, gboolean send_event) +{ + const gchar *h264_filter; + const gchar *raw_filter; + GstCaps *h264_caps = NULL; + GstCaps *raw_caps = NULL; + gboolean ret = TRUE; + + h264_filter = gtk_entry_get_text (GET_WIDGET (self, GTK_ENTRY, "h264_caps")); + raw_filter = + gtk_entry_get_text (GET_WIDGET (self, GTK_ENTRY, "preview_caps")); + if (h264_filter) + h264_caps = gst_caps_from_string (h264_filter); + if (raw_filter) + raw_caps = gst_caps_from_string (raw_filter); + + g_debug ("H264 caps : %s", gst_caps_to_string (h264_caps)); + g_debug ("Preview caps : %s", gst_caps_to_string (raw_caps)); + if (!h264_caps || !raw_caps) { + g_debug ("Invalid caps"); + ret = FALSE; + goto end; + } + + g_object_set (self->vid_capsfilter, "caps", h264_caps, NULL); + g_object_set (self->vf_capsfilter, "caps", raw_caps, NULL); + + if (send_event) { + gst_element_send_event (GST_ELEMENT (self->src), + gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, + gst_structure_new ("renegotiate", NULL))); + } + +end: + if (h264_caps) + gst_caps_unref (h264_caps); + if (raw_caps) + 
gst_caps_unref (raw_caps); + + return ret; +} + +void +on_button_ready_clicked (GtkButton * button, gpointer user_data) +{ + Main *self = user_data; + + set_caps (self, FALSE); + gst_element_set_state (self->bin, GST_STATE_READY); + probe_all_properties (FALSE); + get_all_properties (); +} + +void +on_button_null_clicked (GtkButton * button, gpointer user_data) +{ + Main *self = user_data; + + gst_element_set_state (self->bin, GST_STATE_NULL); + probe_all_properties (FALSE); + get_all_properties (); +} + +void +on_button_playing_clicked (GtkButton * button, gpointer user_data) +{ + Main *self = user_data; + + if (gst_element_set_state (self->bin, GST_STATE_PLAYING) == + GST_STATE_CHANGE_FAILURE) { + g_debug ("Unable to go to state PLAYING"); + } + set_caps (self, FALSE); + probe_all_properties (TRUE); + get_all_properties (); + + set_drop_probability (self); +} + +void +on_iframe_button_clicked (GtkButton * button, gpointer user_data) +{ + Main *self = user_data; + GstEvent *event; + gboolean pps_sps; + + set_drop_probability (self); + pps_sps = gtk_toggle_button_get_active (GET_WIDGET (self, (GtkToggleButton *), + "pps_sps")); + + event = new_upstream_force_key_unit (GST_CLOCK_TIME_NONE, pps_sps, 0); + gst_element_send_event (GST_ELEMENT (self->src), event); +} + +void +on_renegotiate_button_clicked (GtkButton * button, gpointer user_data) +{ + Main *self = user_data; + + set_caps (self, TRUE); + probe_all_properties (GST_STATE (self->bin) >= GST_STATE_PAUSED); + get_all_properties (); +} + +void +on_start_capture_button_clicked (GtkButton * button, gpointer user_data) +{ + Main *self = user_data; + + set_caps (self, FALSE); + g_signal_emit_by_name (G_OBJECT (self->src), "start-capture", NULL); + probe_all_properties (GST_STATE (self->bin) >= GST_STATE_PAUSED); + get_all_properties (); +} + +void +on_stop_capture_button_clicked (GtkButton * button, gpointer user_data) +{ + Main *self = user_data; + + set_caps (self, FALSE); + g_signal_emit_by_name (G_OBJECT (self->src), "stop-capture", NULL); + probe_all_properties (GST_STATE (self->bin) >= GST_STATE_PAUSED); + get_all_properties (); +} + +void +on_window_destroyed (GtkWindow * window, gpointer user_data) +{ + gtk_main_quit (); +} + +static gboolean +_bus_callback (GstBus * bus, GstMessage * message, gpointer user_data) +{ + const GstStructure *s = gst_message_get_structure (message); + GstObject *source = NULL; + + if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT && + gst_structure_has_name (s, "prepare-xwindow-id")) { + source = GST_MESSAGE_SRC (message); + if (!g_strcmp0 (gst_object_get_name (source), "h264_sink")) + gst_x_overlay_set_window_handle (GST_X_OVERLAY (source), h264_xid); + else + gst_x_overlay_set_window_handle (GST_X_OVERLAY (source), preview_xid); + } + + return TRUE; +} + +static void +set_drop_probability (Main * self) +{ + const gchar *drop; + gdouble drop_probability = 0.0; + + drop = gtk_entry_get_text (GET_WIDGET (self, GTK_ENTRY, "drop")); + drop_probability = g_ascii_strtod (drop, NULL); + g_debug ("Setting drop probability to : %f", drop_probability); + g_object_set (self->identity, "drop-probability", drop_probability, NULL); +} + +static void +get_all_properties (void) +{ + int i; + + for (i = 0; i < G_N_ELEMENTS (properties); i++) + on_get_button_clicked (NULL, &properties[i]); + +} + +static void +probe_all_properties (gboolean playing) +{ + int i; + + for (i = 0; i < G_N_ELEMENTS (properties); i++) { + gboolean return_value, changeable, default_bool; + guint mask, minimum, maximum, default_int; + 
GParamSpec *param; + + /* When playing, ignore static controls */ + if (playing && !properties[i].dynamic) + continue; + + switch (properties[i].type) { + case INT: + g_signal_emit_by_name (G_OBJECT (properties[i].src), "get-int-setting", + properties[i].property_name, &minimum, &default_int, &maximum, + &return_value, NULL); + if (return_value) { + gchar *min, *def, *max; + + min = g_strdup_printf ("%d", minimum); + def = g_strdup_printf ("%d", default_int); + max = g_strdup_printf ("%d", maximum); + gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "minimum"), min); + gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "default"), def); + gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "maximum"), max); + g_free (min); + g_free (def); + g_free (max); + } else { + gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "minimum"), ""); + gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "default"), ""); + gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "maximum"), ""); + } + break; + case ENUM: + g_signal_emit_by_name (G_OBJECT (properties[i].src), "get-enum-setting", + properties[i].property_name, &mask, &default_int, &return_value, + NULL); + param = + g_object_class_find_property (G_OBJECT_GET_CLASS (properties + [i].src), properties[i].property_name); + if (G_IS_PARAM_SPEC_ENUM (param)) { + GEnumValue *values; + guint j = 0; + + values = G_ENUM_CLASS (g_type_class_ref (param->value_type))->values; + + if (return_value) { + while (values[j].value_name) { + if (values[j].value == default_int) { + gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "default"), + values[j].value_name); + break; + } + j++; + } + } else { + gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "default"), ""); + } + + j = 0; + while (values[j].value_name) { +#if !GTK_CHECK_VERSION (2, 24, 0) + gtk_combo_box_remove_text (GET_PROP_WIDGET ((GtkComboBox *), + "value"), 0); +#else + gtk_combo_box_text_remove (GET_PROP_WIDGET ((GtkComboBoxText *), + "value"), 0); +#endif + j++; + } + + j = 0; + while (values[j].value_name) { + gchar *val; + if (return_value && (mask & (1 << values[j].value)) != 0) + val = g_strdup_printf ("**%s**", values[j].value_name); + else + val = g_strdup (values[j].value_name); + +#if !GTK_CHECK_VERSION (2, 24, 0) + gtk_combo_box_append_text (GET_PROP_WIDGET ((GtkComboBox *), + "value"), val); +#else + gtk_combo_box_text_append_text (GET_PROP_WIDGET ((GtkComboBoxText + *), "value"), val); +#endif + g_free (val); + j++; + } + } + break; + case BOOL: + g_signal_emit_by_name (G_OBJECT (properties[i].src), + "get-boolean-setting", properties[i].property_name, + &changeable, &default_bool, &return_value, NULL); + if (return_value) { + gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "value"), + changeable); + gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "get"), + changeable); + gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "set"), + changeable); + gtk_toggle_button_set_active (GET_PROP_WIDGET ((GtkToggleButton *), + "default"), default_bool); + } + break; + case NONE: + default: + break; + } + } +} + +int +main (int argc, char *argv[]) +{ + Main self = { NULL, NULL, NULL, NULL }; + GstBus *bus = NULL; + GtkWidget *window, *static_vbox, *dynamic_vbox, *da; + gchar *drop; + gdouble drop_probability; + GdkWindow *gdk_win = NULL; + const char *device = "/dev/video0"; + GError *error = NULL; + int i; + + gtk_init (&argc, &argv); + gst_init (&argc, &argv); + + if (argc > 1) + device = argv[1]; + else + g_print ("Usage : %s [device]\nUsing default device : %s\n", + argv[0], device); + + + self.bin = 
gst_parse_launch ("uvch264_src name=src src.vidsrc ! queue ! " + "capsfilter name=vid_cf ! identity name=identity ! ffdec_h264 ! " + "xvimagesink name=h264_sink async=false " + "src.vfsrc ! queue ! capsfilter name=vf_cf ! " + "xvimagesink name=preview_sink async=false", NULL); + + if (!self.bin) + return -1; + + /* Listen to the bus for messages */ + bus = gst_element_get_bus (self.bin); + gst_bus_add_watch (bus, _bus_callback, self.bin); + gst_object_unref (bus); + + self.src = gst_bin_get_by_name (GST_BIN (self.bin), "src"); + self.identity = gst_bin_get_by_name (GST_BIN (self.bin), "identity"); + self.vid_capsfilter = gst_bin_get_by_name (GST_BIN (self.bin), "vid_cf"); + self.vf_capsfilter = gst_bin_get_by_name (GST_BIN (self.bin), "vf_cf"); + + self.builder = gtk_builder_new (); + gtk_builder_add_from_file (self.builder, WINDOW_GLADE, &error); + if (error) { + g_debug ("Unable to load glade file : %s", error->message); + goto end; + } + gtk_builder_connect_signals (self.builder, &self); + + g_object_get (self.identity, "drop-probability", &drop_probability, NULL); + drop = g_strdup_printf ("%f", drop_probability); + gtk_entry_set_text (GET_WIDGET (&self, GTK_ENTRY, "drop"), drop); + g_free (drop); + window = GET_WIDGET (&self, GTK_WIDGET, "window"); + static_vbox = GET_WIDGET (&self, GTK_WIDGET, "static"); + dynamic_vbox = GET_WIDGET (&self, GTK_WIDGET, "dynamic"); + da = GET_WIDGET (&self, GTK_WIDGET, "h264"); + gtk_widget_realize (da); + gdk_win = gtk_widget_get_window (da); + h264_xid = GDK_WINDOW_XID (gdk_win); + da = GET_WIDGET (&self, GTK_WIDGET, "preview"); + gtk_widget_realize (da); + gdk_win = gtk_widget_get_window (da); + preview_xid = GDK_WINDOW_XID (gdk_win); + + set_caps (&self, FALSE); + + g_object_set (self.src, "device", device, NULL); + if (gst_element_set_state (self.bin, GST_STATE_READY) == + GST_STATE_CHANGE_FAILURE) { + g_debug ("Unable to go to state READY"); + goto end; + } + + for (i = 0; i < G_N_ELEMENTS (properties); i++) { + switch (properties[i].type) { + case INT: + properties[i].src = self.src; + properties[i].builder = gtk_builder_new (); + gtk_builder_add_from_file (properties[i].builder, INT_PROPERTY_GLADE, + NULL); + gtk_builder_connect_signals (properties[i].builder, &properties[i]); + gtk_box_pack_start (PROPERTY_TO_VBOX, + GET_PROP_WIDGET (GTK_WIDGET, "int-property"), TRUE, TRUE, 2); + gtk_label_set_label (GET_PROP_WIDGET (GTK_LABEL, "label"), + properties[i].property_name); + if (properties[i].readonly) + gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "set"), FALSE); + break; + case ENUM: + properties[i].src = self.src; + properties[i].builder = gtk_builder_new (); +#if !GTK_CHECK_VERSION (2, 24, 0) + gtk_builder_add_from_file (properties[i].builder, + "enum_property_gtk2.glade", NULL); +#else + gtk_builder_add_from_file (properties[i].builder, ENUM_PROPERTY_GLADE, + NULL); +#endif + gtk_builder_connect_signals (properties[i].builder, &properties[i]); + gtk_box_pack_start (PROPERTY_TO_VBOX, + GET_PROP_WIDGET (GTK_WIDGET, "enum-property"), TRUE, TRUE, 2); + gtk_label_set_label (GET_PROP_WIDGET (GTK_LABEL, "label"), + properties[i].property_name); +#if !GTK_CHECK_VERSION (2, 24, 0) + { + GtkComboBox *combo_box; + GtkCellRenderer *cell; + GtkListStore *store; + + combo_box = GET_PROP_WIDGET ((GtkComboBox *), "value"); + store = gtk_list_store_new (1, G_TYPE_STRING); + gtk_combo_box_set_model (combo_box, GTK_TREE_MODEL (store)); + g_object_unref (store); + + cell = gtk_cell_renderer_text_new (); + gtk_cell_layout_pack_start (GTK_CELL_LAYOUT 
(combo_box), cell, TRUE); + gtk_cell_layout_set_attributes (GTK_CELL_LAYOUT (combo_box), cell, + "text", 0, NULL); + } +#endif + if (properties[i].readonly) + gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "set"), FALSE); + break; + case BOOL: + properties[i].src = self.src; + properties[i].builder = gtk_builder_new (); + gtk_builder_add_from_file (properties[i].builder, BOOL_PROPERTY_GLADE, + NULL); + gtk_builder_connect_signals (properties[i].builder, &properties[i]); + gtk_box_pack_start (PROPERTY_TO_VBOX, + GET_PROP_WIDGET (GTK_WIDGET, "boolean-property"), TRUE, TRUE, 2); + gtk_label_set_label (GET_PROP_WIDGET (GTK_LABEL, "label"), + properties[i].property_name); + if (properties[i].readonly) + gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "set"), FALSE); + break; + case NONE: + default: + break; + } + } + probe_all_properties (FALSE); + get_all_properties (); + + gtk_widget_show (window); + gtk_main (); + +end: + g_object_unref (G_OBJECT (self.builder)); + for (i = 0; i < G_N_ELEMENTS (properties); i++) { + if (properties[i].builder) + g_object_unref (G_OBJECT (properties[i].builder)); + } + gst_element_set_state (self.bin, GST_STATE_NULL); + gst_object_unref (self.src); + gst_object_unref (self.identity); + gst_object_unref (self.vid_capsfilter); + gst_object_unref (self.vf_capsfilter); + gst_object_unref (self.bin); + + return 0; +} diff --git a/tests/examples/uvch264/window.glade b/tests/examples/uvch264/window.glade new file mode 100644 index 000000000..8b7624a9f --- /dev/null +++ b/tests/examples/uvch264/window.glade @@ -0,0 +1,345 @@ +<?xml version="1.0" encoding="UTF-8"?> +<interface> + <!-- interface-requires gtk+ 3.0 --> + <object class="GtkWindow" id="window"> + <property name="can_focus">False</property> + <property name="title" translatable="yes">Test for uvch264_src</property> + <signal name="destroy" handler="on_window_destroyed" swapped="no"/> + <child> + <object class="GtkHBox" id="hbox1"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <child> + <object class="GtkVBox" id="vbox"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="orientation">vertical</property> + <child> + <object class="GtkHBox" id="box26"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="homogeneous">True</property> + <child> + <object class="GtkButton" id="button2"> + <property name="label" translatable="yes">State NULL</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_button_null_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">0</property> + </packing> + </child> + <child> + <object class="GtkButton" id="button51"> + <property name="label" translatable="yes">State READY</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_button_ready_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">1</property> + </packing> + </child> + 
<child> + <object class="GtkButton" id="button52"> + <property name="label" translatable="yes">State PLAYING</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_button_playing_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">2</property> + </packing> + </child> + <child> + <object class="GtkButton" id="button3"> + <property name="label" translatable="yes">Start capture</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_start_capture_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">3</property> + </packing> + </child> + <child> + <object class="GtkButton" id="button4"> + <property name="label" translatable="yes">Stop capture</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_stop_capture_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">4</property> + </packing> + </child> + <child> + <object class="GtkButton" id="button5"> + <property name="label" translatable="yes">Renegotiate</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_renegotiate_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">5</property> + </packing> + </child> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">0</property> + </packing> + </child> + <child> + <object class="GtkLabel" id="label3"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Static controls</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">1</property> + </packing> + </child> + <child> + <object class="GtkVBox" id="static"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="orientation">vertical</property> + <child> + <placeholder/> + </child> + </object> + <packing> + <property name="expand">True</property> + <property name="fill">True</property> + <property name="position">2</property> + </packing> + </child> + <child> + <object class="GtkLabel" id="label4"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Dynamic controls</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + 
<property name="position">3</property> + </packing> + </child> + <child> + <object class="GtkVBox" id="dynamic"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="orientation">vertical</property> + <child> + <placeholder/> + </child> + </object> + <packing> + <property name="expand">True</property> + <property name="fill">True</property> + <property name="position">4</property> + </packing> + </child> + <child> + <object class="GtkHBox" id="box1"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <child> + <object class="GtkLabel" id="label5"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Drop probability % (between 0.0 and 1.0)</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">0</property> + </packing> + </child> + <child> + <object class="GtkEntry" id="drop"> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="invisible_char">●</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">1</property> + </packing> + </child> + <child> + <object class="GtkCheckButton" id="pps_sps"> + <property name="label" translatable="yes">With SPS/PPS</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">False</property> + <property name="use_action_appearance">False</property> + <property name="xalign">0</property> + <property name="draw_indicator">True</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">2</property> + </packing> + </child> + <child> + <object class="GtkButton" id="button1"> + <property name="label" translatable="yes">Request keyframe</property> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="receives_default">True</property> + <property name="use_action_appearance">False</property> + <signal name="clicked" handler="on_iframe_button_clicked" swapped="no"/> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">3</property> + </packing> + </child> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">5</property> + </packing> + </child> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">0</property> + </packing> + </child> + <child> + <object class="GtkVBox" id="box2"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="orientation">vertical</property> + <child> + <object class="GtkLabel" id="label1"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">H264</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">0</property> + </packing> + </child> + <child> + <object class="GtkEntry" id="h264_caps"> + <property name="visible">True</property> + <property 
name="can_focus">True</property> + <property name="invisible_char">●</property> + <property name="text" translatable="yes">video/x-h264,width=640,height=480,profile=constrained-baseline,stream-format=bytestream,framerate=15/1</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">1</property> + </packing> + </child> + <child> + <object class="GtkDrawingArea" id="h264"> + <property name="width_request">320</property> + <property name="height_request">240</property> + <property name="visible">True</property> + <property name="app_paintable">True</property> + <property name="can_focus">False</property> + <property name="double_buffered">False</property> + <property name="halign">center</property> + <property name="valign">center</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">False</property> + <property name="position">2</property> + </packing> + </child> + <child> + <object class="GtkLabel" id="label2"> + <property name="visible">True</property> + <property name="can_focus">False</property> + <property name="label" translatable="yes">Preview</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">3</property> + </packing> + </child> + <child> + <object class="GtkEntry" id="preview_caps"> + <property name="visible">True</property> + <property name="can_focus">True</property> + <property name="invisible_char">●</property> + <property name="text" translatable="yes">video/x-raw-yuv,width=320,height=240,format=(fourcc)YUY2,framerate=15/1</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">True</property> + <property name="position">4</property> + </packing> + </child> + <child> + <object class="GtkDrawingArea" id="preview"> + <property name="width_request">320</property> + <property name="height_request">240</property> + <property name="visible">True</property> + <property name="app_paintable">True</property> + <property name="can_focus">False</property> + <property name="double_buffered">False</property> + <property name="halign">center</property> + <property name="valign">center</property> + </object> + <packing> + <property name="expand">False</property> + <property name="fill">False</property> + <property name="position">5</property> + </packing> + </child> + </object> + <packing> + <property name="expand">True</property> + <property name="fill">True</property> + <property name="position">1</property> + </packing> + </child> + </object> + </child> + </object> +</interface> |