summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorEdward Hervey <edward.hervey@collabora.co.uk>2011-12-30 11:41:17 +0100
committerEdward Hervey <edward.hervey@collabora.co.uk>2011-12-30 11:41:17 +0100
commitf70a623418a52eada0278002f8d266d49957b3d5 (patch)
treefc54fd9fab400e886b11a9a2b313478b66dd7969
parentd5aaefa59f9ef4153644a2aa254c39a3a9d108e3 (diff)
parentd465188879cd4dd0735e3fbcaeb83d98f217ed88 (diff)
downloadgstreamer-plugins-bad-f70a623418a52eada0278002f8d266d49957b3d5.tar.gz
Merge remote-tracking branch 'origin/master' into 0.11-premerge
Conflicts: docs/libs/Makefile.am ext/kate/gstkatetiger.c ext/opus/gstopusdec.c ext/xvid/gstxvidenc.c gst-libs/gst/basecamerabinsrc/Makefile.am gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h gst-libs/gst/video/gstbasevideocodec.c gst-libs/gst/video/gstbasevideocodec.h gst-libs/gst/video/gstbasevideodecoder.c gst-libs/gst/video/gstbasevideoencoder.c gst/asfmux/gstasfmux.c gst/audiovisualizers/gstwavescope.c gst/camerabin2/gstcamerabin2.c gst/debugutils/gstcompare.c gst/frei0r/gstfrei0rmixer.c gst/mpegpsmux/mpegpsmux.c gst/mpegtsmux/mpegtsmux.c gst/mxf/mxfmux.c gst/videomeasure/gstvideomeasure_ssim.c gst/videoparsers/gsth264parse.c gst/videoparsers/gstmpeg4videoparse.c
-rw-r--r--Makefile.am2
-rw-r--r--configure.ac9
-rw-r--r--docs/libs/Makefile.am7
-rw-r--r--docs/libs/gst-plugins-bad-libs-docs.sgml1
-rw-r--r--docs/libs/gst-plugins-bad-libs-sections.txt35
-rw-r--r--ext/Makefile.am4
-rw-r--r--ext/dts/Makefile.am7
-rw-r--r--ext/dts/gstdtsdec.c9
-rw-r--r--ext/faac/gstfaac.c86
-rw-r--r--ext/faac/gstfaac.h4
-rw-r--r--ext/kate/gstkatetiger.c486
-rw-r--r--ext/kate/gstkatetiger.h4
-rw-r--r--ext/opencv/gstmotioncells.c51
-rw-r--r--ext/opencv/gstmotioncells.h1
-rw-r--r--ext/opus/Makefile.am5
-rw-r--r--ext/opus/gstopus.c11
-rw-r--r--ext/opus/gstopuscommon.c18
-rw-r--r--ext/opus/gstopuscommon.h3
-rw-r--r--ext/opus/gstopusdec.c77
-rw-r--r--ext/opus/gstopusenc.c279
-rw-r--r--ext/opus/gstopusenc.h4
-rw-r--r--ext/opus/gstopusheader.c46
-rw-r--r--ext/opus/gstopusheader.h2
-rw-r--r--ext/opus/gstopusparse.c2
-rw-r--r--ext/opus/gstrtpopusdepay.c120
-rw-r--r--ext/opus/gstrtpopusdepay.h57
-rw-r--r--ext/opus/gstrtpopuspay.c137
-rw-r--r--ext/opus/gstrtpopuspay.h58
-rw-r--r--ext/resindvd/rsndec.c3
-rw-r--r--ext/voaacenc/gstvoaacenc.c10
-rw-r--r--ext/voamrwbenc/gstvoamrwbenc.c5
-rw-r--r--ext/xvid/gstxvidenc.c192
-rw-r--r--ext/xvid/gstxvidenc.h1
-rw-r--r--gst-libs/gst/basecamerabinsrc/Makefile.am3
-rw-r--r--gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c51
-rw-r--r--gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h6
-rw-r--r--gst-libs/gst/codecparsers/Makefile.am7
-rw-r--r--gst-libs/gst/codecparsers/gsth264parser.c151
-rw-r--r--gst-libs/gst/codecparsers/gsth264parser.h50
-rw-r--r--gst-libs/gst/codecparsers/gstmpeg4parser.c1735
-rw-r--r--gst-libs/gst/codecparsers/gstmpeg4parser.h578
-rw-r--r--gst-libs/gst/codecparsers/gstmpegvideoparser.c32
-rw-r--r--gst-libs/gst/codecparsers/gstvc1parser.c102
-rw-r--r--gst-libs/gst/codecparsers/parserutils.c57
-rw-r--r--gst-libs/gst/codecparsers/parserutils.h108
-rw-r--r--gst-libs/gst/video/gstbasevideocodec.c10
-rw-r--r--gst-libs/gst/video/gstbasevideocodec.h1
-rw-r--r--gst-libs/gst/video/gstbasevideoencoder.c257
-rw-r--r--gst-libs/gst/video/gstbasevideoencoder.h17
-rw-r--r--gst/asfmux/gstasfmux.c36
-rw-r--r--gst/asfmux/gstasfmux.h6
-rw-r--r--gst/asfmux/gstasfobjects.h4
-rw-r--r--gst/audiovisualizers/README36
-rw-r--r--gst/audiovisualizers/gstdrawhelpers.h58
-rw-r--r--gst/audiovisualizers/gstspacescope.c210
-rw-r--r--gst/audiovisualizers/gstspacescope.h6
-rw-r--r--gst/audiovisualizers/gstwavescope.c179
-rw-r--r--gst/audiovisualizers/gstwavescope.h3
-rw-r--r--gst/autoconvert/gstautoconvert.c91
-rw-r--r--gst/autoconvert/gstautoconvert.h2
-rw-r--r--gst/camerabin2/camerabingeneral.c29
-rw-r--r--gst/camerabin2/camerabingeneral.h2
-rw-r--r--gst/camerabin2/gstcamerabin2.c78
-rw-r--r--gst/camerabin2/gstcamerabin2.h1
-rw-r--r--gst/camerabin2/gstviewfinderbin.c24
-rw-r--r--gst/camerabin2/gstwrappercamerabinsrc.c20
-rw-r--r--gst/debugutils/gstcompare.c28
-rw-r--r--gst/debugutils/gstcompare.h2
-rw-r--r--gst/frei0r/gstfrei0rmixer.c42
-rw-r--r--gst/frei0r/gstfrei0rmixer.h4
-rw-r--r--gst/liveadder/liveadder.c5
-rw-r--r--gst/mpeg4videoparse/Makefile.am24
-rw-r--r--gst/mpeg4videoparse/mpeg4parse.c294
-rw-r--r--gst/mpeg4videoparse/mpeg4parse.h63
-rw-r--r--gst/mpegdemux/gstmpegdemux.c2
-rw-r--r--gst/mpegdemux/gstmpegtsdemux.c19
-rw-r--r--gst/mpegdemux/mpegtsparse.c8
-rw-r--r--gst/mpegpsmux/mpegpsmux.c144
-rw-r--r--gst/mpegpsmux/mpegpsmux.h11
-rw-r--r--gst/mpegpsmux/psmux.c81
-rw-r--r--gst/mpegpsmux/psmux.h6
-rw-r--r--gst/mpegpsmux/psmuxstream.c3
-rw-r--r--gst/mpegpsmux/psmuxstream.h5
-rw-r--r--gst/mpegtsmux/Makefile.am5
-rw-r--r--gst/mpegtsmux/mpegtsmux.c279
-rw-r--r--gst/mpegtsmux/mpegtsmux.h11
-rw-r--r--gst/mxf/mxfmux.c73
-rw-r--r--gst/mxf/mxfmux.h6
-rw-r--r--gst/videomeasure/gstvideomeasure_ssim.c43
-rw-r--r--gst/videomeasure/gstvideomeasure_ssim.h4
-rw-r--r--gst/videoparsers/Makefile.am9
-rw-r--r--gst/videoparsers/gsth264parse.c474
-rw-r--r--gst/videoparsers/gsth264parse.h8
-rw-r--r--gst/videoparsers/gstmpeg4videoparse.c (renamed from gst/mpeg4videoparse/mpeg4videoparse.c)533
-rw-r--r--gst/videoparsers/gstmpeg4videoparse.h (renamed from gst/mpeg4videoparse/mpeg4videoparse.h)43
-rw-r--r--gst/videoparsers/plugin.c3
-rw-r--r--pkgconfig/gstreamer-plugins-bad.pc.in4
-rw-r--r--po/LINGUAS2
-rw-r--r--po/eo.po71
-rw-r--r--po/es.po369
-rw-r--r--po/gl.po14
-rw-r--r--po/ky.po2
-rw-r--r--po/lv.po12
-rw-r--r--po/sr.po93
-rw-r--r--po/sv.po441
-rw-r--r--tests/check/Makefile.am8
-rw-r--r--tests/check/elements/.gitignore3
-rw-r--r--tests/check/elements/camerabin2.c13
-rw-r--r--tests/check/elements/mpegtsmux.c323
-rw-r--r--tests/check/libs/.gitignore3
-rw-r--r--tests/check/pipelines/colorspace.c9
111 files changed, 6842 insertions, 2443 deletions
diff --git a/Makefile.am b/Makefile.am
index 55670a3dd..153fef57f 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -52,6 +52,7 @@ CRUFT_FILES = \
$(top_builddir)/gst/audioparsers/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/flacparse/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/imagefreeze/.libs/*.{so,dll,DLL,dylib} \
+ $(top_builddir)/gst/mpeg4videoparse/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/qtmux/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/selector/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/shapewipe/.libs/*.{so,dll,DLL,dylib} \
@@ -79,6 +80,7 @@ CRUFT_DIRS = \
$(top_srcdir)/gst/flacparse \
$(top_srcdir)/gst/imagefreeze \
$(top_srcdir)/gst/invtelecine \
+ $(top_srcdir)/gst/mpeg4videoparse \
$(top_srcdir)/gst/qtmux \
$(top_srcdir)/gst/selector \
$(top_srcdir)/gst/shapewipe \
diff --git a/configure.ac b/configure.ac
index 687edd1f8..4d179209c 100644
--- a/configure.ac
+++ b/configure.ac
@@ -308,7 +308,7 @@ GST_PLUGINS_NONPORTED=" adpcmdec adpcmenc aiff asfmux \
decklink fbdev linsys shm vcd \
voaacenc apexsink bz2 cdaudio celt cog curl dc1394 dirac directfb dts resindvd \
gsettings gsm jp2k ladspa modplug mpeg2enc mplex mimic \
- musepack musicbrainz nas neon ofa openal rsvg schro sdl smooth sndfile soundtouch spandsp timidity \
+ musepack musicbrainz nas neon ofa openal opencv rsvg schro sdl smooth sndfile soundtouch spandsp timidity \
wildmidi xvid apple_media "
AC_SUBST(GST_PLUGINS_NONPORTED)
@@ -354,7 +354,6 @@ AG_GST_CHECK_PLUGIN(mpegdemux)
AG_GST_CHECK_PLUGIN(mpegtsdemux)
AG_GST_CHECK_PLUGIN(mpegtsmux)
AG_GST_CHECK_PLUGIN(mpegpsmux)
-AG_GST_CHECK_PLUGIN(mpeg4videoparse)
AG_GST_CHECK_PLUGIN(mpegvideoparse)
AG_GST_CHECK_PLUGIN(mve)
AG_GST_CHECK_PLUGIN(mxf)
@@ -1382,7 +1381,7 @@ AG_GST_CHECK_FEATURE(OPENCV, [opencv plugins], opencv, [
dnl a new version and the no-backward-compatibility define. (There doesn't
dnl seem to be a switch to suppress the warnings the cvcompat.h header
dnl causes.)
- PKG_CHECK_MODULES(OPENCV, opencv >= 2.0.0 opencv <= 2.2.0 , [
+ PKG_CHECK_MODULES(OPENCV, opencv >= 2.0.0 opencv <= 2.3.1 , [
AC_PROG_CXX
AC_LANG_CPLUSPLUS
OLD_CPPFLAGS=$CPPFLAGS
@@ -1612,6 +1611,9 @@ AG_GST_CHECK_FEATURE(XVID, [xvid plugins], xvid, [
#if XVID_API_MAJOR(XVID_API) != 4
#error "Incompatible XviD API version"
#endif
+ #if XVID_API_MAJOR(XVID_API) == 4 && XVID_API_MINOR(XVID_API) < 3
+ #error "Incompatible XviD API version"
+ #endif
],[ AC_MSG_RESULT(yes)
XVID_LIBS="-lxvidcore $LIBM"
AC_SUBST(XVID_LIBS)
@@ -1965,7 +1967,6 @@ gst/mpegtsdemux/Makefile
gst/mpegtsmux/Makefile
gst/mpegtsmux/tsmux/Makefile
gst/mpegpsmux/Makefile
-gst/mpeg4videoparse/Makefile
gst/mpegvideoparse/Makefile
gst/mve/Makefile
gst/mxf/Makefile
diff --git a/docs/libs/Makefile.am b/docs/libs/Makefile.am
index cd3438778..016b843c8 100644
--- a/docs/libs/Makefile.am
+++ b/docs/libs/Makefile.am
@@ -49,22 +49,21 @@ extra_files =
# CFLAGS and LDFLAGS for compiling scan program. Only needed if your app/lib
# contains GtkObjects/GObjects and you want to document signals and properties.
-GTKDOC_CFLAGS = -DGST_USE_UNSTABLE_API $(GST_PLUGINS_BAD_CFLAGS) $(GST_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS)
+GTKDOC_CFLAGS = -DGST_USE_UNSTABLE_API $(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS)
GTKDOC_LIBS = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/basecamerabinsrc/libgstbasecamerabinsrc-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/signalprocessor/libgstsignalprocessor-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/video/libgstbasevideo-@GST_MAJORMINOR@.la \
- $(GST_BASE_LIBS) $(GST_BAD_LIBS)
+ $(GST_BASE_LIBS)
GTKDOC_CC=$(LIBTOOL) --tag=CC --mode=compile $(CC)
GTKDOC_LD=$(LIBTOOL) --tag=CC --mode=link $(CC)
# If you need to override some of the declarations, place them in this file
# and uncomment this line.
-#DOC_OVERRIDES = $(DOC_MODULE)-overrides.txt
-DOC_OVERRIDES =
+DOC_OVERRIDES = $(DOC_MODULE)-overrides.txt
include $(top_srcdir)/common/gtk-doc.mak
diff --git a/docs/libs/gst-plugins-bad-libs-docs.sgml b/docs/libs/gst-plugins-bad-libs-docs.sgml
index 0c9994160..cf6885b44 100644
--- a/docs/libs/gst-plugins-bad-libs-docs.sgml
+++ b/docs/libs/gst-plugins-bad-libs-docs.sgml
@@ -30,6 +30,7 @@
</para>
<xi:include href="xml/gsth264parser.xml" />
<xi:include href="xml/gstmpegvideoparser.xml" />
+ <xi:include href="xml/gstmpeg4parser.xml" />
<xi:include href="xml/gstvc1parser.xml" />
</chapter>
diff --git a/docs/libs/gst-plugins-bad-libs-sections.txt b/docs/libs/gst-plugins-bad-libs-sections.txt
index 92e620ac5..ca341d3e6 100644
--- a/docs/libs/gst-plugins-bad-libs-sections.txt
+++ b/docs/libs/gst-plugins-bad-libs-sections.txt
@@ -118,6 +118,41 @@ gst_mpeg_video_parse_quant_matrix_extension
</SECTION>
<SECTION>
+<FILE>gstmpeg4parser</FILE>
+<TITLE>mpeg4parser</TITLE>
+<INCLUDE>gst/codecparsers/gstmpeg4parser.h</INCLUDE>
+GstMpeg4StartCode
+GstMpeg4VisualObjectType
+GstMpeg4AspectRatioInfo
+GstMpeg4ParseResult
+GstMpeg4VideoObjectCodingType
+GstMpeg4ChromaFormat
+GstMpeg4VideoObjectLayerShape
+GstMpeg4SpriteEnable
+GstMpeg4Profile
+GstMpeg4Level
+GstMpeg4VisualObjectSequence
+GstMpeg4VisualObject
+GstMpeg4VideoSignalType
+GstMpeg4VideoPlaneShortHdr
+GstMpeg4VideoObjectLayer
+GstMpeg4SpriteTrajectory
+GstMpeg4GroupOfVOP
+GstMpeg4VideoObjectPlane
+GstMpeg4Packet
+GstMpeg4VideoPacketHdr
+gst_mpeg4_parse
+gst_mpeg4_parse_video_object_plane
+gst_mpeg4_parse_group_of_vop
+gst_mpeg4_parse_video_object_layer
+gst_mpeg4_parse_visual_object
+gst_mpeg4_parse_visual_object_sequence
+gst_mpeg4_parse_video_packet_header
+<SUBSECTION Standard>
+<SUBSECTION Private>
+</SECTION>
+
+<SECTION>
<FILE>gstphotography</FILE>
GST_PHOTOGRAPHY_AUTOFOCUS_DONE
GST_PHOTOGRAPHY_SHAKE_RISK
diff --git a/ext/Makefile.am b/ext/Makefile.am
index 95ba3da75..dc62386ca 100644
--- a/ext/Makefile.am
+++ b/ext/Makefile.am
@@ -349,9 +349,9 @@ SWFDEC_DIR=
endif
if USE_TELETEXTDEC
-TELETEXT_DIR=teletextdec
+TELETEXTDEC_DIR=teletextdec
else
-TELETEXT_DIR=
+TELETEXTDEC_DIR=
endif
if USE_VP8
diff --git a/ext/dts/Makefile.am b/ext/dts/Makefile.am
index f58f14972..97a838531 100644
--- a/ext/dts/Makefile.am
+++ b/ext/dts/Makefile.am
@@ -2,9 +2,10 @@ plugin_LTLIBRARIES = libgstdtsdec.la
libgstdtsdec_la_SOURCES = gstdtsdec.c
libgstdtsdec_la_CFLAGS = -DGST_USE_UNSTABLE_API \
- $(GST_CFLAGS) $(ORC_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
-libgstdtsdec_la_LIBADD = $(DTS_LIBS) $(ORC_LIBS) $(GST_PLUGINS_BASE_LIBS) \
- -lgstaudio-@GST_MAJORMINOR@
+ $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(ORC_CFLAGS)
+libgstdtsdec_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) \
+ -lgstaudio-@GST_MAJORMINOR@ \
+ $(DTS_LIBS) $(ORC_LIBS)
libgstdtsdec_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstdtsdec_la_LIBTOOLFLAGS = --tag=disable-static
diff --git a/ext/dts/gstdtsdec.c b/ext/dts/gstdtsdec.c
index 2a762e903..f71219478 100644
--- a/ext/dts/gstdtsdec.c
+++ b/ext/dts/gstdtsdec.c
@@ -494,11 +494,15 @@ gst_dtsdec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buffer)
guint8 *data;
gint size, chans;
gint length = 0, flags, sample_rate, bit_rate, frame_length;
- GstFlowReturn result = GST_FLOW_UNEXPECTED;
+ GstFlowReturn result = GST_FLOW_OK;
GstBuffer *outbuf;
dts = GST_DTSDEC (bdec);
+ /* no fancy draining */
+ if (G_UNLIKELY (!buffer))
+ return GST_FLOW_OK;
+
/* parsed stuff already, so this should work out fine */
data = GST_BUFFER_DATA (buffer);
size = GST_BUFFER_SIZE (buffer);
@@ -599,7 +603,8 @@ gst_dtsdec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buffer)
/* negotiate if required */
if (need_renegotiation) {
- GST_DEBUG ("dtsdec: sample_rate:%d stream_chans:0x%x using_chans:0x%x",
+ GST_DEBUG_OBJECT (dts,
+ "dtsdec: sample_rate:%d stream_chans:0x%x using_chans:0x%x",
dts->sample_rate, dts->stream_channels, dts->using_channels);
if (!gst_dtsdec_renegotiate (dts))
goto failed_negotiation;
diff --git a/ext/faac/gstfaac.c b/ext/faac/gstfaac.c
index ddfae6d52..c4b34f544 100644
--- a/ext/faac/gstfaac.c
+++ b/ext/faac/gstfaac.c
@@ -369,9 +369,8 @@ static gboolean
gst_faac_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
{
GstFaac *faac = GST_FAAC (enc);
- faacEncHandle *handle;
gint channels, samplerate, width;
- gulong samples, bytes, fmt = 0, bps = 0;
+ gulong fmt = 0, bps = 0;
gboolean result = FALSE;
/* base class takes care */
@@ -398,41 +397,24 @@ gst_faac_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
bps = 4;
}
- /* clean up in case of re-configure */
- gst_faac_close_encoder (faac);
-
- if (!(handle = faacEncOpen (samplerate, channels, &samples, &bytes)))
- goto setup_failed;
-
- /* mind channel count */
- samples /= channels;
-
/* ok, record and set up */
faac->format = fmt;
faac->bps = bps;
- faac->handle = handle;
- faac->bytes = bytes;
- faac->samples = samples;
faac->channels = channels;
faac->samplerate = samplerate;
/* finish up */
result = gst_faac_configure_source_pad (faac);
+ if (!result)
+ goto done;
/* report needs to base class */
- gst_audio_encoder_set_frame_samples_min (enc, samples);
- gst_audio_encoder_set_frame_samples_max (enc, samples);
+ gst_audio_encoder_set_frame_samples_min (enc, faac->samples);
+ gst_audio_encoder_set_frame_samples_max (enc, faac->samples);
gst_audio_encoder_set_frame_max (enc, 1);
done:
return result;
-
- /* ERRORS */
-setup_failed:
- {
- GST_ELEMENT_ERROR (faac, LIBRARY, SETTINGS, (NULL), (NULL));
- goto done;
- }
}
/* check downstream caps to configure format */
@@ -494,15 +476,32 @@ gst_faac_negotiate (GstFaac * faac)
}
static gboolean
-gst_faac_configure_source_pad (GstFaac * faac)
+gst_faac_open_encoder (GstFaac * faac)
{
- GstCaps *srccaps;
- gboolean ret = FALSE;
+ faacEncHandle *handle;
faacEncConfiguration *conf;
guint maxbitrate;
+ gulong samples, bytes;
- /* negotiate stream format */
- gst_faac_negotiate (faac);
+ g_return_val_if_fail (faac->samplerate != 0 && faac->channels != 0, FALSE);
+
+ /* clean up in case of re-configure */
+ gst_faac_close_encoder (faac);
+
+ if (!(handle = faacEncOpen (faac->samplerate, faac->channels,
+ &samples, &bytes)))
+ goto setup_failed;
+
+ /* mind channel count */
+ samples /= faac->channels;
+
+ /* record */
+ faac->handle = handle;
+ faac->samples = samples;
+ faac->bytes = bytes;
+
+ GST_DEBUG_OBJECT (faac, "faac needs samples %d, output size %d",
+ faac->samples, faac->bytes);
/* we negotiated caps update current configuration */
conf = faacEncGetCurrentConfiguration (faac->handle);
@@ -539,7 +538,7 @@ gst_faac_configure_source_pad (GstFaac * faac)
conf->bandWidth = 0;
if (!faacEncSetConfiguration (faac->handle, conf))
- goto set_failed;
+ goto setup_failed;
/* let's see what really happened,
* note that this may not really match desired rate */
@@ -548,6 +547,28 @@ gst_faac_configure_source_pad (GstFaac * faac)
GST_DEBUG_OBJECT (faac, "quantization quality: %ld", conf->quantqual);
GST_DEBUG_OBJECT (faac, "bandwidth: %d Hz", conf->bandWidth);
+ return TRUE;
+
+ /* ERRORS */
+setup_failed:
+ {
+ GST_ELEMENT_ERROR (faac, LIBRARY, SETTINGS, (NULL), (NULL));
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_faac_configure_source_pad (GstFaac * faac)
+{
+ GstCaps *srccaps;
+ gboolean ret;
+
+ /* negotiate stream format */
+ gst_faac_negotiate (faac);
+
+ if (!gst_faac_open_encoder (faac))
+ goto set_failed;
+
/* now create a caps for it all */
srccaps = gst_caps_new_simple ("audio/mpeg",
"mpegversion", G_TYPE_INT, faac->mpegversion,
@@ -665,6 +686,13 @@ gst_faac_handle_frame (GstAudioEncoder * enc, GstBuffer * in_buf)
} else {
gst_buffer_unmap (out_buf, out_data, 0);
gst_buffer_unref (out_buf);
+ /* re-create encoder after final flush */
+ if (!in_buf) {
+ GST_DEBUG_OBJECT (faac, "flushed; recreating encoder");
+ gst_faac_close_encoder (faac);
+ if (!gst_faac_open_encoder (faac))
+ ret = GST_FLOW_ERROR;
+ }
}
return ret;
diff --git a/ext/faac/gstfaac.h b/ext/faac/gstfaac.h
index 5bd057493..029e4fd05 100644
--- a/ext/faac/gstfaac.h
+++ b/ext/faac/gstfaac.h
@@ -51,9 +51,9 @@ struct _GstFaac {
bps;
/* input frame size */
- gulong samples;
+ gint samples;
/* required output buffer size */
- gulong bytes;
+ gint bytes;
/* negotiated */
gint mpegversion, outputformat;
diff --git a/ext/kate/gstkatetiger.c b/ext/kate/gstkatetiger.c
index f618b2e10..1504e112b 100644
--- a/ext/kate/gstkatetiger.c
+++ b/ext/kate/gstkatetiger.c
@@ -3,7 +3,6 @@
* Copyright 2005 Thomas Vander Stichele <thomas@apestaart.org>
* Copyright 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
* Copyright 2008 Vincent Penquerc'h <ogg.k.ogg.k@googlemail.com>
- * Copyright (C) <2009> Young-Ho Cha <ganadist@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
@@ -126,39 +125,6 @@ enum
ARG_SILENT
};
-/* RGB -> YUV blitting routines taken from textoverlay,
- original code from Young-Ho Cha <ganadist@gmail.com> */
-
-#define COMP_Y(ret, r, g, b) \
-{ \
- ret = (int) (((19595 * r) >> 16) + ((38470 * g) >> 16) + ((7471 * b) >> 16)); \
- ret = CLAMP (ret, 0, 255); \
-}
-
-#define COMP_U(ret, r, g, b) \
-{ \
- ret = (int) (-((11059 * r) >> 16) - ((21709 * g) >> 16) + ((32768 * b) >> 16) + 128); \
- ret = CLAMP (ret, 0, 255); \
-}
-
-#define COMP_V(ret, r, g, b) \
-{ \
- ret = (int) (((32768 * r) >> 16) - ((27439 * g) >> 16) - ((5329 * b) >> 16) + 128); \
- ret = CLAMP (ret, 0, 255); \
-}
-
-#define BLEND(ret, alpha, v0, v1) \
-{ \
- ret = (v0 * alpha + v1 * (255 - alpha)) / 255; \
-}
-
-#define OVER(ret, alphaA, Ca, alphaB, Cb, alphaNew) \
-{ \
- gint _tmp; \
- _tmp = (Ca * alphaA + Cb * alphaB * (255 - alphaA) / 255) / alphaNew; \
- ret = CLAMP (_tmp, 0, 255); \
-}
-
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
# define TIGER_ARGB_A 3
# define TIGER_ARGB_R 2
@@ -187,11 +153,16 @@ static GstStaticPadTemplate kate_sink_factory =
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define TIGER_VIDEO_CAPS \
GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_BGRx ";" \
- GST_VIDEO_CAPS_YUV ("{AYUV, I420, YV12, UYVY, NV12, NV21}")
+ GST_VIDEO_CAPS_YUV ("{I420, YV12, AYUV, YUY2, UYVY, v308, v210," \
+ " v216, Y41B, Y42B, Y444, Y800, Y16, NV12, NV21, UYVP, A420," \
+ " YUV9, IYU1}")
+
#else
#define TIGER_VIDEO_CAPS \
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xRGB ";" \
- GST_VIDEO_CAPS_YUV ("{AYUV, I420, YV12, UYVY, NV12, NV21}")
+ GST_VIDEO_CAPS_YUV ("{I420, YV12, AYUV, YUY2, UYVY, v308, v210," \
+ " v216, Y41B, Y42B, Y444, Y800, Y16, NV12, NV21, UYVP, A420," \
+ " YUV9, IYU1}")
#endif
static GstStaticPadTemplate video_sink_factory =
@@ -417,6 +388,8 @@ gst_kate_tiger_init (GstKateTiger * tiger, GstKateTigerClass * gclass)
tiger->video_width = 0;
tiger->video_height = 0;
+ tiger->composition = NULL;
+
tiger->seen_header = FALSE;
}
@@ -432,8 +405,10 @@ gst_kate_tiger_dispose (GObject * object)
tiger->default_font_desc = NULL;
}
- g_free (tiger->render_buffer);
- tiger->render_buffer = NULL;
+ if (tiger->render_buffer) {
+ gst_buffer_unref (tiger->render_buffer);
+ tiger->render_buffer = NULL;
+ }
g_cond_free (tiger->cond);
tiger->cond = NULL;
@@ -441,6 +416,11 @@ gst_kate_tiger_dispose (GObject * object)
g_mutex_free (tiger->mutex);
tiger->mutex = NULL;
+ if (tiger->composition) {
+ gst_video_overlay_composition_unref (tiger->composition);
+ tiger->composition = NULL;
+ }
+
GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
}
@@ -789,404 +769,44 @@ gst_kate_tiger_get_time (GstKateTiger * tiger)
}
static inline void
-gst_kate_tiger_blit_1 (GstKateTiger * tiger, guchar * dest, gint xpos,
- gint ypos, const guint8 * image, gint image_width, gint image_height,
- guint dest_stride)
+gst_kate_tiger_set_composition (GstKateTiger * tiger)
{
- gint i, j = 0;
- gint x, y;
- guchar r, g, b, a;
- const guint8 *pimage;
- guchar *py;
- gint width = image_width;
- gint height = image_height;
-
- if (xpos < 0) {
- xpos = 0;
- }
-
- if (xpos + width > tiger->video_width) {
- width = tiger->video_width - xpos;
- }
-
- if (ypos + height > tiger->video_height) {
- height = tiger->video_height - ypos;
- }
-
- dest += (ypos / 1) * dest_stride;
-
- for (i = 0; i < height; i++) {
- pimage = image + 4 * (i * image_width);
- py = dest + i * dest_stride + xpos;
- for (j = 0; j < width; j++) {
- b = pimage[TIGER_ARGB_B];
- g = pimage[TIGER_ARGB_G];
- r = pimage[TIGER_ARGB_R];
- a = pimage[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a, r, g, b);
-
- pimage += 4;
- if (a == 0) {
- py++;
- continue;
- }
- COMP_Y (y, r, g, b);
- x = *py;
- BLEND (*py++, a, y, x);
- }
+ GstVideoOverlayRectangle *rectangle;
+
+ if (tiger->render_buffer) {
+ rectangle = gst_video_overlay_rectangle_new_argb (tiger->render_buffer,
+ tiger->video_width, tiger->video_height, 4 * tiger->video_width,
+ 0, 0, tiger->video_width, tiger->video_height,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
+
+ if (tiger->composition)
+ gst_video_overlay_composition_unref (tiger->composition);
+ tiger->composition = gst_video_overlay_composition_new (rectangle);
+ gst_video_overlay_rectangle_unref (rectangle);
+
+ } else if (tiger->composition) {
+ gst_video_overlay_composition_unref (tiger->composition);
+ tiger->composition = NULL;
}
}
static inline void
-gst_kate_tiger_blit_sub2x2cbcr (GstKateTiger * tiger,
- guchar * destcb, guchar * destcr, gint xpos, gint ypos,
- const guint8 * image, gint image_width, gint image_height,
- guint destcb_stride, guint destcr_stride, guint pix_stride)
+gst_kate_tiger_unpremultiply (GstKateTiger * tiger)
{
- gint i, j;
- gint x, cb, cr;
- gushort r, g, b, a;
- gushort r1, g1, b1, a1;
- const guint8 *pimage1, *pimage2;
- guchar *pcb, *pcr;
- gint width = image_width - 2;
- gint height = image_height - 2;
-
- xpos *= pix_stride;
-
- if (xpos < 0) {
- xpos = 0;
- }
-
- if (xpos + width > tiger->video_width) {
- width = tiger->video_width - xpos;
- }
-
- if (ypos + height > tiger->video_height) {
- height = tiger->video_height - ypos;
- }
-
- destcb += (ypos / 2) * destcb_stride;
- destcr += (ypos / 2) * destcr_stride;
-
- for (i = 0; i < height; i += 2) {
- pimage1 = image + 4 * (i * image_width);
- pimage2 = pimage1 + 4 * image_width;
- pcb = destcb + (i / 2) * destcb_stride + xpos / 2;
- pcr = destcr + (i / 2) * destcr_stride + xpos / 2;
- for (j = 0; j < width; j += 2) {
- b = pimage1[TIGER_ARGB_B];
- g = pimage1[TIGER_ARGB_G];
- r = pimage1[TIGER_ARGB_R];
- a = pimage1[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a, r, g, b);
- pimage1 += 4;
-
- b1 = pimage1[TIGER_ARGB_B];
- g1 = pimage1[TIGER_ARGB_G];
- r1 = pimage1[TIGER_ARGB_R];
- a1 = pimage1[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
- b += b1;
- g += g1;
- r += r1;
- a += a1;
- pimage1 += 4;
-
- b1 = pimage2[TIGER_ARGB_B];
- g1 = pimage2[TIGER_ARGB_G];
- r1 = pimage2[TIGER_ARGB_R];
- a1 = pimage2[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
- b += b1;
- g += g1;
- r += r1;
- a += a1;
- pimage2 += 4;
-
- /* + 2 for rounding */
- b1 = pimage2[TIGER_ARGB_B];
- g1 = pimage2[TIGER_ARGB_G];
- r1 = pimage2[TIGER_ARGB_R];
- a1 = pimage2[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
- b += b1 + 2;
- g += g1 + 2;
- r += r1 + 2;
- a += a1 + 2;
- pimage2 += 4;
-
- b /= 4;
- g /= 4;
- r /= 4;
- a /= 4;
-
- if (a == 0) {
- pcb += pix_stride;
- pcr += pix_stride;
- continue;
- }
- COMP_U (cb, r, g, b);
- COMP_V (cr, r, g, b);
+ guint i, j;
+ guint8 *pimage, *text_image = GST_BUFFER_DATA (tiger->render_buffer);
- x = *pcb;
- BLEND (*pcb, a, cb, x);
- x = *pcr;
- BLEND (*pcr, a, cr, x);
-
- pcb += pix_stride;
- pcr += pix_stride;
- }
- }
-}
-
-/* FIXME:
- * - use proper strides and offset for I420
- */
-
-static inline void
-gst_kate_tiger_blit_NV12_NV21 (GstKateTiger * tiger,
- guint8 * yuv_pixels, gint xpos, gint ypos, const guint8 * image,
- gint image_width, gint image_height)
-{
- int y_stride, uv_stride;
- int u_offset, v_offset;
- int h, w;
-
- /* because U/V is 2x2 subsampled, we need to round, either up or down,
- * to a boundary of integer number of U/V pixels:
- */
- xpos = GST_ROUND_UP_2 (xpos);
- ypos = GST_ROUND_UP_2 (ypos);
-
- w = tiger->video_width;
- h = tiger->video_height;
-
- y_stride = gst_video_format_get_row_stride (tiger->video_format, 0, w);
- uv_stride = gst_video_format_get_row_stride (tiger->video_format, 1, w);
- u_offset =
- gst_video_format_get_component_offset (tiger->video_format, 1, w, h);
- v_offset =
- gst_video_format_get_component_offset (tiger->video_format, 2, w, h);
-
- gst_kate_tiger_blit_1 (tiger, yuv_pixels, xpos, ypos, image, image_width,
- image_height, y_stride);
- gst_kate_tiger_blit_sub2x2cbcr (tiger, yuv_pixels + u_offset,
- yuv_pixels + v_offset, xpos, ypos, image, image_width, image_height,
- uv_stride, uv_stride, 2);
-}
-
-static inline void
-gst_kate_tiger_blit_I420_YV12 (GstKateTiger * tiger,
- guint8 * yuv_pixels, gint xpos, gint ypos, const guint8 * image,
- gint image_width, gint image_height)
-{
- int y_stride, u_stride, v_stride;
- int u_offset, v_offset;
- int h, w;
-
- /* because U/V is 2x2 subsampled, we need to round, either up or down,
- * to a boundary of integer number of U/V pixels:
- */
- xpos = GST_ROUND_UP_2 (xpos);
- ypos = GST_ROUND_UP_2 (ypos);
-
- w = tiger->video_width;
- h = tiger->video_height;
-
- y_stride = gst_video_format_get_row_stride (tiger->video_format, 0, w);
- u_stride = gst_video_format_get_row_stride (tiger->video_format, 1, w);
- v_stride = gst_video_format_get_row_stride (tiger->video_format, 2, w);
- u_offset =
- gst_video_format_get_component_offset (tiger->video_format, 1, w, h);
- v_offset =
- gst_video_format_get_component_offset (tiger->video_format, 2, w, h);
-
- gst_kate_tiger_blit_1 (tiger, yuv_pixels, xpos, ypos, image, image_width,
- image_height, y_stride);
- gst_kate_tiger_blit_sub2x2cbcr (tiger, yuv_pixels + u_offset,
- yuv_pixels + v_offset, xpos, ypos, image, image_width, image_height,
- u_stride, v_stride, 1);
-}
-
-static inline void
-gst_kate_tiger_blit_UYVY (GstKateTiger * tiger,
- guint8 * yuv_pixels, gint xpos, gint ypos, const guint8 * image,
- gint image_width, gint image_height)
-{
- int a0, r0, g0, b0;
- int a1, r1, g1, b1;
- int y0, y1, u, v;
- int i, j;
- int h, w;
- const guint8 *pimage;
- guchar *dest;
-
- /* because U/V is 2x horizontally subsampled, we need to round to a
- * boundary of integer number of U/V pixels in x dimension:
- */
- xpos = GST_ROUND_UP_2 (xpos);
-
- w = image_width - 2;
- h = image_height - 2;
-
- if (xpos < 0) {
- xpos = 0;
- }
-
- if (xpos + w > tiger->video_width) {
- w = tiger->video_width - xpos;
- }
-
- if (ypos + h > tiger->video_height) {
- h = tiger->video_height - ypos;
- }
-
- for (i = 0; i < h; i++) {
- pimage = image + i * image_width * 4;
- dest = yuv_pixels + (i + ypos) * tiger->video_width * 2 + xpos * 2;
- for (j = 0; j < w; j += 2) {
- b0 = pimage[TIGER_ARGB_B];
- g0 = pimage[TIGER_ARGB_G];
- r0 = pimage[TIGER_ARGB_R];
- a0 = pimage[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a0, r0, g0, b0);
- pimage += 4;
-
- b1 = pimage[TIGER_ARGB_B];
- g1 = pimage[TIGER_ARGB_G];
- r1 = pimage[TIGER_ARGB_R];
- a1 = pimage[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
- pimage += 4;
-
- a0 += a1 + 2;
- a0 /= 2;
- if (a0 == 0) {
- dest += 4;
- continue;
- }
-
- COMP_Y (y0, r0, g0, b0);
- COMP_Y (y1, r1, g1, b1);
-
- b0 += b1 + 2;
- g0 += g1 + 2;
- r0 += r1 + 2;
-
- b0 /= 2;
- g0 /= 2;
- r0 /= 2;
-
- COMP_U (u, r0, g0, b0);
- COMP_V (v, r0, g0, b0);
-
- BLEND (*dest, a0, u, *dest);
- dest++;
- BLEND (*dest, a0, y0, *dest);
- dest++;
- BLEND (*dest, a0, v, *dest);
- dest++;
- BLEND (*dest, a0, y1, *dest);
- dest++;
- }
- }
-}
-
-static inline void
-gst_kate_tiger_blit_AYUV (GstKateTiger * tiger,
- guint8 * rgb_pixels, gint xpos, gint ypos, const guint8 * image,
- gint image_width, gint image_height)
-{
- int a, r, g, b, a1;
- int y, u, v;
- int i, j;
- int h, w;
- const guint8 *pimage;
- guchar *dest;
-
- w = image_width;
- h = image_height;
-
- if (xpos < 0) {
- xpos = 0;
- }
-
- if (xpos + w > tiger->video_width) {
- w = tiger->video_width - xpos;
- }
-
- if (ypos + h > tiger->video_height) {
- h = tiger->video_height - ypos;
- }
-
- for (i = 0; i < h; i++) {
- pimage = image + i * image_width * 4;
- dest = rgb_pixels + (i + ypos) * 4 * tiger->video_width + xpos * 4;
- for (j = 0; j < w; j++) {
- a = pimage[TIGER_ARGB_A];
- b = pimage[TIGER_ARGB_B];
- g = pimage[TIGER_ARGB_G];
- r = pimage[TIGER_ARGB_R];
-
- TIGER_UNPREMULTIPLY (a, r, g, b);
-
- // convert background to yuv
- COMP_Y (y, r, g, b);
- COMP_U (u, r, g, b);
- COMP_V (v, r, g, b);
-
- // preform text "OVER" background alpha compositing
- a1 = a + (dest[0] * (255 - a)) / 255 + 1; // add 1 to prevent divide by 0
- OVER (dest[1], a, y, dest[0], dest[1], a1);
- OVER (dest[2], a, u, dest[0], dest[2], a1);
- OVER (dest[3], a, v, dest[0], dest[3], a1);
- dest[0] = a1 - 1; // remove the temporary 1 we added
+ for (i = 0; i < tiger->video_height; i++) {
+ pimage = text_image + 4 * (i * tiger->video_width);
+ for (j = 0; j < tiger->video_width; j++) {
+ TIGER_UNPREMULTIPLY (pimage[TIGER_ARGB_A], pimage[TIGER_ARGB_R],
+ pimage[TIGER_ARGB_G], pimage[TIGER_ARGB_B]);
pimage += 4;
- dest += 4;
}
}
}
-static void
-gst_kate_tiger_blend_yuv (GstKateTiger * tiger, GstBuffer * video_frame,
- const guint8 * image, gint image_width, gint image_height)
-{
- gint xpos = 0, ypos = 0;
- gint width, height;
-
- width = image_width;
- height = image_height;
-
- switch (tiger->video_format) {
- case GST_VIDEO_FORMAT_I420:
- case GST_VIDEO_FORMAT_YV12:
- gst_kate_tiger_blit_I420_YV12 (tiger,
- GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
- image_height);
- break;
- case GST_VIDEO_FORMAT_NV12:
- case GST_VIDEO_FORMAT_NV21:
- gst_kate_tiger_blit_NV12_NV21 (tiger,
- GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
- image_height);
- break;
- case GST_VIDEO_FORMAT_UYVY:
- gst_kate_tiger_blit_UYVY (tiger,
- GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
- image_height);
- break;
- case GST_VIDEO_FORMAT_AYUV:
- gst_kate_tiger_blit_AYUV (tiger,
- GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
- image_height);
- break;
- default:
- g_assert_not_reached ();
- }
-}
-
static GstFlowReturn
gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
{
@@ -1249,14 +869,12 @@ gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
/* and setup that buffer before rendering */
if (gst_video_format_is_yuv (tiger->video_format)) {
- guint8 *tmp = g_realloc (tiger->render_buffer,
- tiger->video_width * tiger->video_height * 4);
- if (!tmp) {
- GST_WARNING_OBJECT (tiger, "Failed to allocate render buffer");
- goto pass;
+ if (!tiger->render_buffer) {
+ tiger->render_buffer =
+ gst_buffer_new_and_alloc (tiger->video_width * tiger->video_height *
+ 4);
}
- tiger->render_buffer = tmp;
- ptr = tiger->render_buffer;
+ ptr = GST_BUFFER_DATA (tiger->render_buffer);
tiger_renderer_set_surface_clear_color (tiger->tr, 1, 0.0, 0.0, 0.0, 0.0);
} else {
ptr = GST_BUFFER_DATA (buf);
@@ -1278,8 +896,12 @@ gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
}
if (gst_video_format_is_yuv (tiger->video_format)) {
- gst_kate_tiger_blend_yuv (tiger, buf, tiger->render_buffer,
- tiger->video_width, tiger->video_height);
+ /* As the GstVideoOverlayComposition supports only unpremultiply ARGB,
+ * we need to unpermultiply it */
+ gst_kate_tiger_unpremultiply (tiger);
+ gst_kate_tiger_set_composition (tiger);
+ if (tiger->composition)
+ gst_video_overlay_composition_blend (tiger->composition, buf);
}
pass:
diff --git a/ext/kate/gstkatetiger.h b/ext/kate/gstkatetiger.h
index f966cbf15..ad7cb1433 100644
--- a/ext/kate/gstkatetiger.h
+++ b/ext/kate/gstkatetiger.h
@@ -50,6 +50,7 @@
#include <tiger/tiger.h>
#include <gst/gst.h>
#include <gst/video/video.h>
+#include <gst/video/video-overlay-composition.h>
#include "gstkateutil.h"
G_BEGIN_DECLS
@@ -95,7 +96,8 @@ struct _GstKateTiger
gint video_width;
gint video_height;
gboolean swap_rgb;
- guint8 *render_buffer;
+ GstBuffer *render_buffer;
+ GstVideoOverlayComposition *composition;
GMutex *mutex;
GCond *cond;
diff --git a/ext/opencv/gstmotioncells.c b/ext/opencv/gstmotioncells.c
index bf2eb7f60..d300f9d31 100644
--- a/ext/opencv/gstmotioncells.c
+++ b/ext/opencv/gstmotioncells.c
@@ -194,8 +194,6 @@ gst_motion_cells_finalize (GObject * obj)
GFREE (filter->basename_datafile);
GFREE (filter->datafile_extension);
- g_mutex_free (filter->propset_mutex);
-
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
@@ -328,7 +326,6 @@ gst_motion_cells_class_init (GstMotioncellsClass * klass)
static void
gst_motion_cells_init (GstMotioncells * filter, GstMotioncellsClass * gclass)
{
- filter->propset_mutex = g_mutex_new ();
filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_setcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_motion_cells_set_caps));
@@ -398,9 +395,7 @@ gst_motion_cells_init (GstMotioncells * filter, GstMotioncellsClass * gclass)
filter->thickness = THICKNESS_DEF;
filter->datafileidx = 0;
- g_mutex_lock (filter->propset_mutex);
filter->id = motion_cells_init ();
- g_mutex_unlock (filter->propset_mutex);
}
@@ -419,28 +414,19 @@ gst_motion_cells_set_property (GObject * object, guint prop_id,
int tmpuy = -1;
int tmplx = -1;
int tmply = -1;
- GstStateChangeReturn ret;
- g_mutex_lock (filter->propset_mutex);
+ GST_OBJECT_LOCK (filter);
switch (prop_id) {
case PROP_GRID_X:
- ret = gst_element_get_state (GST_ELEMENT (filter),
- &filter->state, NULL, 250 * GST_NSECOND);
filter->gridx = g_value_get_int (value);
- if (filter->prevgridx != filter->gridx
- && ret == GST_STATE_CHANGE_SUCCESS
- && filter->state == GST_STATE_PLAYING) {
+ if (filter->prevgridx != filter->gridx && !filter->firstframe) {
filter->changed_gridx = true;
}
filter->prevgridx = filter->gridx;
break;
case PROP_GRID_Y:
- ret = gst_element_get_state (GST_ELEMENT (filter),
- &filter->state, NULL, 250 * GST_NSECOND);
filter->gridy = g_value_get_int (value);
- if (filter->prevgridy != filter->gridy
- && ret == GST_STATE_CHANGE_SUCCESS
- && filter->state == GST_STATE_PLAYING) {
+ if (filter->prevgridy != filter->gridy && !filter->firstframe) {
filter->changed_gridy = true;
}
filter->prevgridy = filter->gridy;
@@ -473,9 +459,7 @@ gst_motion_cells_set_property (GObject * object, guint prop_id,
filter->calculate_motion = g_value_get_boolean (value);
break;
case PROP_DATE:
- ret = gst_element_get_state (GST_ELEMENT (filter),
- &filter->state, NULL, 250 * GST_NSECOND);
- if (ret == GST_STATE_CHANGE_SUCCESS && filter->state == GST_STATE_PLAYING) {
+ if (!filter->firstframe) {
filter->changed_startime = true;
}
filter->starttime = g_value_get_long (value);
@@ -629,7 +613,7 @@ gst_motion_cells_set_property (GObject * object, guint prop_id,
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
- g_mutex_unlock (filter->propset_mutex);
+ GST_OBJECT_UNLOCK (filter);
}
static void
@@ -640,6 +624,7 @@ gst_motion_cells_get_property (GObject * object, guint prop_id,
GString *str;
int i;
+ GST_OBJECT_LOCK (filter);
switch (prop_id) {
case PROP_GRID_X:
g_value_set_int (value, filter->gridx);
@@ -751,6 +736,7 @@ gst_motion_cells_get_property (GObject * object, guint prop_id,
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
+ GST_OBJECT_UNLOCK (filter);
}
static void
@@ -861,17 +847,17 @@ gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps)
static GstFlowReturn
gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
{
-
GstMotioncells *filter;
-
filter = gst_motion_cells (GST_OBJECT_PARENT (pad));
+ GST_OBJECT_LOCK (filter);
if (filter->calculate_motion) {
double sensitivity;
int framerate, gridx, gridy, motionmaskcells_count, motionmaskcoord_count,
motioncells_count, i;
int thickness, success, motioncellsidxcnt, numberOfCells,
motioncellsnumber, cellsOfInterestNumber;
- int mincellsOfInterestNumber, motiondetect;
+ int mincellsOfInterestNumber, motiondetect, minimum_motion_frames,
+ postnomotion;
char *datafile;
bool display, changed_datafile, useAlpha;
gint64 starttime;
@@ -879,14 +865,14 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
motioncellidx *motionmaskcellsidx;
cellscolor motioncellscolor;
motioncellidx *motioncellsidx;
- g_mutex_lock (filter->propset_mutex);
buf = gst_buffer_make_writable (buf);
filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
if (filter->firstframe) {
setPrevFrame (filter->cvImage, filter->id);
filter->firstframe = FALSE;
}
-
+ minimum_motion_frames = filter->minimum_motion_frames;
+ postnomotion = filter->postnomotion;
sensitivity = filter->sensitivity;
framerate = filter->framerate;
gridx = filter->gridx;
@@ -963,6 +949,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
motionmaskcoords, motionmaskcells_count, motionmaskcellsidx,
motioncellscolor, motioncells_count, motioncellsidx, starttime,
datafile, changed_datafile, thickness, filter->id);
+
if ((success == 1) && (filter->sent_init_error_msg == false)) {
char *initfailedreason;
int initerrorcode;
@@ -996,7 +983,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
GFREE (motionmaskcoords);
GFREE (motionmaskcellsidx);
GFREE (motioncellsidx);
- g_mutex_unlock (filter->propset_mutex);
+ GST_OBJECT_UNLOCK (filter);
return gst_pad_push (filter->srcpad, buf);
}
filter->changed_datafile = getChangedDataFile (filter->id);
@@ -1007,6 +994,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
(filter->motioncells_count) : (numberOfCells);
mincellsOfInterestNumber =
floor ((double) cellsOfInterestNumber * filter->threshold);
+ GST_OBJECT_UNLOCK (filter);
motiondetect = (motioncellsnumber >= mincellsOfInterestNumber) ? 1 : 0;
if ((motioncellsidxcnt > 0) && (motiondetect == 1)) {
char *detectedmotioncells;
@@ -1015,7 +1003,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
if (detectedmotioncells) {
filter->consecutive_motion++;
if ((filter->previous_motion == false)
- && (filter->consecutive_motion >= filter->minimum_motion_frames)) {
+ && (filter->consecutive_motion >= minimum_motion_frames)) {
GstStructure *s;
GstMessage *m;
filter->previous_motion = true;
@@ -1061,7 +1049,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
}
}
}
- if (filter->postnomotion > 0) {
+ if (postnomotion > 0) {
guint64 last_buf_timestamp = GST_BUFFER_TIMESTAMP (buf) / 1000000000l;
if ((last_buf_timestamp -
(filter->last_motion_timestamp / 1000000000l)) >=
@@ -1086,10 +1074,9 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
GFREE (motionmaskcoords);
GFREE (motionmaskcellsidx);
GFREE (motioncellsidx);
-
- g_mutex_unlock (filter->propset_mutex);
+ } else {
+ GST_OBJECT_UNLOCK (filter);
}
-
return gst_pad_push (filter->srcpad, buf);
}
diff --git a/ext/opencv/gstmotioncells.h b/ext/opencv/gstmotioncells.h
index da4e822ef..8a2930eee 100644
--- a/ext/opencv/gstmotioncells.h
+++ b/ext/opencv/gstmotioncells.h
@@ -107,7 +107,6 @@ struct _GstMotioncells
gint width, height;
//time stuff
struct timeval tv;
- GMutex *propset_mutex;
double framerate;
};
diff --git a/ext/opus/Makefile.am b/ext/opus/Makefile.am
index cb0a9b338..cdf3c30ac 100644
--- a/ext/opus/Makefile.am
+++ b/ext/opus/Makefile.am
@@ -1,6 +1,6 @@
plugin_LTLIBRARIES = libgstopus.la
-libgstopus_la_SOURCES = gstopus.c gstopusdec.c gstopusenc.c gstopusparse.c gstopusheader.c gstopuscommon.c
+libgstopus_la_SOURCES = gstopus.c gstopusdec.c gstopusenc.c gstopusparse.c gstopusheader.c gstopuscommon.c gstrtpopuspay.c gstrtpopusdepay.c
libgstopus_la_CFLAGS = \
-DGST_USE_UNSTABLE_API \
$(GST_PLUGINS_BASE_CFLAGS) \
@@ -9,10 +9,11 @@ libgstopus_la_CFLAGS = \
libgstopus_la_LIBADD = \
-lgstaudio-$(GST_MAJORMINOR) \
$(GST_PLUGINS_BASE_LIBS) -lgsttag-$(GST_MAJORMINOR) \
+ -lgstrtp-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) \
$(GST_LIBS) \
$(OPUS_LIBS)
libgstopus_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) $(LIBM)
libgstopus_la_LIBTOOLFLAGS = --tag=disable-static
-noinst_HEADERS = gstopusenc.h gstopusdec.h gstopusparse.h gstopusheader.h gstopuscommon.h
+noinst_HEADERS = gstopusenc.h gstopusdec.h gstopusparse.h gstopusheader.h gstopuscommon.h gstrtpopuspay.h gstrtpopusdepay.h
diff --git a/ext/opus/gstopus.c b/ext/opus/gstopus.c
index c5f68a131..8db6e197f 100644
--- a/ext/opus/gstopus.c
+++ b/ext/opus/gstopus.c
@@ -25,6 +25,9 @@
#include "gstopusenc.h"
#include "gstopusparse.h"
+#include "gstrtpopuspay.h"
+#include "gstrtpopusdepay.h"
+
#include <gst/tag/tag.h>
static gboolean
@@ -43,6 +46,14 @@ plugin_init (GstPlugin * plugin)
GST_TYPE_OPUS_PARSE))
return FALSE;
+ if (!gst_element_register (plugin, "rtpopusdepay", GST_RANK_NONE,
+ GST_TYPE_RTP_OPUS_DEPAY))
+ return FALSE;
+
+ if (!gst_element_register (plugin, "rtpopuspay", GST_RANK_NONE,
+ GST_TYPE_RTP_OPUS_PAY))
+ return FALSE;
+
gst_tag_register_musicbrainz_tags ();
return TRUE;
diff --git a/ext/opus/gstopuscommon.c b/ext/opus/gstopuscommon.c
index 426c5b897..dbf585a82 100644
--- a/ext/opus/gstopuscommon.c
+++ b/ext/opus/gstopuscommon.c
@@ -17,6 +17,8 @@
* Boston, MA 02111-1307, USA.
*/
+#include <stdio.h>
+#include <string.h>
#include "gstopuscommon.h"
/* http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-800004.3.9 */
@@ -86,3 +88,19 @@ const char *gst_opus_channel_names[] = {
"side right",
"none"
};
+
+void
+gst_opus_common_log_channel_mapping_table (GstElement * element,
+ GstDebugCategory * category, const char *msg, int n_channels,
+ const guint8 * table)
+{
+ char s[8 + 256 * 4] = "[ "; /* enough for 256 times "255 " at most */
+ int n;
+
+ for (n = 0; n < n_channels; ++n) {
+ size_t len = strlen (s);
+ snprintf (s + len, sizeof (s) - len, "%d ", table[n]);
+ }
+ strcat (s, "]");
+ GST_CAT_LEVEL_LOG (category, GST_LEVEL_INFO, element, "%s: %s", msg, s);
+}
diff --git a/ext/opus/gstopuscommon.h b/ext/opus/gstopuscommon.h
index 65b944e9e..1fba5650d 100644
--- a/ext/opus/gstopuscommon.h
+++ b/ext/opus/gstopuscommon.h
@@ -28,6 +28,9 @@ G_BEGIN_DECLS
extern const GstAudioChannelPosition gst_opus_channel_positions[][8];
extern const char *gst_opus_channel_names[];
+extern void gst_opus_common_log_channel_mapping_table (GstElement *element,
+ GstDebugCategory * category, const char *msg,
+ int n_channels, const guint8 *table);
G_END_DECLS
diff --git a/ext/opus/gstopusdec.c b/ext/opus/gstopusdec.c
index b060b22ae..67662456e 100644
--- a/ext/opus/gstopusdec.c
+++ b/ext/opus/gstopusdec.c
@@ -38,12 +38,11 @@
*/
#ifdef HAVE_CONFIG_H
-# include "config.h"
+#include "config.h"
#endif
#include <math.h>
#include <string.h>
-#include <gst/tag/tag.h>
#include "gstopusheader.h"
#include "gstopuscommon.h"
#include "gstopusdec.h"
@@ -57,7 +56,7 @@ GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("audio/x-raw, "
"format = (string) { " GST_AUDIO_NE (S16) " }, "
- "rate = (int) { 8000, 12000, 16000, 24000, 48000 }, "
+ "rate = (int) { 48000, 24000, 16000, 12000, 8000 }, "
"channels = (int) [ 1, 8 ] ")
);
@@ -207,12 +206,32 @@ gst_opus_dec_get_r128_volume (gint16 r128_gain)
return DB_TO_LINEAR (gst_opus_dec_get_r128_gain (r128_gain));
}
+static GstCaps *
+gst_opus_dec_negotiate (GstOpusDec * dec)
+{
+ GstCaps *caps = gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
+ GstStructure *s;
+
+ caps = gst_caps_make_writable (caps);
+ gst_caps_truncate (caps);
+
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_fixate_field_nearest_int (s, "rate", 48000);
+ gst_structure_get_int (s, "rate", &dec->sample_rate);
+ gst_structure_fixate_field_nearest_int (s, "channels", dec->n_channels);
+ gst_structure_get_int (s, "channels", &dec->n_channels);
+
+ GST_INFO_OBJECT (dec, "Negotiated %d channels, %d Hz", dec->n_channels,
+ dec->sample_rate);
+
+ return caps;
+}
+
static GstFlowReturn
gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
const guint8 *data;
GstCaps *caps;
- GstStructure *s;
const GstAudioChannelPosition *pos = NULL;
g_return_val_if_fail (gst_opus_header_is_id_header (buf), GST_FLOW_ERROR);
@@ -277,16 +296,7 @@ gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
}
}
- /* negotiate width with downstream */
- caps = gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
- s = gst_caps_get_structure (caps, 0);
- gst_structure_fixate_field_nearest_int (s, "rate", 48000);
- gst_structure_get_int (s, "rate", &dec->sample_rate);
- gst_structure_fixate_field_nearest_int (s, "channels", dec->n_channels);
- gst_structure_get_int (s, "channels", &dec->n_channels);
-
- GST_INFO_OBJECT (dec, "Negotiated %d channels, %d Hz", dec->n_channels,
- dec->sample_rate);
+ caps = gst_opus_dec_negotiate (dec);
if (pos) {
GST_DEBUG_OBJECT (dec, "Setting channel positions on caps");
@@ -327,11 +337,36 @@ opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
GstBuffer *buf;
if (dec->state == NULL) {
+ /* If we did not get any headers, default to 2 channels */
+ if (dec->n_channels == 0) {
+ GstCaps *caps;
+ GST_INFO_OBJECT (dec, "No header, assuming single stream");
+ dec->n_channels = 2;
+ dec->sample_rate = 48000;
+ caps = gst_opus_dec_negotiate (dec);
+ GST_INFO_OBJECT (dec, "Setting src caps to %" GST_PTR_FORMAT, caps);
+ gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps);
+ gst_caps_unref (caps);
+ /* default stereo mapping */
+ dec->channel_mapping_family = 0;
+ dec->channel_mapping[0] = 0;
+ dec->channel_mapping[1] = 1;
+ dec->n_streams = 1;
+ dec->n_stereo_streams = 1;
+ }
+
GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",
dec->n_channels, dec->sample_rate);
- dec->state = opus_multistream_decoder_create (dec->sample_rate,
- dec->n_channels, dec->n_streams, dec->n_stereo_streams,
- dec->channel_mapping, &err);
+#ifndef GST_DISABLE_DEBUG
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,
+ "Mapping table", dec->n_channels, dec->channel_mapping);
+#endif
+
+ GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,
+ dec->n_stereo_streams);
+ dec->state =
+ opus_multistream_decoder_create (dec->sample_rate, dec->n_channels,
+ dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err);
if (!dec->state || err != OPUS_OK)
goto creation_failed;
}
@@ -411,11 +446,11 @@ opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
GST_INFO_OBJECT (dec,
"Skipping %u samples (%u at 48000 Hz, %u left to skip)", skip,
scaled_skip, dec->pre_skip);
+ }
- if (gst_buffer_get_size (outbuf) == 0) {
- gst_buffer_unref (outbuf);
- outbuf = NULL;
- }
+ if (gst_buffer_get_size (outbuf) == 0) {
+ gst_buffer_unref (outbuf);
+ outbuf = NULL;
}
/* Apply gain */
diff --git a/ext/opus/gstopusenc.c b/ext/opus/gstopusenc.c
index ff9243ad4..999c0f39f 100644
--- a/ext/opus/gstopusenc.c
+++ b/ext/opus/gstopusenc.c
@@ -161,6 +161,8 @@ static void gst_opus_enc_finalize (GObject * object);
static gboolean gst_opus_enc_sink_event (GstAudioEncoder * benc,
GstEvent * event);
+static GstCaps *gst_opus_enc_sink_getcaps (GstAudioEncoder * benc,
+ GstCaps * filter);
static gboolean gst_opus_enc_setup (GstOpusEnc * enc);
static void gst_opus_enc_get_property (GObject * object, guint prop_id,
@@ -211,6 +213,7 @@ gst_opus_enc_class_init (GstOpusEncClass * klass)
base_class->set_format = GST_DEBUG_FUNCPTR (gst_opus_enc_set_format);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_opus_enc_handle_frame);
base_class->event = GST_DEBUG_FUNCPTR (gst_opus_enc_sink_event);
+ base_class->getcaps = GST_DEBUG_FUNCPTR (gst_opus_enc_sink_getcaps);
g_object_class_install_property (gobject_class, PROP_AUDIO,
g_param_spec_boolean ("audio", "Audio or voice",
@@ -401,7 +404,50 @@ gst_opus_enc_get_frame_samples (GstOpusEnc * enc)
}
static void
-gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
+gst_opus_enc_setup_trivial_mapping (GstOpusEnc * enc, guint8 mapping[256])
+{
+ int n;
+
+ for (n = 0; n < 255; ++n)
+ mapping[n] = n;
+}
+
+static int
+gst_opus_enc_find_channel_position (GstOpusEnc * enc, const GstAudioInfo * info,
+ GstAudioChannelPosition position)
+{
+ int n;
+ for (n = 0; n < enc->n_channels; ++n) {
+ if (GST_AUDIO_INFO_POSITION (info, n) == position) {
+ return n;
+ }
+ }
+ return -1;
+}
+
+static int
+gst_opus_enc_find_channel_position_in_vorbis_order (GstOpusEnc * enc,
+ GstAudioChannelPosition position)
+{
+ int c;
+
+ for (c = 0; c < enc->n_channels; ++c) {
+ if (gst_opus_channel_positions[enc->n_channels - 1][c] == position) {
+ GST_INFO_OBJECT (enc,
+ "Channel position %s maps to index %d in Vorbis order",
+ gst_opus_channel_names[position], c);
+ return c;
+ }
+ }
+ GST_WARNING_OBJECT (enc,
+ "Channel position %s is not representable in Vorbis order",
+ gst_opus_channel_names[position]);
+ return -1;
+}
+
+static void
+gst_opus_enc_setup_channel_mappings (GstOpusEnc * enc,
+ const GstAudioInfo * info)
{
#define MAPS(idx,pos) (GST_AUDIO_INFO_POSITION (info, (idx)) == GST_AUDIO_CHANNEL_POSITION_##pos)
@@ -411,14 +457,15 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
enc->n_channels);
/* Start by setting up a default trivial mapping */
- for (n = 0; n < 255; ++n)
- enc->channel_mapping[n] = n;
+ enc->n_stereo_streams = 0;
+ gst_opus_enc_setup_trivial_mapping (enc, enc->encoding_channel_mapping);
+ gst_opus_enc_setup_trivial_mapping (enc, enc->decoding_channel_mapping);
/* For one channel, use the basic RTP mapping */
if (enc->n_channels == 1) {
GST_INFO_OBJECT (enc, "Mono, trivial RTP mapping");
enc->channel_mapping_family = 0;
- enc->channel_mapping[0] = 0;
+ /* implicit mapping for family 0 */
return;
}
@@ -428,9 +475,11 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
if (MAPS (0, FRONT_LEFT) && MAPS (1, FRONT_RIGHT)) {
GST_INFO_OBJECT (enc, "Stereo, canonical mapping");
enc->channel_mapping_family = 0;
+ enc->n_stereo_streams = 1;
/* The channel mapping is implicit for family 0, that's why we do not
attempt to create one for right/left - this will be mapped to the
Vorbis mapping below. */
+ return;
} else {
GST_DEBUG_OBJECT (enc, "Stereo, but not canonical mapping, continuing");
}
@@ -438,42 +487,115 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
/* For channels between 1 and 8, we use the Vorbis mapping if we can
find a permutation that matches it. Mono will have been taken care
- of earlier, but this code also handles it. */
+ of earlier, but this code also handles it. Same for left/right stereo.
+ There are two mappings. One maps the input channels to an ordering
+ which has the natural pairs first so they can benefit from the Opus
+ stereo channel coupling, and the other maps this ordering to the
+ Vorbis ordering. */
if (enc->n_channels >= 1 && enc->n_channels <= 8) {
+ int c0, c1, c0v, c1v;
+ int mapped;
+ gboolean positions_done[256];
+ static const GstAudioChannelPosition pairs[][2] = {
+ {GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT},
+ {GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
+ GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT},
+ {GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
+ {GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
+ {GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+ GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT},
+ };
+ size_t pair;
+
GST_DEBUG_OBJECT (enc,
- "In range for the Vorbis mapping, checking channel positions");
- for (n = 0; n < enc->n_channels; ++n) {
- GstAudioChannelPosition pos = GST_AUDIO_INFO_POSITION (info, n);
- int c;
-
- GST_DEBUG_OBJECT (enc, "Channel %d has position %d (%s)", n, pos,
- gst_opus_channel_names[pos]);
- for (c = 0; c < enc->n_channels; ++c) {
- if (gst_opus_channel_positions[enc->n_channels - 1][c] == pos) {
- GST_DEBUG_OBJECT (enc, "Found in Vorbis mapping as channel %d", c);
- break;
+ "In range for the Vorbis mapping, building channel mapping tables");
+
+ enc->n_stereo_streams = 0;
+ mapped = 0;
+ for (n = 0; n < 256; ++n)
+ positions_done[n] = FALSE;
+
+ /* First, find any natural pairs, and move them to the front */
+ for (pair = 0; pair < G_N_ELEMENTS (pairs); ++pair) {
+ GstAudioChannelPosition p0 = pairs[pair][0];
+ GstAudioChannelPosition p1 = pairs[pair][1];
+ c0 = gst_opus_enc_find_channel_position (enc, info, p0);
+ c1 = gst_opus_enc_find_channel_position (enc, info, p1);
+ if (c0 >= 0 && c1 >= 0) {
+ /* We found a natural pair */
+ GST_DEBUG_OBJECT (enc, "Natural pair '%s/%s' found at %d %d",
+ gst_opus_channel_names[p0], gst_opus_channel_names[p1], c0, c1);
+ /* Find where they map in Vorbis order */
+ c0v = gst_opus_enc_find_channel_position_in_vorbis_order (enc, p0);
+ c1v = gst_opus_enc_find_channel_position_in_vorbis_order (enc, p1);
+ if (c0v < 0 || c1v < 0) {
+ GST_WARNING_OBJECT (enc,
+ "Cannot map channel positions to Vorbis order, using unknown mapping");
+ enc->channel_mapping_family = 255;
+ enc->n_stereo_streams = 0;
+ return;
}
+
+ enc->encoding_channel_mapping[mapped] = c0;
+ enc->encoding_channel_mapping[mapped + 1] = c1;
+ enc->decoding_channel_mapping[c0v] = mapped;
+ enc->decoding_channel_mapping[c1v] = mapped + 1;
+ enc->n_stereo_streams++;
+ mapped += 2;
+ positions_done[p0] = positions_done[p1] = TRUE;
}
- if (c == enc->n_channels) {
- /* We did not find that position, so use undefined */
- GST_WARNING_OBJECT (enc,
- "Position %d (%s) not found in Vorbis mapping, using unknown mapping",
- pos, gst_opus_channel_positions[pos]);
- enc->channel_mapping_family = 255;
- return;
+ }
+
+ /* Now add all other input channels as mono streams */
+ for (n = 0; n < enc->n_channels; ++n) {
+ GstAudioChannelPosition position = GST_AUDIO_INFO_POSITION (info, n);
+
+ /* if we already mapped it while searching for pairs, nothing else
+ needs to be done */
+ if (!positions_done[position]) {
+ int cv;
+ GST_DEBUG_OBJECT (enc, "Channel position %s is not mapped yet, adding",
+ gst_opus_channel_names[position]);
+ cv = gst_opus_enc_find_channel_position_in_vorbis_order (enc, position);
+ if (cv < 0) {
+ GST_WARNING_OBJECT (enc,
+ "Cannot map channel positions to Vorbis order, using unknown mapping");
+ enc->channel_mapping_family = 255;
+ enc->n_stereo_streams = 0;
+ return;
+ }
+ enc->encoding_channel_mapping[mapped] = n;
+ enc->decoding_channel_mapping[cv] = mapped;
+ mapped++;
}
- GST_DEBUG_OBJECT (enc, "Mapping output channel %d to %d (%s)", c, n,
- gst_opus_channel_names[pos]);
- enc->channel_mapping[c] = n;
}
- GST_INFO_OBJECT (enc, "Permutation found, using Vorbis mapping");
+
+#ifndef GST_DISABLE_DEBUG
+ GST_INFO_OBJECT (enc,
+ "Mapping tables built: %d channels, %d stereo streams", enc->n_channels,
+ enc->n_stereo_streams);
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
+ "Encoding mapping table", enc->n_channels,
+ enc->encoding_channel_mapping);
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
+ "Decoding mapping table", enc->n_channels,
+ enc->decoding_channel_mapping);
+#endif
+
enc->channel_mapping_family = 1;
return;
}
- /* For other cases, we use undefined, with the default trivial mapping */
+ /* More than 8 channels, if future mappings are added for those */
+
+ /* For other cases, we use undefined, with the default trivial mapping
+ and all mono streams */
GST_WARNING_OBJECT (enc, "Unknown mapping");
enc->channel_mapping_family = 255;
+ enc->n_stereo_streams = 0;
#undef MAPS
}
@@ -489,7 +611,7 @@ gst_opus_enc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
enc->n_channels = GST_AUDIO_INFO_CHANNELS (info);
enc->sample_rate = GST_AUDIO_INFO_RATE (info);
- gst_opus_enc_setup_channel_mapping (enc, info);
+ gst_opus_enc_setup_channel_mappings (enc, info);
GST_DEBUG_OBJECT (benc, "Setup with %d channels, %d Hz", enc->n_channels,
enc->sample_rate);
@@ -514,17 +636,24 @@ gst_opus_enc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
static gboolean
gst_opus_enc_setup (GstOpusEnc * enc)
{
- int error = OPUS_OK, n;
- guint8 trivial_mapping[256];
-
- GST_DEBUG_OBJECT (enc, "setup");
-
- for (n = 0; n < 256; ++n)
- trivial_mapping[n] = n;
+ int error = OPUS_OK;
+
+#ifndef GST_DISABLE_DEBUG
+ GST_DEBUG_OBJECT (enc,
+ "setup: %d Hz, %d channels, %d stereo streams, family %d",
+ enc->sample_rate, enc->n_channels, enc->n_stereo_streams,
+ enc->channel_mapping_family);
+ GST_INFO_OBJECT (enc, "Mapping tables built: %d channels, %d stereo streams",
+ enc->n_channels, enc->n_stereo_streams);
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
+ "Encoding mapping table", enc->n_channels, enc->encoding_channel_mapping);
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
+ "Decoding mapping table", enc->n_channels, enc->decoding_channel_mapping);
+#endif
- enc->state =
- opus_multistream_encoder_create (enc->sample_rate, enc->n_channels,
- enc->n_channels, 0, trivial_mapping,
+ enc->state = opus_multistream_encoder_create (enc->sample_rate,
+ enc->n_channels, enc->n_channels - enc->n_stereo_streams,
+ enc->n_stereo_streams, enc->encoding_channel_mapping,
enc->audio_or_voip ? OPUS_APPLICATION_AUDIO : OPUS_APPLICATION_VOIP,
&error);
if (!enc->state || error != OPUS_OK)
@@ -580,6 +709,75 @@ gst_opus_enc_sink_event (GstAudioEncoder * benc, GstEvent * event)
return FALSE;
}
+static GstCaps *
+gst_opus_enc_sink_getcaps (GstAudioEncoder * benc, GstCaps * filter)
+{
+ GstOpusEnc *enc;
+ GstCaps *caps;
+ GstCaps *peercaps = NULL;
+ GstCaps *intersect = NULL;
+ guint i;
+ gboolean allow_multistream;
+
+ enc = GST_OPUS_ENC (benc);
+
+ GST_DEBUG_OBJECT (enc, "sink getcaps");
+
+ peercaps = gst_pad_peer_query_caps (GST_AUDIO_ENCODER_SRC_PAD (benc), filter);
+ if (!peercaps) {
+ GST_DEBUG_OBJECT (benc, "No peercaps, returning template sink caps");
+ return
+ gst_caps_copy (gst_pad_get_pad_template_caps
+ (GST_AUDIO_ENCODER_SINK_PAD (benc)));
+ }
+
+ intersect = gst_caps_intersect (peercaps,
+ gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (benc)));
+ gst_caps_unref (peercaps);
+
+ if (gst_caps_is_empty (intersect))
+ return intersect;
+
+ allow_multistream = FALSE;
+ for (i = 0; i < gst_caps_get_size (intersect); i++) {
+ GstStructure *s = gst_caps_get_structure (intersect, i);
+ gboolean multistream;
+ if (gst_structure_get_boolean (s, "multistream", &multistream)) {
+ if (multistream) {
+ allow_multistream = TRUE;
+ }
+ } else {
+ allow_multistream = TRUE;
+ }
+ }
+
+ gst_caps_unref (intersect);
+
+ caps =
+ gst_caps_copy (gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SINK_PAD
+ (benc)));
+ if (!allow_multistream) {
+ GValue range = { 0 };
+ g_value_init (&range, GST_TYPE_INT_RANGE);
+ gst_value_set_int_range (&range, 1, 2);
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+ gst_structure_set_value (s, "channels", &range);
+ }
+ g_value_unset (&range);
+ }
+
+ if (filter) {
+ GstCaps *tmp = gst_caps_intersect_full (caps, filter,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = tmp;
+ }
+
+ GST_DEBUG_OBJECT (enc, "Returning caps: %" GST_PTR_FORMAT, caps);
+ return caps;
+}
+
static GstFlowReturn
gst_opus_enc_encode (GstOpusEnc * enc, GstBuffer * buf)
{
@@ -684,7 +882,8 @@ gst_opus_enc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
enc->headers = NULL;
gst_opus_header_create_caps (&caps, &enc->headers, enc->n_channels,
- enc->sample_rate, enc->channel_mapping_family, enc->channel_mapping,
+ enc->n_stereo_streams, enc->sample_rate, enc->channel_mapping_family,
+ enc->decoding_channel_mapping,
gst_tag_setter_get_tag_list (GST_TAG_SETTER (enc)));
diff --git a/ext/opus/gstopusenc.h b/ext/opus/gstopusenc.h
index 8c2c3c6e8..1e39ad03d 100644
--- a/ext/opus/gstopusenc.h
+++ b/ext/opus/gstopusenc.h
@@ -79,7 +79,9 @@ struct _GstOpusEnc {
GstTagList *tags;
guint8 channel_mapping_family;
- guint8 channel_mapping[256];
+ guint8 encoding_channel_mapping[256];
+ guint8 decoding_channel_mapping[256];
+ guint8 n_stereo_streams;
};
struct _GstOpusEncClass {
diff --git a/ext/opus/gstopusheader.c b/ext/opus/gstopusheader.c
index a46c5d6f6..68826a56b 100644
--- a/ext/opus/gstopusheader.c
+++ b/ext/opus/gstopusheader.c
@@ -27,12 +27,17 @@
#include "gstopusheader.h"
static GstBuffer *
-gst_opus_enc_create_id_buffer (gint nchannels, gint sample_rate,
- guint8 channel_mapping_family, const guint8 * channel_mapping)
+gst_opus_enc_create_id_buffer (gint nchannels, gint n_stereo_streams,
+ gint sample_rate, guint8 channel_mapping_family,
+ const guint8 * channel_mapping)
{
GstBuffer *buffer;
GstByteWriter bw;
+ g_return_val_if_fail (nchannels > 0 && nchannels < 256, NULL);
+ g_return_val_if_fail (n_stereo_streams >= 0, NULL);
+ g_return_val_if_fail (n_stereo_streams <= nchannels - n_stereo_streams, NULL);
+
gst_byte_writer_init (&bw);
/* See http://wiki.xiph.org/OggOpus */
@@ -44,8 +49,8 @@ gst_opus_enc_create_id_buffer (gint nchannels, gint sample_rate,
gst_byte_writer_put_uint16_le (&bw, 0); /* output gain */
gst_byte_writer_put_uint8 (&bw, channel_mapping_family);
if (channel_mapping_family > 0) {
- gst_byte_writer_put_uint8 (&bw, nchannels);
- gst_byte_writer_put_uint8 (&bw, 0);
+ gst_byte_writer_put_uint8 (&bw, nchannels - n_stereo_streams);
+ gst_byte_writer_put_uint8 (&bw, n_stereo_streams);
gst_byte_writer_put_data (&bw, channel_mapping, nchannels);
}
@@ -145,11 +150,38 @@ void
gst_opus_header_create_caps_from_headers (GstCaps ** caps, GSList ** headers,
GstBuffer * buf1, GstBuffer * buf2)
{
+ int n_streams, family;
+ gboolean multistream;
+ guint8 *data;
+ gsize size;
+
g_return_if_fail (caps);
g_return_if_fail (headers && !*headers);
+ g_return_if_fail (gst_buffer_get_size (buf1) >= 19);
+
+ data = gst_buffer_map (buf1, &size, NULL, GST_MAP_READ);
+
+ /* work out the number of streams */
+ family = data[18];
+ if (family == 0) {
+ n_streams = 1;
+ } else {
+ /* only included in the header for family > 0 */
+ if (size >= 20)
+ n_streams = data[19];
+ else {
+ g_warning ("family > 0 but header buffer size < 20");
+ gst_buffer_unmap (buf1, data, size);
+ return;
+ }
+ }
+
+ gst_buffer_unmap (buf1, data, size);
/* mark and put on caps */
- *caps = gst_caps_from_string ("audio/x-opus");
+ multistream = n_streams > 1;
+ *caps = gst_caps_new_simple ("audio/x-opus",
+ "multistream", G_TYPE_BOOLEAN, multistream, NULL);
*caps = _gst_caps_set_buffer_array (*caps, "streamheader", buf1, buf2, NULL);
*headers = g_slist_prepend (*headers, buf2);
@@ -158,7 +190,7 @@ gst_opus_header_create_caps_from_headers (GstCaps ** caps, GSList ** headers,
void
gst_opus_header_create_caps (GstCaps ** caps, GSList ** headers, gint nchannels,
- gint sample_rate, guint8 channel_mapping_family,
+ gint n_stereo_streams, gint sample_rate, guint8 channel_mapping_family,
const guint8 * channel_mapping, const GstTagList * tags)
{
GstBuffer *buf1, *buf2;
@@ -175,7 +207,7 @@ gst_opus_header_create_caps (GstCaps ** caps, GSList ** headers, gint nchannels,
/* create header buffers */
buf1 =
- gst_opus_enc_create_id_buffer (nchannels, sample_rate,
+ gst_opus_enc_create_id_buffer (nchannels, n_stereo_streams, sample_rate,
channel_mapping_family, channel_mapping);
buf2 = gst_opus_enc_create_metadata_buffer (tags);
diff --git a/ext/opus/gstopusheader.h b/ext/opus/gstopusheader.h
index 3b2cfc265..c6278eff3 100644
--- a/ext/opus/gstopusheader.h
+++ b/ext/opus/gstopusheader.h
@@ -28,7 +28,7 @@ G_BEGIN_DECLS
extern void gst_opus_header_create_caps_from_headers (GstCaps **caps, GSList **headers,
GstBuffer *id_header, GstBuffer *comment_header);
extern void gst_opus_header_create_caps (GstCaps **caps, GSList **headers,
- gint nchannels, gint sample_rate,
+ gint nchannels, gint n_stereo_streams, gint sample_rate,
guint8 channel_mapping_family, const guint8 *channel_mapping,
const GstTagList *tags);
extern gboolean gst_opus_header_is_header (GstBuffer * buf,
diff --git a/ext/opus/gstopusparse.c b/ext/opus/gstopusparse.c
index fd7024c62..05963ca79 100644
--- a/ext/opus/gstopusparse.c
+++ b/ext/opus/gstopusparse.c
@@ -314,7 +314,7 @@ gst_opus_parse_parse_frame (GstBaseParse * base, GstBaseParseFrame * frame)
channel_mapping_family = 0;
channel_mapping[0] = 0;
channel_mapping[1] = 1;
- gst_opus_header_create_caps (&caps, &parse->headers, channels, 0,
+ gst_opus_header_create_caps (&caps, &parse->headers, channels, 1, 0,
channel_mapping_family, channel_mapping, NULL);
}
diff --git a/ext/opus/gstrtpopusdepay.c b/ext/opus/gstrtpopusdepay.c
new file mode 100644
index 000000000..f8effbb6c
--- /dev/null
+++ b/ext/opus/gstrtpopusdepay.c
@@ -0,0 +1,120 @@
+/*
+ * Opus Depayloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include "gstrtpopusdepay.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpopusdepay_debug);
+#define GST_CAT_DEFAULT (rtpopusdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_opus_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ","
+ "clock-rate = (int) 48000, "
+ "encoding-name = (string) \"X-GST-OPUS-DRAFT-SPITTKA-00\"")
+ );
+
+static GstStaticPadTemplate gst_rtp_opus_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus")
+ );
+
+static GstBuffer *gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload,
+ GstBuffer * buf);
+static gboolean gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+G_DEFINE_TYPE (GstRTPOpusDepay, gst_rtp_opus_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+
+static void
+gst_rtp_opus_depay_class_init (GstRTPOpusDepayClass * klass)
+{
+ GstRTPBaseDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *element_class;
+
+ element_class = GST_ELEMENT_CLASS (klass);
+ gstbasertpdepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_depay_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_depay_sink_template));
+ gst_element_class_set_details_simple (element_class,
+ "RTP Opus packet depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Opus audio from RTP packets",
+ "Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
+
+ gstbasertpdepayload_class->process = gst_rtp_opus_depay_process;
+ gstbasertpdepayload_class->set_caps = gst_rtp_opus_depay_setcaps;
+
+ GST_DEBUG_CATEGORY_INIT (rtpopusdepay_debug, "rtpopusdepay", 0,
+ "Opus RTP Depayloader");
+}
+
+static void
+gst_rtp_opus_depay_init (GstRTPOpusDepay * rtpopusdepay)
+{
+
+}
+
+static gboolean
+gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstCaps *srccaps;
+ gboolean ret;
+
+ srccaps = gst_caps_new_empty_simple ("audio/x-opus");
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+
+ GST_DEBUG_OBJECT (depayload,
+ "set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
+ gst_caps_unref (srccaps);
+
+ depayload->clock_rate = 48000;
+
+ return ret;
+}
+
+static GstBuffer *
+gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
+{
+ GstBuffer *outbuf;
+ GstRTPBuffer rtpbuf = { NULL, };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtpbuf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtpbuf);
+ gst_rtp_buffer_unmap (&rtpbuf);
+
+ return outbuf;
+}
diff --git a/ext/opus/gstrtpopusdepay.h b/ext/opus/gstrtpopusdepay.h
new file mode 100644
index 000000000..968ae52ae
--- /dev/null
+++ b/ext/opus/gstrtpopusdepay.h
@@ -0,0 +1,57 @@
+/*
+ * Opus Depayloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_RTP_OPUS_DEPAY_H__
+#define __GST_RTP_OPUS_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS typedef struct _GstRTPOpusDepay GstRTPOpusDepay;
+typedef struct _GstRTPOpusDepayClass GstRTPOpusDepayClass;
+
+#define GST_TYPE_RTP_OPUS_DEPAY \
+ (gst_rtp_opus_depay_get_type())
+#define GST_RTP_OPUS_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepay))
+#define GST_RTP_OPUS_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepayClass))
+#define GST_IS_RTP_OPUS_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_DEPAY))
+#define GST_IS_RTP_OPUS_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_DEPAY))
+
+
+struct _GstRTPOpusDepay
+{
+ GstRTPBaseDepayload depayload;
+
+};
+
+struct _GstRTPOpusDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_opus_depay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_OPUS_DEPAY_H__ */
diff --git a/ext/opus/gstrtpopuspay.c b/ext/opus/gstrtpopuspay.c
new file mode 100644
index 000000000..5003c739f
--- /dev/null
+++ b/ext/opus/gstrtpopuspay.c
@@ -0,0 +1,137 @@
+/*
+ * Opus Payloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpopuspay.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpopuspay_debug);
+#define GST_CAT_DEFAULT (rtpopuspay_debug)
+
+
+static GstStaticPadTemplate gst_rtp_opus_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus, multistream = (boolean) FALSE")
+ );
+
+static GstStaticPadTemplate gst_rtp_opus_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 48000, "
+ "encoding-name = (string) \"X-GST-OPUS-DRAFT-SPITTKA-00\"")
+ );
+
+static gboolean gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
+G_DEFINE_TYPE (GstRtpOPUSPay, gst_rtp_opus_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+
+static void
+gst_rtp_opus_pay_class_init (GstRtpOPUSPayClass * klass)
+{
+ GstRTPBasePayloadClass *gstbasertppayload_class;
+ GstElementClass *element_class;
+
+ gstbasertppayload_class = (GstRTPBasePayloadClass *) klass;
+ element_class = GST_ELEMENT_CLASS (klass);
+
+ gstbasertppayload_class->set_caps = gst_rtp_opus_pay_setcaps;
+ gstbasertppayload_class->handle_buffer = gst_rtp_opus_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_pay_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_pay_sink_template));
+
+ gst_element_class_set_details_simple (element_class,
+ "RTP Opus payloader",
+ "Codec/Payloader/Network/RTP",
+ "Puts Opus audio in RTP packets",
+ "Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpopuspay_debug, "rtpopuspay", 0,
+ "Opus RTP Payloader");
+}
+
+static void
+gst_rtp_opus_pay_init (GstRtpOPUSPay * rtpopuspay)
+{
+}
+
+static gboolean
+gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ gboolean res;
+ gchar *capsstr;
+
+ capsstr = gst_caps_to_string (caps);
+
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE,
+ "X-GST-OPUS-DRAFT-SPITTKA-00", 48000);
+ res =
+ gst_rtp_base_payload_set_outcaps (payload, "caps", G_TYPE_STRING, capsstr,
+ NULL);
+ g_free (capsstr);
+
+ return res;
+}
+
+static GstFlowReturn
+gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRTPBuffer rtpbuf = { NULL, };
+ GstBuffer *outbuf;
+ gsize size;
+ gpointer *data;
+
+ /* Copy data and timestamp to a new output buffer
+ * FIXME : Don't we have a convenience function for this ? */
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+ outbuf = gst_rtp_buffer_new_copy_data (data, size);
+ GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
+
+ /* Unmap and free input buffer */
+ gst_buffer_unmap (buffer, data, size);
+ gst_buffer_unref (buffer);
+
+ /* Remove marker from RTP buffer */
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtpbuf);
+ gst_rtp_buffer_set_marker (&rtpbuf, FALSE);
+ gst_rtp_buffer_unmap (&rtpbuf);
+
+ /* Push out */
+ return gst_rtp_base_payload_push (basepayload, outbuf);
+}
diff --git a/ext/opus/gstrtpopuspay.h b/ext/opus/gstrtpopuspay.h
new file mode 100644
index 000000000..81160fe2a
--- /dev/null
+++ b/ext/opus/gstrtpopuspay.h
@@ -0,0 +1,58 @@
+/*
+ * Opus Payloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_RTP_OPUS_PAY_H__
+#define __GST_RTP_OPUS_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_OPUS_PAY \
+ (gst_rtp_opus_pay_get_type())
+#define GST_RTP_OPUS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPay))
+#define GST_RTP_OPUS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPayClass))
+#define GST_IS_RTP_OPUS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_PAY))
+#define GST_IS_RTP_OPUS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_PAY))
+
+typedef struct _GstRtpOPUSPay GstRtpOPUSPay;
+typedef struct _GstRtpOPUSPayClass GstRtpOPUSPayClass;
+
+struct _GstRtpOPUSPay
+{
+ GstRTPBasePayload payload;
+};
+
+struct _GstRtpOPUSPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_opus_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_OPUS_PAY_H__ */
diff --git a/ext/resindvd/rsndec.c b/ext/resindvd/rsndec.c
index 7db1e46eb..3abc0065c 100644
--- a/ext/resindvd/rsndec.c
+++ b/ext/resindvd/rsndec.c
@@ -277,8 +277,7 @@ rsn_dec_change_state (GstElement * element, GstStateChange transition)
new_child = gst_element_factory_make ("autoconvert", NULL);
decoder_factories = klass->get_decoder_factories (klass);
- g_object_set (G_OBJECT (new_child), "initial-identity", TRUE,
- "factories", decoder_factories, NULL);
+ g_object_set (G_OBJECT (new_child), "factories", decoder_factories, NULL);
if (new_child == NULL || !rsn_dec_set_child (self, new_child))
ret = GST_STATE_CHANGE_FAILURE;
break;
diff --git a/ext/voaacenc/gstvoaacenc.c b/ext/voaacenc/gstvoaacenc.c
index c9fa92180..a0bd9dae5 100644
--- a/ext/voaacenc/gstvoaacenc.c
+++ b/ext/voaacenc/gstvoaacenc.c
@@ -454,9 +454,6 @@ gst_voaacenc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
g_return_val_if_fail (voaacenc->handle, GST_FLOW_NOT_NEGOTIATED);
- if (voaacenc->rate == 0 || voaacenc->channels == 0)
- goto not_negotiated;
-
/* we don't deal with squeezing remnants, so simply discard those */
if (G_UNLIKELY (buf == NULL)) {
GST_DEBUG_OBJECT (benc, "no data");
@@ -508,13 +505,6 @@ exit:
return ret;
/* ERRORS */
-not_negotiated:
- {
- GST_ELEMENT_ERROR (voaacenc, STREAM, TYPE_NOT_FOUND,
- (NULL), ("unknown type"));
- ret = GST_FLOW_NOT_NEGOTIATED;
- goto exit;
- }
encode_failed:
{
GST_ELEMENT_ERROR (voaacenc, STREAM, ENCODE, (NULL), ("encode failed"));
diff --git a/ext/voamrwbenc/gstvoamrwbenc.c b/ext/voamrwbenc/gstvoamrwbenc.c
index 4ecc5768c..4647b84ab 100644
--- a/ext/voamrwbenc/gstvoamrwbenc.c
+++ b/ext/voamrwbenc/gstvoamrwbenc.c
@@ -281,11 +281,6 @@ gst_voamrwbenc_handle_frame (GstAudioEncoder * benc, GstBuffer * buffer)
g_return_val_if_fail (amrwbenc->handle, GST_FLOW_NOT_NEGOTIATED);
- if (amrwbenc->rate == 0 || amrwbenc->channels == 0) {
- ret = GST_FLOW_NOT_NEGOTIATED;
- goto done;
- }
-
/* we don't deal with squeezing remnants, so simply discard those */
if (G_UNLIKELY (buffer == NULL)) {
GST_DEBUG_OBJECT (amrwbenc, "no data");
diff --git a/ext/xvid/gstxvidenc.c b/ext/xvid/gstxvidenc.c
index dc60d0d8c..57e9a3317 100644
--- a/ext/xvid/gstxvidenc.c
+++ b/ext/xvid/gstxvidenc.c
@@ -51,14 +51,37 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-xvid, "
+ GST_STATIC_CAPS ("video/mpeg, "
+ "mpegversion = (int) 4, "
+ "systemstream = (boolean) FALSE, "
+ "width = (int) [ 0, MAX ], "
+ "height = (int) [ 0, MAX ], "
+ "framerate = (fraction) [ 0/1, MAX ], "
+ "profile = (string) simple, "
+ "level = (string) { 0, 1, 2, 3, 4a, 5, 6 };"
+ "video/mpeg, "
+ "mpegversion = (int) 4, "
+ "systemstream = (boolean) FALSE, "
+ "width = (int) [ 0, MAX ], "
+ "height = (int) [ 0, MAX ], "
+ "framerate = (fraction) [ 0/1, MAX ], "
+ "profile = (string) advanced-real-time-simple, "
+ "level = (string) { 1, 2, 3, 4 };"
+ "video/mpeg, "
+ "mpegversion = (int) 4, "
+ "systemstream = (boolean) FALSE, "
"width = (int) [ 0, MAX ], "
- "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ]; "
+ "height = (int) [ 0, MAX ], "
+ "framerate = (fraction) [ 0/1, MAX ], "
+ "profile = (string) advanced-simple, "
+ "level = (string) { 0, 1, 2, 3, 4 };"
"video/mpeg, "
"mpegversion = (int) 4, "
"systemstream = (boolean) FALSE, "
+ "width = (int) [ 0, MAX ], " "height = (int) [ 0, MAX ]; "
+ "video/x-xvid, "
"width = (int) [ 0, MAX ], "
- "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ]")
+ "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ];")
);
@@ -106,6 +129,9 @@ gst_xvidenc_profile_get_type (void)
{XVID_PROFILE_S_L1, "S_L1", "Simple profile, L1"},
{XVID_PROFILE_S_L2, "S_L2", "Simple profile, L2"},
{XVID_PROFILE_S_L3, "S_L3", "Simple profile, L3"},
+ {XVID_PROFILE_S_L4a, "S_L4a", "Simple profile, L4a"},
+ {XVID_PROFILE_S_L5, "S_L5", "Simple profile, L5"},
+ {XVID_PROFILE_S_L6, "S_L6", "Simple profile, L6"},
{XVID_PROFILE_ARTS_L1, "ARTS_L1",
"Advanced real-time simple profile, L1"},
{XVID_PROFILE_ARTS_L2, "ARTS_L2",
@@ -578,11 +604,97 @@ gst_xvidenc_setup (GstXvidEnc * xvidenc)
xvid_enc_create_t xenc;
xvid_enc_plugin_t xplugin[2];
gint ret;
+ GstCaps *allowed_caps;
+ gint profile = -1;
+
+ /* Negotiate profile/level with downstream */
+ allowed_caps = gst_pad_get_allowed_caps (xvidenc->srcpad);
+ if (allowed_caps && !gst_caps_is_empty (allowed_caps)) {
+ const gchar *profile_str, *level_str;
+
+ allowed_caps = gst_caps_make_writable (allowed_caps);
+ gst_caps_truncate (allowed_caps);
+
+ profile_str =
+ gst_structure_get_string (gst_caps_get_structure (allowed_caps, 0),
+ "profile");
+ level_str =
+ gst_structure_get_string (gst_caps_get_structure (allowed_caps, 0),
+ "level");
+ if (profile_str) {
+ if (g_str_equal (profile_str, "simple")) {
+ if (!level_str) {
+ profile = XVID_PROFILE_S_L0;
+ } else if (g_str_equal (level_str, "0")) {
+ profile = XVID_PROFILE_S_L0;
+ } else if (g_str_equal (level_str, "1")) {
+ profile = XVID_PROFILE_S_L1;
+ } else if (g_str_equal (level_str, "2")) {
+ profile = XVID_PROFILE_S_L2;
+ } else if (g_str_equal (level_str, "3")) {
+ profile = XVID_PROFILE_S_L3;
+ } else if (g_str_equal (level_str, "4a")) {
+ profile = XVID_PROFILE_S_L4a;
+ } else if (g_str_equal (level_str, "5")) {
+ profile = XVID_PROFILE_S_L5;
+ } else if (g_str_equal (level_str, "6")) {
+ profile = XVID_PROFILE_S_L6;
+ } else {
+ GST_ERROR_OBJECT (xvidenc,
+ "Invalid profile/level combination (%s %s)", profile_str,
+ level_str);
+ }
+ } else if (g_str_equal (profile_str, "advanced-real-time-simple")) {
+ if (!level_str) {
+ profile = XVID_PROFILE_ARTS_L1;
+ } else if (g_str_equal (level_str, "1")) {
+ profile = XVID_PROFILE_ARTS_L1;
+ } else if (g_str_equal (level_str, "2")) {
+ profile = XVID_PROFILE_ARTS_L2;
+ } else if (g_str_equal (level_str, "3")) {
+ profile = XVID_PROFILE_ARTS_L3;
+ } else if (g_str_equal (level_str, "4")) {
+ profile = XVID_PROFILE_ARTS_L4;
+ } else {
+ GST_ERROR_OBJECT (xvidenc,
+ "Invalid profile/level combination (%s %s)", profile_str,
+ level_str);
+ }
+ } else if (g_str_equal (profile_str, "advanced-simple")) {
+ if (!level_str) {
+ profile = XVID_PROFILE_AS_L0;
+ } else if (g_str_equal (level_str, "0")) {
+ profile = XVID_PROFILE_AS_L0;
+ } else if (g_str_equal (level_str, "1")) {
+ profile = XVID_PROFILE_AS_L1;
+ } else if (g_str_equal (level_str, "2")) {
+ profile = XVID_PROFILE_AS_L2;
+ } else if (g_str_equal (level_str, "3")) {
+ profile = XVID_PROFILE_AS_L3;
+ } else if (g_str_equal (level_str, "4")) {
+ profile = XVID_PROFILE_AS_L4;
+ } else {
+ GST_ERROR_OBJECT (xvidenc,
+ "Invalid profile/level combination (%s %s)", profile_str,
+ level_str);
+ }
+ } else {
+ GST_ERROR_OBJECT (xvidenc, "Invalid profile (%s)", profile_str);
+ }
+ }
+ }
+ if (allowed_caps)
+ gst_caps_unref (allowed_caps);
+
+ if (profile != -1) {
+ xvidenc->profile = profile;
+ g_object_notify (G_OBJECT (xvidenc), "profile");
+ }
/* see xvid.h for the meaning of all this. */
gst_xvid_init_struct (xenc);
- xenc.profile = xvidenc->profile;
+ xenc.profile = xvidenc->used_profile = xvidenc->profile;
xenc.width = xvidenc->width;
xenc.height = xvidenc->height;
xenc.max_bframes = xvidenc->max_bframes;
@@ -783,6 +895,78 @@ gst_xvidenc_setcaps (GstPad * pad, GstCaps * vscaps)
/* just to be sure */
gst_pad_fixate_caps (xvidenc->srcpad, new_caps);
+ if (xvidenc->used_profile != 0) {
+ switch (xvidenc->used_profile) {
+ case XVID_PROFILE_S_L0:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "0", NULL);
+ break;
+ case XVID_PROFILE_S_L1:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "1", NULL);
+ break;
+ case XVID_PROFILE_S_L2:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "2", NULL);
+ break;
+ case XVID_PROFILE_S_L3:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "3", NULL);
+ break;
+ case XVID_PROFILE_S_L4a:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "4a", NULL);
+ break;
+ case XVID_PROFILE_S_L5:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "5", NULL);
+ break;
+ case XVID_PROFILE_S_L6:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "6", NULL);
+ break;
+ case XVID_PROFILE_ARTS_L1:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-real-time-simple", "level", G_TYPE_STRING, "1", NULL);
+ break;
+ case XVID_PROFILE_ARTS_L2:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-real-time-simple", "level", G_TYPE_STRING, "2", NULL);
+ break;
+ case XVID_PROFILE_ARTS_L3:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-real-time-simple", "level", G_TYPE_STRING, "3", NULL);
+ break;
+ case XVID_PROFILE_ARTS_L4:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-real-time-simple", "level", G_TYPE_STRING, "4", NULL);
+ break;
+ case XVID_PROFILE_AS_L0:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "0", NULL);
+ break;
+ case XVID_PROFILE_AS_L1:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "1", NULL);
+ break;
+ case XVID_PROFILE_AS_L2:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "2", NULL);
+ break;
+ case XVID_PROFILE_AS_L3:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "3", NULL);
+ break;
+ case XVID_PROFILE_AS_L4:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "4", NULL);
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ }
+
/* src pad should accept anyway */
ret = gst_pad_set_caps (xvidenc->srcpad, new_caps);
gst_caps_unref (new_caps);
diff --git a/ext/xvid/gstxvidenc.h b/ext/xvid/gstxvidenc.h
index a2f32d0a8..121c99d41 100644
--- a/ext/xvid/gstxvidenc.h
+++ b/ext/xvid/gstxvidenc.h
@@ -64,6 +64,7 @@ struct _GstXvidEnc {
/* encoding profile */
gint profile;
+ gint used_profile;
/* quantizer type; h263, MPEG */
gint quant_type;
diff --git a/gst-libs/gst/basecamerabinsrc/Makefile.am b/gst-libs/gst/basecamerabinsrc/Makefile.am
index 733e130dd..ff8c78264 100644
--- a/gst-libs/gst/basecamerabinsrc/Makefile.am
+++ b/gst-libs/gst/basecamerabinsrc/Makefile.am
@@ -20,8 +20,6 @@ libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_CFLAGS = \
-DGST_USE_UNSTABLE_API \
$(GST_CFLAGS)
libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_LIBADD = \
- $(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la \
- $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) -lgstinterfaces-$(GST_MAJORMINOR) \
-lgstapp-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_LIBS)
libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_ALL_LDFLAGS) $(GST_LT_LDFLAGS)
@@ -35,7 +33,6 @@ Android.mk: Makefile.am
-:LDFLAGS $(libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_LDFLAGS) \
$(libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_LIBADD) \
-ldl \
- -:LIBFILTER_STATIC gstphotography-@GST_MAJORMINOR@ \
-:HEADER_TARGET gstreamer-@GST_MAJORMINOR@/gst/basecamerabinsrc \
-:HEADERS $(libgstbasecamerabinsrcinclude_HEADERS) \
-:PASSTHROUGH LOCAL_ARM_MODE:=arm \
diff --git a/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c b/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c
index 9e26d5414..98213c9ba 100644
--- a/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c
+++ b/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c
@@ -112,57 +112,6 @@ GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME,
*/
/**
- * gst_base_camera_src_get_photography:
- * @self: the camerasrc bin
- *
- * Get object implementing photography interface, if there is one. Otherwise
- * returns NULL.
- */
-GstPhotography *
-gst_base_camera_src_get_photography (GstBaseCameraSrc * self)
-{
- GstElement *elem;
-
- if (GST_IS_PHOTOGRAPHY (self)) {
- elem = GST_ELEMENT (self);
- } else {
- elem = gst_bin_get_by_interface (GST_BIN (self), GST_TYPE_PHOTOGRAPHY);
- }
-
- if (elem) {
- return GST_PHOTOGRAPHY (elem);
- }
-
- return NULL;
-}
-
-
-/**
- * gst_base_camera_src_get_colorbalance:
- * @self: the camerasrc bin
- *
- * Get object implementing colorbalance interface, if there is one. Otherwise
- * returns NULL.
- */
-GstColorBalance *
-gst_base_camera_src_get_color_balance (GstBaseCameraSrc * self)
-{
- GstElement *elem;
-
- if (GST_IS_COLOR_BALANCE (self)) {
- elem = GST_ELEMENT (self);
- } else {
- elem = gst_bin_get_by_interface (GST_BIN (self), GST_TYPE_COLOR_BALANCE);
- }
-
- if (elem) {
- return GST_COLOR_BALANCE (self);
- }
-
- return NULL;
-}
-
-/**
* gst_base_camera_src_set_mode:
* @self: the camerasrc bin
* @mode: the mode
diff --git a/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h b/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h
index ab9b47773..9d63e2d3c 100644
--- a/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h
+++ b/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h
@@ -1,6 +1,7 @@
/*
* GStreamer
* Copyright (C) 2010 Texas Instruments, Inc
+ * Copyright (C) 2011 Thiago Santos <thiago.sousa.santos@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -29,8 +30,6 @@
#include <gst/gst.h>
#include <gst/gstbin.h>
-#include <gst/interfaces/photography.h>
-#include <gst/video/colorbalance.h>
#include "gstcamerabin-enum.h"
#include "gstcamerabinpreview.h"
@@ -131,9 +130,6 @@ struct _GstBaseCameraSrcClass
#define MAX_ZOOM 10.0f
#define ZOOM_1X MIN_ZOOM
-GstPhotography * gst_base_camera_src_get_photography (GstBaseCameraSrc *self);
-GstColorBalance * gst_base_camera_src_get_color_balance (GstBaseCameraSrc *self);
-
gboolean gst_base_camera_src_set_mode (GstBaseCameraSrc *self, GstCameraBinMode mode);
void gst_base_camera_src_setup_zoom (GstBaseCameraSrc * self);
void gst_base_camera_src_setup_preview (GstBaseCameraSrc * self, GstCaps * preview_caps);
diff --git a/gst-libs/gst/codecparsers/Makefile.am b/gst-libs/gst/codecparsers/Makefile.am
index c3dfe9307..2593f85c5 100644
--- a/gst-libs/gst/codecparsers/Makefile.am
+++ b/gst-libs/gst/codecparsers/Makefile.am
@@ -1,13 +1,16 @@
lib_LTLIBRARIES = libgstcodecparsers-@GST_MAJORMINOR@.la
libgstcodecparsers_@GST_MAJORMINOR@_la_SOURCES = \
- gstmpegvideoparser.c gsth264parser.c gstvc1parser.c
+ gstmpegvideoparser.c gsth264parser.c gstvc1parser.c gstmpeg4parser.c \
+ parserutils.c
libgstcodecparsers_@GST_MAJORMINOR@includedir = \
$(includedir)/gstreamer-@GST_MAJORMINOR@/gst/codecparsers
+noinst_HEADERS = parserutils.h
+
libgstcodecparsers_@GST_MAJORMINOR@include_HEADERS = \
- gstmpegvideoparser.h gsth264parser.h gstvc1parser.h
+ gstmpegvideoparser.h gsth264parser.h gstvc1parser.h gstmpeg4parser.h
libgstcodecparsers_@GST_MAJORMINOR@_la_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) \
diff --git a/gst-libs/gst/codecparsers/gsth264parser.c b/gst-libs/gst/codecparsers/gsth264parser.c
index 1822b14ca..6a164ecb1 100644
--- a/gst-libs/gst/codecparsers/gsth264parser.c
+++ b/gst-libs/gst/codecparsers/gsth264parser.c
@@ -94,31 +94,31 @@ GST_DEBUG_CATEGORY (h264_parser_debug);
#define GST_CAT_DEFAULT h264_parser_debug
/**** Default scaling_lists according to Table 7-2 *****/
-const guint8 default_4x4_intra[16] = {
+static const guint8 default_4x4_intra[16] = {
6, 13, 13, 20, 20, 20, 28, 28, 28, 28, 32, 32,
32, 37, 37, 42
};
-const guint8 default_4x4_inter[16] = {
+static const guint8 default_4x4_inter[16] = {
10, 14, 14, 20, 20, 20, 24, 24, 24, 24, 27, 27,
27, 30, 30, 34
};
-const guint8 default_8x8_intra[64] = {
+static const guint8 default_8x8_intra[64] = {
6, 10, 10, 13, 11, 13, 16, 16, 16, 16, 18, 18,
18, 18, 18, 23, 23, 23, 23, 23, 23, 25, 25, 25, 25, 25, 25, 25, 27, 27, 27,
27, 27, 27, 27, 27, 29, 29, 29, 29, 29, 29, 29, 31, 31, 31, 31, 31, 31, 33,
33, 33, 33, 33, 36, 36, 36, 36, 38, 38, 38, 40, 40, 42
};
-const guint8 default_8x8_inter[64] = {
+static const guint8 default_8x8_inter[64] = {
9, 13, 13, 15, 13, 15, 17, 17, 17, 17, 19, 19,
19, 19, 19, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 24, 24, 24,
24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 27, 27, 27, 27, 27, 27, 28,
28, 28, 28, 28, 30, 30, 30, 30, 32, 32, 32, 33, 33, 35
};
-const guint8 zigzag_8x8[64] = {
+static const guint8 zigzag_8x8[64] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34,
@@ -129,7 +129,7 @@ const guint8 zigzag_8x8[64] = {
53, 60, 61, 54, 47, 55, 62, 63
};
-const guint8 zigzag_4x4[16] = {
+static const guint8 zigzag_4x4[16] = {
0, 1, 4, 8,
5, 2, 3, 6,
9, 12, 13, 10,
@@ -743,59 +743,60 @@ error:
}
static gboolean
-slice_parse_ref_pic_list_reordering (GstH264SliceHdr * slice, NalReader * nr)
+slice_parse_ref_pic_list_modification_1 (GstH264SliceHdr * slice,
+ NalReader * nr, guint list)
{
- GST_DEBUG ("parsing \"Reference picture list reordering\"");
+ GstH264RefPicListModification *entries;
+ guint8 *ref_pic_list_modification_flag;
+ guint32 modification_of_pic_nums_idc;
+ guint i = 0;
- if (!GST_H264_IS_I_SLICE (slice) && !GST_H264_IS_SI_SLICE (slice)) {
- guint8 ref_pic_list_reordering_flag_l0;
- guint32 reordering_of_pic_nums_idc;
-
- READ_UINT8 (nr, ref_pic_list_reordering_flag_l0, 1);
- if (ref_pic_list_reordering_flag_l0)
- do {
- READ_UE (nr, reordering_of_pic_nums_idc);
- if (reordering_of_pic_nums_idc == 0 || reordering_of_pic_nums_idc == 1) {
- guint32 abs_diff_pic_num_minus1 G_GNUC_UNUSED;
-
- READ_UE_ALLOWED (nr, abs_diff_pic_num_minus1, 0,
- slice->max_pic_num - 1);
- } else if (reordering_of_pic_nums_idc == 2) {
- guint32 long_term_pic_num;
-
- READ_UE (nr, long_term_pic_num);
- }
- } while (reordering_of_pic_nums_idc != 3);
+ if (list == 0) {
+ entries = slice->ref_pic_list_modification_l0;
+ ref_pic_list_modification_flag = &slice->ref_pic_list_modification_flag_l0;
+ } else {
+ entries = slice->ref_pic_list_modification_l1;
+ ref_pic_list_modification_flag = &slice->ref_pic_list_modification_flag_l1;
}
- if (GST_H264_IS_B_SLICE (slice)) {
- guint8 ref_pic_list_reordering_flag_l1;
- guint32 reordering_of_pic_nums_idc;
-
- READ_UINT8 (nr, ref_pic_list_reordering_flag_l1, 1);
- if (ref_pic_list_reordering_flag_l1)
- do {
- READ_UE (nr, reordering_of_pic_nums_idc);
- if (reordering_of_pic_nums_idc == 0 || reordering_of_pic_nums_idc == 1) {
- guint32 abs_diff_num_minus1;
-
- READ_UE (nr, abs_diff_num_minus1);
- } else if (reordering_of_pic_nums_idc == 2) {
- guint32 long_term_pic_num;
-
- READ_UE (nr, long_term_pic_num);
- }
- } while (reordering_of_pic_nums_idc != 3);
+ READ_UINT8 (nr, *ref_pic_list_modification_flag, 1);
+ if (*ref_pic_list_modification_flag) {
+ do {
+ READ_UE (nr, modification_of_pic_nums_idc);
+ if (modification_of_pic_nums_idc == 0 ||
+ modification_of_pic_nums_idc == 1) {
+ READ_UE_ALLOWED (nr, entries[i].value.abs_diff_pic_num_minus1, 0,
+ slice->max_pic_num - 1);
+ } else if (modification_of_pic_nums_idc == 2) {
+ READ_UE (nr, entries[i].value.long_term_pic_num);
+ }
+ } while (modification_of_pic_nums_idc != 3);
}
return TRUE;
error:
- GST_WARNING ("error parsing \"Reference picture list reordering\"");
+ GST_WARNING ("error parsing \"Reference picture list %u modification\"",
+ list);
return FALSE;
}
static gboolean
+slice_parse_ref_pic_list_modification (GstH264SliceHdr * slice, NalReader * nr)
+{
+ if (!GST_H264_IS_I_SLICE (slice) && !GST_H264_IS_SI_SLICE (slice)) {
+ if (!slice_parse_ref_pic_list_modification_1 (slice, nr, 0))
+ return FALSE;
+ }
+
+ if (GST_H264_IS_B_SLICE (slice)) {
+ if (!slice_parse_ref_pic_list_modification_1 (slice, nr, 1))
+ return FALSE;
+ }
+ return TRUE;
+}
+
+static gboolean
gst_h264_slice_parse_dec_ref_pic_marking (GstH264SliceHdr * slice,
GstH264NalUnit * nalu, NalReader * nr)
{
@@ -1090,6 +1091,7 @@ gst_h264_parser_parse_pic_timing (GstH264NalParser * nalparser,
guint8 num_clock_num_ts;
guint i;
+ tim->pic_struct_present_flag = TRUE;
READ_UINT8 (nr, tim->pic_struct, 4);
CHECK_ALLOWED ((gint8) tim->pic_struct, 0, 8);
@@ -1149,22 +1151,28 @@ gst_h264_nal_parser_free (GstH264NalParser * nalparser)
}
/**
- * gst_h264_parser_identify_nalu:
+ * gst_h264_parser_identify_nalu_unchecked:
* @nalparser: a #GstH264NalParser
* @data: The data to parse
* @offset: the offset from which to parse @data
* @size: the size of @data
* @nalu: The #GstH264NalUnit where to store parsed nal headers
*
- * Parses @data and fills @nalu from the next nalu data from @data
+ * Parses @data and fills @nalu from the next nalu data from @data.
+ *
+ * This differs from @gst_h264_parser_identify_nalu in that it doesn't
+ * check whether the packet is complete or not.
+ *
+ * Note: Only use this function if you already know the provided @data
+ * is a complete NALU, else use @gst_h264_parser_identify_nalu.
*
* Returns: a #GstH264ParserResult
*/
GstH264ParserResult
-gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
+gst_h264_parser_identify_nalu_unchecked (GstH264NalParser * nalparser,
const guint8 * data, guint offset, gsize size, GstH264NalUnit * nalu)
{
- gint off1, off2;
+ gint off1;
if (size < offset + 4) {
GST_DEBUG ("Can't parse, buffer has too small size %" G_GSIZE_FORMAT
@@ -1187,12 +1195,14 @@ gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
nalu->valid = TRUE;
nalu->sc_offset = offset + off1;
+
/* sc might have 2 or 3 0-bytes */
if (nalu->sc_offset > 0 && data[nalu->sc_offset - 1] == 00)
nalu->sc_offset--;
nalu->offset = offset + off1 + 3;
nalu->data = (guint8 *) data;
+
set_nalu_datas (nalu);
if (nalu->type == GST_H264_NAL_SEQ_END ||
@@ -1202,6 +1212,37 @@ gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
return GST_H264_PARSER_OK;
}
+ nalu->size = size - nalu->offset;
+
+ return GST_H264_PARSER_OK;
+}
+
+/**
+ * gst_h264_parser_identify_nalu:
+ * @nalparser: a #GstH264NalParser
+ * @data: The data to parse
+ * @offset: the offset from which to parse @data
+ * @size: the size of @data
+ * @nalu: The #GstH264NalUnit where to store parsed nal headers
+ *
+ * Parses @data and fills @nalu from the next nalu data from @data
+ *
+ * Returns: a #GstH264ParserResult
+ */
+GstH264ParserResult
+gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
+ const guint8 * data, guint offset, gsize size, GstH264NalUnit * nalu)
+{
+ GstH264ParserResult res;
+ gint off2;
+
+ res =
+ gst_h264_parser_identify_nalu_unchecked (nalparser, data, offset, size,
+ nalu);
+
+ if (res != GST_H264_PARSER_OK || nalu->size == 0)
+ goto beach;
+
off2 = scan_for_start_codes (data + nalu->offset, size - nalu->offset);
if (off2 < 0) {
GST_DEBUG ("Nal start %d, No end found", nalu->offset);
@@ -1217,9 +1258,12 @@ gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
return GST_H264_PARSER_BROKEN_DATA;
GST_DEBUG ("Complete nal found. Off: %d, Size: %d", nalu->offset, nalu->size);
- return GST_H264_PARSER_OK;
+
+beach:
+ return res;
}
+
/**
* gst_h264_parser_identify_nalu_avc:
* @nalparser: a #GstH264NalParser
@@ -1781,7 +1825,7 @@ gst_h264_parser_parse_slice_hdr (GstH264NalParser * nalparser,
}
}
- if (!slice_parse_ref_pic_list_reordering (slice, &nr))
+ if (!slice_parse_ref_pic_list_modification (slice, &nr))
goto error;
if ((pps->weighted_pred_flag && (GST_H264_IS_P_SLICE (slice)
@@ -1864,6 +1908,9 @@ gst_h264_parser_parse_sei (GstH264NalParser * nalparser, GstH264NalUnit * nalu,
nal_reader_init (&nr, nalu->data + nalu->offset + 1, nalu->size - 1);
+ /* init */
+ memset (sei, 0, sizeof (*sei));
+
sei->payloadType = 0;
do {
READ_UINT8 (&nr, payload_type_byte, 8);
diff --git a/gst-libs/gst/codecparsers/gsth264parser.h b/gst-libs/gst/codecparsers/gsth264parser.h
index 2dfe2d7b6..d58f1b07d 100644
--- a/gst-libs/gst/codecparsers/gsth264parser.h
+++ b/gst-libs/gst/codecparsers/gsth264parser.h
@@ -168,24 +168,25 @@ typedef enum
GST_H264_S_SI_SLICE = 9
} GstH264SliceType;
-typedef struct _GstH264NalParser GstH264NalParser;
+typedef struct _GstH264NalParser GstH264NalParser;
-typedef struct _GstH264NalUnit GstH264NalUnit;
+typedef struct _GstH264NalUnit GstH264NalUnit;
-typedef struct _GstH264SPS GstH264SPS;
-typedef struct _GstH264PPS GstH264PPS;
-typedef struct _GstH264HRDParams GstH264HRDParams;
-typedef struct _GstH264VUIParams GstH264VUIParams;
+typedef struct _GstH264SPS GstH264SPS;
+typedef struct _GstH264PPS GstH264PPS;
+typedef struct _GstH264HRDParams GstH264HRDParams;
+typedef struct _GstH264VUIParams GstH264VUIParams;
-typedef struct _GstH264DecRefPicMarking GstH264DecRefPicMarking;
-typedef struct _GstH264RefPicMarking GstH264RefPicMarking;
-typedef struct _GstH264PredWeightTable GstH264PredWeightTable;
-typedef struct _GstH264SliceHdr GstH264SliceHdr;
+typedef struct _GstH264RefPicListModification GstH264RefPicListModification;
+typedef struct _GstH264DecRefPicMarking GstH264DecRefPicMarking;
+typedef struct _GstH264RefPicMarking GstH264RefPicMarking;
+typedef struct _GstH264PredWeightTable GstH264PredWeightTable;
+typedef struct _GstH264SliceHdr GstH264SliceHdr;
-typedef struct _GstH264ClockTimestamp GstH264ClockTimestamp;
-typedef struct _GstH264PicTiming GstH264PicTiming;
-typedef struct _GstH264BufferingPeriod GstH264BufferingPeriod;
-typedef struct _GstH264SEIMessage GstH264SEIMessage;
+typedef struct _GstH264ClockTimestamp GstH264ClockTimestamp;
+typedef struct _GstH264PicTiming GstH264PicTiming;
+typedef struct _GstH264BufferingPeriod GstH264BufferingPeriod;
+typedef struct _GstH264SEIMessage GstH264SEIMessage;
/**
* GstH264NalUnit:
@@ -485,6 +486,18 @@ struct _GstH264PPS
gboolean valid;
};
+struct _GstH264RefPicListModification
+{
+ guint8 modification_of_pic_nums_idc;
+ union
+ {
+ /* if modification_of_pic_nums_idc == 0 || 1 */
+ guint32 abs_diff_pic_num_minus1;
+ /* if modification_of_pic_nums_idc == 2 */
+ guint32 long_term_pic_num;
+ } value;
+};
+
struct _GstH264PredWeightTable
{
guint8 luma_log2_weight_denom;
@@ -559,6 +572,11 @@ struct _GstH264SliceHdr
guint8 num_ref_idx_l0_active_minus1;
guint8 num_ref_idx_l1_active_minus1;
+ guint8 ref_pic_list_modification_flag_l0;
+ GstH264RefPicListModification ref_pic_list_modification_l0[32];
+ guint8 ref_pic_list_modification_flag_l1;
+ GstH264RefPicListModification ref_pic_list_modification_l1[32];
+
GstH264PredWeightTable pred_weight_table;
/* if nal_unit.ref_idc != 0 */
GstH264DecRefPicMarking dec_ref_pic_marking;
@@ -660,6 +678,10 @@ GstH264ParserResult gst_h264_parser_identify_nalu (GstH264NalParser *nalpars
const guint8 *data, guint offset,
gsize size, GstH264NalUnit *nalu);
+GstH264ParserResult gst_h264_parser_identify_nalu_unchecked (GstH264NalParser *nalparser,
+ const guint8 *data, guint offset,
+ gsize size, GstH264NalUnit *nalu);
+
GstH264ParserResult gst_h264_parser_identify_nalu_avc (GstH264NalParser *nalparser, const guint8 *data,
guint offset, gsize size, guint8 nal_length_size,
GstH264NalUnit *nalu);
diff --git a/gst-libs/gst/codecparsers/gstmpeg4parser.c b/gst-libs/gst/codecparsers/gstmpeg4parser.c
new file mode 100644
index 000000000..302219fc1
--- /dev/null
+++ b/gst-libs/gst/codecparsers/gstmpeg4parser.c
@@ -0,0 +1,1735 @@
+/* Gstreamer
+ * Copyright (C) <2011> Intel
+ * Copyright (C) <2011> Collabora Ltd.
+ * Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+/**
+ * SECTION:gstmpeg4parser
+ * @short_description: Convenience library for parsing mpeg4 part 2 video
+ * bitstream.
+ *
+ * For more details about the structures, you can refer to the
+ * specifications: ISO-IEC-14496-2_2004_MPEG4_VISUAL.pdf
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/base/gstbytereader.h>
+
+
+#include "gstmpeg4parser.h"
+#include "parserutils.h"
+
+#ifndef GST_DISABLE_GST_DEBUG
+
+#define GST_CAT_DEFAULT ensure_debug_category()
+
+static GstDebugCategory *
+ensure_debug_category (void)
+{
+ static gsize cat_gonce = 0;
+
+ if (g_once_init_enter (&cat_gonce)) {
+ gsize cat_done;
+
+ cat_done = (gsize) _gst_debug_category_new ("codecparsers_mpeg4", 0,
+ "GstMpeg4 codec parsing library");
+
+ g_once_init_leave (&cat_gonce, cat_done);
+ }
+
+ return (GstDebugCategory *) cat_gonce;
+}
+
+#else
+
+#define ensure_debug_category() /* NOOP */
+
+#endif /* GST_DISABLE_GST_DEBUG */
+
+#define CHECK_MARKER(br) G_STMT_START { \
+ guint8 marker;\
+ if (!gst_bit_reader_get_bits_uint8 (br, &marker, 1)) { \
+ GST_WARNING ("failed to read marker bit"); \
+ goto failed; \
+ } else if (!marker) {\
+ GST_WARNING ("Wrong marker bit"); \
+ goto failed;\
+ }\
+} G_STMT_END
+
+#define MARKER_UNCHECKED(br) G_STMT_START { \
+ if (!gst_bit_reader_get_bits_uint8_unchecked (br, 1)) { \
+ GST_WARNING ("Wrong marker bit"); \
+ goto failed; \
+ } \
+} G_STMT_END
+
+#define CHECK_REMAINING(br, needed) G_STMT_START { \
+ if (gst_bit_reader_get_remaining (br) < needed) \
+ goto failed; \
+} G_STMT_END
+
+static const guint8 default_intra_quant_mat[64] = {
+ 8, 17, 18, 19, 21, 23, 25, 27,
+ 17, 18, 19, 21, 23, 25, 27, 28,
+ 20, 21, 22, 23, 24, 26, 28, 30,
+ 21, 22, 23, 24, 26, 28, 30, 32,
+ 22, 23, 24, 26, 28, 30, 32, 35,
+ 23, 24, 26, 28, 30, 32, 35, 38,
+ 25, 26, 28, 30, 32, 35, 38, 41,
+ 27, 28, 30, 32, 35, 38, 41, 45
+};
+
+static const guint8 default_non_intra_quant_mat[64] = {
+ 16, 17, 18, 19, 20, 21, 22, 23,
+ 17, 18, 19, 20, 21, 22, 23, 24,
+ 18, 19, 20, 21, 22, 23, 24, 25,
+ 19, 20, 21, 22, 23, 24, 26, 27,
+ 20, 21, 22, 23, 25, 26, 27, 28,
+ 21, 22, 23, 24, 26, 27, 28, 30,
+ 22, 23, 24, 26, 27, 28, 30, 31,
+ 23, 24, 25, 27, 28, 30, 31, 33,
+};
+
+static const guint8 mpeg4_zigzag_8x8[64] = {
+ 0, 1, 8, 16, 9, 2, 3, 10,
+ 17, 24, 32, 25, 18, 11, 4, 5,
+ 12, 19, 26, 33, 40, 48, 41, 34,
+ 27, 20, 13, 6, 7, 14, 21, 28,
+ 35, 42, 49, 56, 57, 50, 43, 36,
+ 29, 22, 15, 23, 30, 37, 44, 51,
+ 58, 59, 52, 45, 38, 31, 39, 46,
+ 53, 60, 61, 54, 47, 55, 62, 63
+};
+
+static const VLCTable mpeg4_dmv_size_vlc_table[] = {
+ {0x00, 2, 0},
+ {0x02, 3, 1},
+ {0x03, 3, 2},
+ {0x04, 3, 3},
+ {0x05, 3, 4},
+ {0x06, 3, 5},
+ {0x0e, 4, 6},
+ {0x1e, 5, 7},
+ {0x3e, 6, 8},
+ {0x7e, 7, 9},
+ {0xfe, 8, 10},
+ {0x1fe, 9, 11},
+ {0x3fe, 10, 12},
+ {0x7fe, 11, 13},
+ {0xffe, 12, 14}
+};
+
+static void
+mpeg4_util_par_from_info (guint8 aspect_ratio_info, guint8 * par_width,
+ guint8 * par_height)
+{
+ switch (aspect_ratio_info) {
+ case 0x02:
+ *par_width = 12;
+ *par_height = 11;
+ break;
+ case 0x03:
+ *par_width = 10;
+ *par_height = 11;
+ break;
+ case 0x04:
+ *par_width = 16;
+ *par_height = 11;
+ break;
+ case 0x05:
+ *par_width = 40;
+ *par_height = 33;
+ break;
+
+ case 0x01:
+ default:
+ *par_width = 1;
+ *par_height = 1;
+ }
+}
+
+static gboolean
+parse_quant (GstBitReader * br, guint8 quant_mat[64],
+ const guint8 default_quant_mat[64], guint8 * load_quant_mat)
+{
+ READ_UINT8 (br, *load_quant_mat, 1);
+ if (*load_quant_mat) {
+ guint i;
+ guint8 val;
+
+ val = 1;
+ for (i = 0; i < 64; i++) {
+
+ if (val != 0)
+ READ_UINT8 (br, val, 8);
+
+ if (val == 0) {
+ if (i == 0)
+ goto invalid_quant_mat;
+ quant_mat[mpeg4_zigzag_8x8[i]] = quant_mat[mpeg4_zigzag_8x8[i - 1]];
+ } else
+ quant_mat[mpeg4_zigzag_8x8[i]] = val;
+ }
+ } else
+ memcpy (quant_mat, default_quant_mat, 64);
+
+ return TRUE;
+
+failed:
+ GST_WARNING ("failed parsing quant matrix");
+ return FALSE;
+
+invalid_quant_mat:
+ GST_WARNING ("the first value should be non zero");
+ goto failed;
+}
+
+static gboolean
+parse_signal_type (GstBitReader * br, GstMpeg4VideoSignalType * signal_type)
+{
+ READ_UINT8 (br, signal_type->type, 1);
+
+ if (signal_type->type) {
+
+ READ_UINT8 (br, signal_type->format, 3);
+ READ_UINT8 (br, signal_type->range, 1);
+ READ_UINT8 (br, signal_type->color_description, 1);
+
+ if (signal_type->color_description) {
+ READ_UINT8 (br, signal_type->color_primaries, 8);
+ READ_UINT8 (br, signal_type->transfer_characteristics, 8);
+ READ_UINT8 (br, signal_type->matrix_coefficients, 8);
+ }
+ }
+
+ return TRUE;
+
+failed:
+ GST_WARNING ("failed parsing \"Video Signal Type\"");
+
+ return FALSE;
+}
+
+static gboolean
+parse_sprite_trajectory (GstBitReader * br,
+ GstMpeg4SpriteTrajectory * sprite_traj, guint no_of_sprite_warping_points)
+{
+ guint i, length;
+
+ for (i = 0; i < no_of_sprite_warping_points; i++) {
+
+ if (!decode_vlc (br, &length, mpeg4_dmv_size_vlc_table,
+ G_N_ELEMENTS (mpeg4_dmv_size_vlc_table)))
+ goto failed;
+
+ if (length)
+ READ_UINT16 (br, sprite_traj->vop_ref_points[i], length);
+ CHECK_MARKER (br);
+
+ if (!decode_vlc (br, &length, mpeg4_dmv_size_vlc_table,
+ G_N_ELEMENTS (mpeg4_dmv_size_vlc_table)))
+ goto failed;
+
+ if (length)
+ READ_UINT16 (br, sprite_traj->sprite_ref_points[i], length);
+ CHECK_MARKER (br);
+ }
+
+ return TRUE;
+
+failed:
+ GST_WARNING ("Could not parse the sprite trajectory");
+ return FALSE;
+}
+
+static guint
+find_psc (GstByteReader * br)
+{
+ guint psc_pos = -1, psc;
+
+ if (!gst_byte_reader_peek_uint24_be (br, &psc))
+ goto failed;
+
+ /* Scan for the picture start code (22 bits - 0x0020) */
+ while ((gst_byte_reader_get_remaining (br) >= 3)) {
+ if (gst_byte_reader_peek_uint24_be (br, &psc) &&
+ ((psc & 0xffffc0) == 0x000080)) {
+ psc_pos = gst_byte_reader_get_pos (br);
+ break;
+ } else
+ gst_byte_reader_skip (br, 1);
+ }
+
+failed:
+
+ return psc_pos;
+}
+
+static inline guint8
+compute_resync_marker_size (const GstMpeg4VideoObjectPlane * vop,
+ guint32 * pattern, guint32 * mask)
+{
+ guint8 off;
+
+ /* FIXME handle the binary only shape case */
+ switch (vop->coding_type) {
+ case (GST_MPEG4_I_VOP):
+ off = 16;
+ break;
+ case (GST_MPEG4_S_VOP):
+ case (GST_MPEG4_P_VOP):
+ off = 15 + vop->fcode_forward;
+
+ break;
+ case (GST_MPEG4_B_VOP):
+ off = MAX (15 + MAX (vop->fcode_forward, vop->fcode_backward), 17);
+
+ break;
+ default:
+ return -1;
+ }
+
+ if (mask && pattern) {
+ switch (off) {
+ case 16:
+ *pattern = 0x00008000;
+ *mask = 0xffff8000;
+ break;
+ case 17:
+ *pattern = 0x00004000;
+ *mask = 0xffffc000;
+ break;
+ case 18:
+ *pattern = 0x00002000;
+ *mask = 0xffffe000;
+ break;
+ case 19:
+ *pattern = 0x00001000;
+ *mask = 0xfffff000;
+ break;
+ case 20:
+ *pattern = 0x0000080;
+ *mask = 0xfffff800;
+ break;
+ case 21:
+ *pattern = 0x00000400;
+ *mask = 0xfffffc00;
+ break;
+ case 22:
+ *pattern = 0x00000200;
+ *mask = 0xfffffe00;
+ break;
+ case 23:
+ *pattern = 0x00000100;
+ *mask = 0xffffff00;
+ break;
+ }
+ }
+
+ return off++; /* Take the following 1 into account */
+}
+
+/**
+ * gst_mpeg4_next_resync:
+ * @packet: The #GstMpeg4Packet to fill
+ * @vop: The previously parsed #GstMpeg4VideoObjectPlane
+ * @offset: offset from which to start the parsing
+ * @data: The data to parse
+ * @size: The size of the @data to parse
+ *
+ * Parses @data and fills @packet with the information of the next resync packet
+ * found.
+ *
+ * Returns: a #GstMpeg4ParseResult
+ */
+static GstMpeg4ParseResult
+gst_mpeg4_next_resync (GstMpeg4Packet * packet,
+ const GstMpeg4VideoObjectPlane * vop, const guint8 * data, gsize size,
+ gboolean first_resync_marker)
+{
+ guint markersize = 0, off1, off2;
+ guint32 mask = 0xff, pattern = 0xff;
+ GstByteReader br;
+
+ gst_byte_reader_init (&br, data, size);
+
+ g_return_val_if_fail (packet != NULL, GST_MPEG4_PARSER_ERROR);
+ g_return_val_if_fail (vop != NULL, GST_MPEG4_PARSER_ERROR);
+
+ markersize = compute_resync_marker_size (vop, &pattern, &mask);
+
+ if (first_resync_marker) {
+ off1 = 0;
+ } else {
+ off1 = gst_byte_reader_masked_scan_uint32 (&br, mask, pattern, 0, size);
+ }
+
+ if (off1 == -1)
+ return GST_MPEG4_PARSER_NO_PACKET;
+
+ GST_DEBUG ("Resync code found at %i", off1);
+
+ packet->offset = off1;
+ packet->type = GST_MPEG4_RESYNC;
+ packet->marker_size = markersize;
+
+ off2 = gst_byte_reader_masked_scan_uint32 (&br, mask, pattern,
+ off1 + 2, size - off1 - 2);
+
+ if (off2 == -1)
+ return GST_MPEG4_PARSER_NO_PACKET_END;
+
+ packet->size = off2 - off1;
+
+ return GST_MPEG4_PARSER_OK;
+}
+
+
+/********** API **********/
+
+/**
+ * gst_mpeg4_parse:
+ * @packet: The #GstMpeg4Packet to fill
+ * @skip_user_data: %TRUE to skip user data packet %FALSE otherwize
+ * @vop: The last parsed #GstMpeg4VideoObjectPlane or %NULL if you do
+ * not need to detect the resync codes.
+ * @offset: offset from which to start the parsing
+ * @data: The data to parse
+ * @size: The size of the @data to parse
+ *
+ * Parses @data and fills @packet with the information of the next packet
+ * found.
+ *
+ * Returns: a #GstMpeg4ParseResult
+ */
+GstMpeg4ParseResult
+gst_mpeg4_parse (GstMpeg4Packet * packet, gboolean skip_user_data,
+ GstMpeg4VideoObjectPlane * vop, const guint8 * data, guint offset,
+ gsize size)
+{
+ gint off1, off2;
+ GstByteReader br;
+ GstMpeg4ParseResult resync_res;
+ static guint first_resync_marker = TRUE;
+
+ gst_byte_reader_init (&br, data, size);
+
+ g_return_val_if_fail (packet != NULL, GST_MPEG4_PARSER_ERROR);
+
+ if (size - offset <= 4) {
+ GST_DEBUG ("Can't parse, buffer is to small size %" G_GSSIZE_FORMAT
+ " at offset %d", size, offset);
+ return GST_MPEG4_PARSER_ERROR;
+ }
+
+ if (vop) {
+ resync_res =
+ gst_mpeg4_next_resync (packet, vop, data + offset, size - offset,
+ first_resync_marker);
+ first_resync_marker = FALSE;
+
+ /* We found a complet slice */
+ if (resync_res == GST_MPEG4_PARSER_OK)
+ return resync_res;
+ else if (resync_res == GST_MPEG4_PARSER_NO_PACKET_END) {
+ /* It doesn't mean there is no standard packet end, look for it */
+ off1 = packet->offset;
+ goto find_end;
+ } else if (resync_res == GST_MPEG4_PARSER_NO_PACKET)
+ return resync_res;
+ } else {
+ first_resync_marker = TRUE;
+ }
+
+ off1 = gst_byte_reader_masked_scan_uint32 (&br, 0xffffff00, 0x00000100,
+ offset, size - offset);
+
+ if (off1 == -1) {
+ GST_DEBUG ("No start code prefix in this buffer");
+ return GST_MPEG4_PARSER_NO_PACKET;
+ }
+
+ /* Recursively skip user data if needed */
+ if (skip_user_data && data[off1 + 3] == GST_MPEG4_USER_DATA)
+ /* If we are here, we know no resync code has been found the first time, so we
+ * don't look for it this time */
+ return gst_mpeg4_parse (packet, skip_user_data, NULL, data, off1 + 3,
+ size - off1 - 3);
+
+ packet->offset = off1 + 3;
+ packet->data = data;
+ packet->type = (GstMpeg4StartCode) (data[off1 + 3]);
+
+find_end:
+ off2 = gst_byte_reader_masked_scan_uint32 (&br, 0xffffff00, 0x00000100,
+ off1 + 4, size - off1 - 4);
+
+ if (off2 == -1) {
+ GST_DEBUG ("Packet start %d, No end found", off1 + 4);
+
+ packet->size = G_MAXUINT;
+ return GST_MPEG4_PARSER_NO_PACKET_END;
+ }
+
+ if (packet->type == GST_MPEG4_RESYNC) {
+ packet->size = (gsize) off2 - off1;
+ } else {
+ packet->size = (gsize) off2 - off1 - 3;
+ }
+
+ GST_DEBUG ("Complete packet of type %x found at: %d, Size: %" G_GSSIZE_FORMAT,
+ packet->type, packet->offset, packet->size);
+ return GST_MPEG4_PARSER_OK;
+
+}
+
+/**
+ * gst_h263_parse:
+ * @packet: The #GstMpeg4Packet to fill
+ * @offset: offset from which to start the parsing
+ * @data: The data to parse
+ * @size: The size of the @data to parse
+ *
+ * Parses @data and fills @packet with the information of the next packet
+ * found.
+ *
+ * Note that the type of the packet is meaningless in this case.
+ *
+ * Returns: a #GstMpeg4ParseResult
+ */
+GstMpeg4ParseResult
+gst_h263_parse (GstMpeg4Packet * packet,
+ const guint8 * data, guint offset, gsize size)
+{
+ gint off1, off2;
+ GstByteReader br;
+
+ gst_byte_reader_init (&br, data, size);
+
+ g_return_val_if_fail (packet != NULL, GST_MPEG4_PARSER_ERROR);
+
+ if (size - offset < 3) {
+ GST_DEBUG ("Can't parse, buffer is to small size %" G_GSSIZE_FORMAT
+ " at offset %d", size, offset);
+ return GST_MPEG4_PARSER_ERROR;
+ }
+
+ off1 = find_psc (&br);
+
+ if (off1 == -1) {
+ GST_DEBUG ("No start code prefix in this buffer");
+ return GST_MPEG4_PARSER_NO_PACKET;
+ }
+
+ packet->offset = off1;
+ packet->data = data;
+
+ off2 = find_psc (&br);
+
+ if (off2 == -1) {
+ GST_DEBUG ("Packet start %d, No end found", off1);
+
+ packet->size = G_MAXUINT;
+ return GST_MPEG4_PARSER_NO_PACKET_END;
+ }
+
+ packet->size = (gsize) off2 - off1;
+
+ GST_DEBUG ("Complete packet found at: %d, Size: %" G_GSSIZE_FORMAT,
+ packet->offset, packet->size);
+
+ return GST_MPEG4_PARSER_OK;
+}
+
+/**
+ * gst_mpeg4_parse_visual_object_sequence:
+ * @vos: The #GstMpeg4VisualObjectSequence structure to fill
+ * @data: The data to parse, should contain the visual_object_sequence_start_code
+ * but not the start code prefix
+ * @size: The size of the @data to parse
+ *
+ * Parses @data containing the visual object sequence packet, and fills
+ * the @vos structure.
+ *
+ * Returns: a #GstMpeg4ParseResult
+ */
+GstMpeg4ParseResult
+gst_mpeg4_parse_visual_object_sequence (GstMpeg4VisualObjectSequence * vos,
+ const guint8 * data, gsize size)
+{
+ guint8 vos_start_code;
+ GstBitReader br = GST_BIT_READER_INIT (data, size);
+
+ g_return_val_if_fail (vos != NULL, GST_MPEG4_PARSER_ERROR);
+
+ READ_UINT8 (&br, vos_start_code, 8);
+ if (vos_start_code != GST_MPEG4_VISUAL_OBJ_SEQ_START)
+ goto wrong_start_code;
+
+ READ_UINT8 (&br, vos->profile_and_level_indication, 8);
+
+ switch (vos->profile_and_level_indication) {
+ case 0x01:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0x02:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x03:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0x08:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL0;
+ break;
+ case 0x10:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL0;
+ break;
+ case 0x11:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0x12:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x21:
+ vos->profile = GST_MPEG4_PROFILE_CORE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0x22:
+ vos->profile = GST_MPEG4_PROFILE_CORE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x32:
+ vos->profile = GST_MPEG4_PROFILE_MAIN;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x33:
+ vos->profile = GST_MPEG4_PROFILE_MAIN;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0x34:
+ vos->profile = GST_MPEG4_PROFILE_MAIN;
+ vos->level = GST_MPEG4_LEVEL4;
+ break;
+ case 0x42:
+ vos->profile = GST_MPEG4_PROFILE_N_BIT;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x51:
+ vos->profile = GST_MPEG4_PROFILE_SCALABLE_TEXTURE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0x61:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_FACE_ANIMATION;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0x62:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_FACE_ANIMATION;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x63:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_FBA;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0x64:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_FBA;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x71:
+ vos->profile = GST_MPEG4_PROFILE_BASIC_ANIMATED_TEXTURE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0x72:
+ vos->profile = GST_MPEG4_PROFILE_BASIC_ANIMATED_TEXTURE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x81:
+ vos->profile = GST_MPEG4_PROFILE_HYBRID;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0x82:
+ vos->profile = GST_MPEG4_PROFILE_HYBRID;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x91:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_REALTIME_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0x92:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_REALTIME_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0x93:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_REALTIME_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0x94:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_REALTIME_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL4;
+ break;
+ case 0xa1:
+ vos->profile = GST_MPEG4_PROFILE_CORE_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0xa2:
+ vos->profile = GST_MPEG4_PROFILE_CORE_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0xa3:
+ vos->profile = GST_MPEG4_PROFILE_CORE_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0xb1:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0xb2:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0xb3:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0xb4:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY;
+ vos->level = GST_MPEG4_LEVEL4;
+ break;
+ case 0xc1:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_CORE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0xc2:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_CORE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0xc3:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_CORE;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0xd1:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SCALABLE_TEXTURE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0xd2:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SCALABLE_TEXTURE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0xd3:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SCALABLE_TEXTURE;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0xe1:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_STUDIO;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0xe2:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_STUDIO;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0xe3:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_STUDIO;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0xe4:
+ vos->profile = GST_MPEG4_PROFILE_SIMPLE_STUDIO;
+ vos->level = GST_MPEG4_LEVEL4;
+ break;
+ case 0xe5:
+ vos->profile = GST_MPEG4_PROFILE_CORE_STUDIO;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0xe6:
+ vos->profile = GST_MPEG4_PROFILE_CORE_STUDIO;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0xe7:
+ vos->profile = GST_MPEG4_PROFILE_CORE_STUDIO;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0xe8:
+ vos->profile = GST_MPEG4_PROFILE_CORE_STUDIO;
+ vos->level = GST_MPEG4_LEVEL4;
+ break;
+ case 0xf0:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL0;
+ break;
+ case 0xf1:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0xf2:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0xf3:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0xf4:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL4;
+ break;
+ case 0xf5:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL5;
+ break;
+ case 0xf7:
+ vos->profile = GST_MPEG4_PROFILE_ADVANCED_SIMPLE;
+ vos->level = GST_MPEG4_LEVEL3b;
+ break;
+ case 0xf8:
+ vos->profile = GST_MPEG4_PROFILE_FINE_GRANULARITY_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL0;
+ break;
+ case 0xf9:
+ vos->profile = GST_MPEG4_PROFILE_FINE_GRANULARITY_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL1;
+ break;
+ case 0xfa:
+ vos->profile = GST_MPEG4_PROFILE_FINE_GRANULARITY_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL2;
+ break;
+ case 0xfb:
+ vos->profile = GST_MPEG4_PROFILE_FINE_GRANULARITY_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL3;
+ break;
+ case 0xfc:
+ vos->profile = GST_MPEG4_PROFILE_FINE_GRANULARITY_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL4;
+ break;
+ case 0xfd:
+ vos->profile = GST_MPEG4_PROFILE_FINE_GRANULARITY_SCALABLE;
+ vos->level = GST_MPEG4_LEVEL5;
+ break;
+ default:
+ vos->profile = GST_MPEG4_PROFILE_RESERVED;
+ vos->level = GST_MPEG4_LEVEL_RESERVED;
+ break;
+ }
+
+ return GST_MPEG4_PARSER_OK;
+
+wrong_start_code:
+ GST_WARNING ("got buffer with wrong start code");
+ return GST_MPEG4_PARSER_ERROR;
+
+failed:
+ GST_WARNING ("failed parsing \"Visual Object\"");
+ return GST_MPEG4_PARSER_ERROR;
+}
+
+/**
+ * gst_mpeg4_parse_visual_object:
+ * @vo: The #GstMpeg4VisualObject structure to fill
+ * @signal_type: The #GstMpeg4VideoSignalType to fill or %NULL
+ * @data: The data to parse, should contain the vo_start_code
+ * but not the start code prefix
+ * @size: The size of the @data to parse
+ *
+ * Parses @data containing the visual object packet, and fills
+ * the @vo structure.
+ *
+ * Returns: a #GstMpeg4ParseResult
+ */
+GstMpeg4ParseResult
+gst_mpeg4_parse_visual_object (GstMpeg4VisualObject * vo,
+ GstMpeg4VideoSignalType * signal_type, const guint8 * data, gsize size)
+{
+ guint8 vo_start_code, type;
+ GstBitReader br = GST_BIT_READER_INIT (data, size);
+
+ g_return_val_if_fail (vo != NULL, GST_MPEG4_PARSER_ERROR);
+
+ GST_DEBUG ("Parsing visual object");
+
+ READ_UINT8 (&br, vo_start_code, 8);
+ if (vo_start_code != GST_MPEG4_VISUAL_OBJ)
+ goto wrong_start_code;
+
+ /* set default values */
+ vo->verid = 0x1;
+ vo->priority = 1;
+
+ READ_UINT8 (&br, vo->is_identifier, 1);
+ if (vo->is_identifier) {
+ READ_UINT8 (&br, vo->verid, 4);
+ READ_UINT8 (&br, vo->priority, 3);
+ }
+
+ READ_UINT8 (&br, type, 4);
+ vo->type = type;
+
+ if ((type == GST_MPEG4_VIDEO_ID ||
+ type == GST_MPEG4_STILL_TEXTURE_ID) && signal_type) {
+
+ if (!parse_signal_type (&br, signal_type))
+ goto failed;
+
+ } else if (signal_type) {
+ signal_type->type = 0;
+ }
+
+ return GST_MPEG4_PARSER_OK;
+
+wrong_start_code:
+ GST_WARNING ("got buffer with wrong start code");
+ return GST_MPEG4_PARSER_ERROR;
+
+failed:
+ GST_WARNING ("failed parsing \"Visual Object\"");
+ return GST_MPEG4_PARSER_ERROR;
+}
+
+/**
+ * gst_mpeg4_parse_video_object_layer:
+ * @vol: The #GstMpeg4VideoObjectLayer structure to fill
+ * @vo: The #GstMpeg4VisualObject currently being parsed or %NULL
+ * @data: The data to parse
+ * @size: The size of the @data to parse
+ *
+ * Parses @data containing the video object layer packet, and fills
+ * the @vol structure.
+ *
+ * Returns: a #GstMpeg4ParseResult
+ */
+GstMpeg4ParseResult
+gst_mpeg4_parse_video_object_layer (GstMpeg4VideoObjectLayer * vol,
+ GstMpeg4VisualObject * vo, const guint8 * data, gsize size)
+{
+ guint8 video_object_layer_start_code;
+
+ /* Used for enums types */
+ guint8 tmp;
+ GstBitReader br = GST_BIT_READER_INIT (data, size);
+
+ g_return_val_if_fail (vol != NULL, GST_MPEG4_PARSER_ERROR);
+
+ GST_DEBUG ("Parsing video object layer");
+
+ READ_UINT8 (&br, video_object_layer_start_code, 8);
+ if (!(video_object_layer_start_code >= GST_MPEG4_VIDEO_LAYER_FIRST &&
+ video_object_layer_start_code <= GST_MPEG4_VIDEO_LAYER_LAST))
+ goto wrong_start_code;
+
+ /* set default values */
+ if (vo) {
+ vol->verid = vo->verid;
+ vol->priority = vo->priority;
+ }
+
+ vol->low_delay = FALSE;
+ vol->chroma_format = 1;
+ vol->vbv_parameters = FALSE;
+ vol->quant_precision = 5;
+ vol->bits_per_pixel = 8;
+ vol->quarter_sample = FALSE;
+ vol->newpred_enable = FALSE;
+ vol->interlaced = 0;
+ vol->width = 0;
+ vol->height = 0;
+
+ READ_UINT8 (&br, vol->random_accessible_vol, 1);
+ READ_UINT8 (&br, vol->video_object_type_indication, 8);
+
+ READ_UINT8 (&br, vol->is_object_layer_identifier, 1);
+ if (vol->is_object_layer_identifier) {
+ READ_UINT8 (&br, vol->verid, 4);
+ READ_UINT8 (&br, vol->priority, 3);
+ }
+
+ READ_UINT8 (&br, tmp, 4);
+ vol->aspect_ratio_info = tmp;
+ if (vol->aspect_ratio_info != GST_MPEG4_EXTENDED_PAR) {
+ mpeg4_util_par_from_info (vol->aspect_ratio_info, &vol->par_width,
+ &vol->par_height);
+
+ } else {
+ gint v;
+
+ READ_UINT8 (&br, vol->par_width, 8);
+ v = vol->par_width;
+ CHECK_ALLOWED (v, 1, 255);
+
+ READ_UINT8 (&br, vol->par_height, 8);
+ v = vol->par_height;
+ CHECK_ALLOWED (v, 1, 255);
+ }
+ GST_DEBUG ("Pixel aspect ratio %d/%d", vol->par_width, vol->par_width);
+
+ READ_UINT8 (&br, vol->control_parameters, 1);
+ if (vol->control_parameters) {
+ guint8 chroma_format;
+
+ READ_UINT8 (&br, chroma_format, 2);
+ vol->chroma_format = chroma_format;
+ READ_UINT8 (&br, vol->low_delay, 1);
+
+ READ_UINT8 (&br, vol->vbv_parameters, 1);
+ if (vol->vbv_parameters) {
+ CHECK_REMAINING (&br, 79);
+
+ vol->first_half_bitrate =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 15);
+ MARKER_UNCHECKED (&br);
+
+ vol->latter_half_bitrate =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 15);
+ MARKER_UNCHECKED (&br);
+
+ vol->bit_rate =
+ (vol->first_half_bitrate << 15) | vol->latter_half_bitrate;
+
+ vol->first_half_vbv_buffer_size =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 15);
+ MARKER_UNCHECKED (&br);
+
+ vol->latter_half_vbv_buffer_size =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 3);
+ MARKER_UNCHECKED (&br);
+
+ vol->vbv_buffer_size = (vol->first_half_vbv_buffer_size << 15) |
+ vol->latter_half_vbv_buffer_size;
+
+ vol->first_half_vbv_occupancy =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 11);
+ MARKER_UNCHECKED (&br);
+
+ vol->latter_half_vbv_occupancy =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 15);
+ MARKER_UNCHECKED (&br);
+ }
+ }
+
+ READ_UINT8 (&br, tmp, 2);
+ vol->shape = tmp;
+
+ if (vol->shape == GST_MPEG4_GRAYSCALE) {
+ /* TODO support grayscale shapes, for now we just pass */
+
+ /* Something the standard starts to define... */
+ GST_WARNING ("Grayscale shaped not supported");
+ goto failed;
+ }
+
+ if (vol->shape == GST_MPEG4_GRAYSCALE && vol->verid != 0x01)
+ READ_UINT8 (&br, vol->shape_extension, 4);
+
+ CHECK_REMAINING (&br, 19);
+
+ MARKER_UNCHECKED (&br);
+ vol->vop_time_increment_resolution =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 16);
+ if (vol->vop_time_increment_resolution < 1) {
+ GST_WARNING ("value not in allowed range. value: %d, range %d-%d",
+ vol->vop_time_increment_resolution, 1, G_MAXUINT16);
+ goto failed;
+ }
+ vol->vop_time_increment_bits =
+ g_bit_storage (vol->vop_time_increment_resolution);
+
+ MARKER_UNCHECKED (&br);
+ vol->fixed_vop_rate = gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+ if (vol->fixed_vop_rate)
+ READ_UINT16 (&br, vol->fixed_vop_time_increment,
+ vol->vop_time_increment_bits);
+
+ if (vol->shape != GST_MPEG4_BINARY_ONLY) {
+ if (vol->shape == GST_MPEG4_RECTANGULAR) {
+ CHECK_REMAINING (&br, 29);
+
+ MARKER_UNCHECKED (&br);
+ vol->width = gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+ vol->height = gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+ }
+
+ READ_UINT8 (&br, vol->interlaced, 1);
+ READ_UINT8 (&br, vol->obmc_disable, 1);
+
+ if (vol->verid == 0x1) {
+ READ_UINT8 (&br, tmp, 1);
+ vol->sprite_enable = tmp;
+ } else
+ READ_UINT8 (&br, tmp, 2);
+ vol->sprite_enable = tmp;
+
+ if (vol->sprite_enable == GST_MPEG4_SPRITE_STATIC ||
+ vol->sprite_enable == GST_MPEG4_SPRITE_GMG) {
+
+ if (vol->sprite_enable == GST_MPEG4_SPRITE_GMG)
+ CHECK_REMAINING (&br, 9);
+ else {
+ CHECK_REMAINING (&br, 65);
+
+ vol->sprite_width = gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+
+ vol->sprite_height = gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+
+ vol->sprite_left_coordinate =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+
+ vol->sprite_top_coordinate =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+ }
+ vol->no_of_sprite_warping_points =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 6);
+ vol->sprite_warping_accuracy =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 2);
+ vol->sprite_brightness_change =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+
+ if (vol->sprite_enable != GST_MPEG4_SPRITE_GMG)
+ vol->low_latency_sprite_enable =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+ }
+
+ if (vol->shape != GST_MPEG4_RECTANGULAR)
+ READ_UINT8 (&br, vol->sadct_disable, 1);
+
+ READ_UINT8 (&br, vol->not_8_bit, 1);
+ if (vol->not_8_bit) {
+ READ_UINT8 (&br, vol->quant_precision, 4);
+ CHECK_ALLOWED (vol->quant_precision, 3, 9);
+
+ READ_UINT8 (&br, vol->bits_per_pixel, 4);
+ CHECK_ALLOWED (vol->bits_per_pixel, 4, 12);
+ }
+
+ if (vol->shape == GST_MPEG4_GRAYSCALE) {
+ /* We don't actually support it */
+ READ_UINT8 (&br, vol->no_gray_quant_update, 1);
+ READ_UINT8 (&br, vol->composition_method, 1);
+ READ_UINT8 (&br, vol->linear_composition, 1);
+ }
+
+ READ_UINT8 (&br, vol->quant_type, 1);
+ if (vol->quant_type) {
+ if (!parse_quant (&br, vol->intra_quant_mat, default_intra_quant_mat,
+ &vol->load_intra_quant_mat))
+ goto failed;
+
+ if (!parse_quant (&br, vol->non_intra_quant_mat,
+ default_non_intra_quant_mat, &vol->load_non_intra_quant_mat))
+ goto failed;
+
+ if (vol->shape == GST_MPEG4_GRAYSCALE) {
+ /* Something the standard starts to define... */
+ GST_WARNING ("Grayscale shaped not supported");
+ goto failed;
+ }
+
+ } else {
+ memset (&vol->intra_quant_mat, 0, 64);
+ memset (&vol->non_intra_quant_mat, 0, 64);
+ }
+
+ if (vol->verid != 0x1)
+ READ_UINT8 (&br, vol->quarter_sample, 1);
+
+ READ_UINT8 (&br, vol->complexity_estimation_disable, 1);
+ if (!vol->complexity_estimation_disable)
+ goto complexity_estimation_error;
+
+
+ READ_UINT8 (&br, vol->resync_marker_disable, 1);
+ READ_UINT8 (&br, vol->data_partitioned, 1);
+
+ if (vol->data_partitioned)
+ READ_UINT8 (&br, vol->reversible_vlc, 1);
+
+ if (vol->verid != 0x01)
+ READ_UINT8 (&br, vol->newpred_enable, 1);
+
+ if (vol->newpred_enable)
+ /* requested_upstream_message_type and newpred_segment_type */
+ SKIP (&br, 3);
+
+ READ_UINT8 (&br, vol->reduced_resolution_vop_enable, 1);
+
+ READ_UINT8 (&br, vol->scalability, 1);
+ if (vol->scalability) {
+ SKIP (&br, 26); /* Few not needed props */
+ READ_UINT8 (&br, vol->enhancement_type, 1);
+ }
+
+ /* More unused infos */
+ } else if (vol->verid != 0x01) {
+ GST_WARNING ("Binary only shapes not fully supported");
+ goto failed;
+ }
+ /* ... */
+
+ return GST_MPEG4_PARSER_OK;
+
+failed:
+ GST_WARNING ("failed parsing \"Video Object Layer\"");
+ return GST_MPEG4_PARSER_ERROR;
+
+wrong_start_code:
+ GST_WARNING ("got buffer with wrong start code");
+ goto failed;
+
+complexity_estimation_error:
+ GST_WARNING ("don't support complexity estimation");
+ goto failed;
+}
+
+/**
+ * gst_mpeg4_parse_group_of_vop:
+ * @gov: The #GstMpeg4GroupOfVOP structure to fill
+ * @data: The data to parse
+ * @size: The size of the @data to parse
+ *
+ * Parses @data containing the group of video object plane packet, and fills
+ * the @gov structure.
+ *
+ * Returns: a #GstMpeg4ParseResult
+ */
+GstMpeg4ParseResult
+gst_mpeg4_parse_group_of_vop (GstMpeg4GroupOfVOP *
+ gov, const guint8 * data, gsize size)
+{
+ guint8 gov_start_code;
+ GstBitReader br = GST_BIT_READER_INIT (data, size);
+
+ g_return_val_if_fail (gov != NULL, GST_MPEG4_PARSER_ERROR);
+
+ READ_UINT8 (&br, gov_start_code, 8);
+ if (gov_start_code != GST_MPEG4_GROUP_OF_VOP)
+ goto wrong_start_code;
+
+ CHECK_REMAINING (&br, 65);
+
+ gov->hours = gst_bit_reader_get_bits_uint8_unchecked (&br, 5);
+ gov->minutes = gst_bit_reader_get_bits_uint8_unchecked (&br, 6);
+ /* marker bit */
+ MARKER_UNCHECKED (&br);
+ gov->seconds = gst_bit_reader_get_bits_uint8_unchecked (&br, 6);
+
+ gov->closed = gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+ gov->broken_link = gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+
+ return GST_MPEG4_PARSER_OK;
+
+failed:
+ GST_WARNING ("failed parsing \"Group of Video Object Plane\"");
+ return GST_MPEG4_PARSER_ERROR;
+
+wrong_start_code:
+ GST_WARNING ("got buffer with wrong start code");
+ goto failed;
+}
+
+/**
+ * gst_mpeg4_parse_video_object_plane:
+ * @vop: The #GstMpeg4VideoObjectPlane currently being parsed
+ * @sprite_trajectory: A #GstMpeg4SpriteTrajectory to fill or %NULL
+ * @vol: The #GstMpeg4VideoObjectLayer structure to fill
+ * @data: The data to parse
+ * @size: The size of the @data to parse
+ *
+ * Parses @data containing the video object plane packet, and fills the @vol
+ * structure.
+ *
+ * Returns: a #GstMpeg4ParseResult
+ */
+GstMpeg4ParseResult
+gst_mpeg4_parse_video_object_plane (GstMpeg4VideoObjectPlane * vop,
+ GstMpeg4SpriteTrajectory * sprite_trajectory,
+ GstMpeg4VideoObjectLayer * vol, const guint8 * data, gsize size)
+{
+ guint8 vop_start_code, coding_type, modulo_time_base;
+ GstBitReader br = GST_BIT_READER_INIT (data, size);
+
+ g_return_val_if_fail (vop != NULL, GST_MPEG4_PARSER_ERROR);
+
+ if (vol->shape == GST_MPEG4_BINARY_ONLY) {
+ /* TODO: implement binary only shapes */
+ GST_WARNING ("Binary only shapes not supported");
+ goto failed;
+ }
+
+ READ_UINT8 (&br, vop_start_code, 8);
+ if (vop_start_code != GST_MPEG4_VIDEO_OBJ_PLANE)
+ goto wrong_start_code;
+
+
+ /* set default values */
+ vop->modulo_time_base = 0;
+ vop->rounding_type = 0;
+ vop->top_field_first = 1;
+ vop->alternate_vertical_scan_flag = 0;
+ vop->fcode_forward = 1;
+ vop->fcode_backward = 1;
+
+ /* Compute macroblock informations */
+ if (vol->interlaced)
+ vop->mb_height = (2 * (vol->height + 31) / 32);
+ else
+ vop->mb_height = (vol->height + 15) / 16;
+
+ vop->mb_width = (vol->width + 15) / 16;
+ vop->mb_num = vop->mb_height * vop->mb_width;
+
+ READ_UINT8 (&br, coding_type, 2);
+ vop->coding_type = coding_type;
+
+ READ_UINT8 (&br, modulo_time_base, 1);
+ while (modulo_time_base) {
+ vop->modulo_time_base++;
+
+ READ_UINT8 (&br, modulo_time_base, 1);
+ }
+
+ CHECK_REMAINING (&br, vol->vop_time_increment_bits + 3);
+
+ MARKER_UNCHECKED (&br);
+ vop->time_increment =
+ gst_bit_reader_get_bits_uint16_unchecked (&br,
+ vol->vop_time_increment_bits);
+ MARKER_UNCHECKED (&br);
+
+ vop->coded = gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+ if (!vop->coded)
+ return GST_MPEG4_PARSER_OK;
+
+ if (vol->newpred_enable) {
+ guint16 nbbits =
+ vop->time_increment + 3 < 15 ? vop->time_increment + 3 : 15;
+
+ READ_UINT16 (&br, vop->id, nbbits);
+ READ_UINT8 (&br, vop->id_for_prediction_indication, 1);
+ if (vop->id_for_prediction_indication) {
+ /* Would be nice if the standard actually told us... */
+ READ_UINT16 (&br, vop->id, nbbits);
+ CHECK_MARKER (&br);
+ }
+ }
+
+ if (vol->shape != GST_MPEG4_BINARY_ONLY &&
+ (vop->coding_type == GST_MPEG4_P_VOP ||
+ (vop->coding_type == GST_MPEG4_S_VOP &&
+ vol->sprite_enable == GST_MPEG4_SPRITE_GMG)))
+ READ_UINT8 (&br, vop->rounding_type, 1);
+
+ if ((vol->reduced_resolution_vop_enable) &&
+ (vol->shape == GST_MPEG4_RECTANGULAR ||
+ (vop->coding_type = GST_MPEG4_P_VOP ||
+ vop->coding_type == GST_MPEG4_I_VOP)))
+ READ_UINT8 (&br, vop->reduced_resolution, 1);
+
+ if (vol->shape != GST_MPEG4_RECTANGULAR) {
+ if (vol->sprite_enable == GST_MPEG4_SPRITE_STATIC &&
+ vop->coding_type == GST_MPEG4_I_VOP) {
+ CHECK_REMAINING (&br, 55);
+
+ vop->width = gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+
+ vop->height = gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+
+ vop->horizontal_mc_spatial_ref =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+
+ vop->vertical_mc_spatial_ref =
+ gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
+ MARKER_UNCHECKED (&br);
+
+ /* Recompute the Macroblock informations
+ * accordingly to the new values */
+ if (vol->interlaced)
+ vop->mb_height = (2 * (vol->height + 31) / 32);
+ else
+ vop->mb_height = (vol->height + 15) / 16;
+
+ vop->mb_width = (vol->width + 15) / 16;
+ vop->mb_num = vop->mb_height * vop->mb_width;
+ }
+
+ if ((vol->shape != GST_MPEG4_BINARY_ONLY) &&
+ vol->scalability && vol->enhancement_type)
+ READ_UINT8 (&br, vop->background_composition, 1);
+
+ READ_UINT8 (&br, vop->change_conv_ratio_disable, 1);
+
+ READ_UINT8 (&br, vop->constant_alpha, 1);
+ if (vop->constant_alpha)
+ READ_UINT8 (&br, vop->constant_alpha_value, 1);
+ }
+
+ if (vol->shape != GST_MPEG4_BINARY_ONLY) {
+ if (!vol->complexity_estimation_disable) {
+ GST_WARNING ("Complexity estimation not supported");
+ goto failed;
+ }
+
+ READ_UINT8 (&br, vop->intra_dc_vlc_thr, 3);
+
+ if (vol->interlaced) {
+ READ_UINT8 (&br, vop->top_field_first, 1);
+ READ_UINT8 (&br, vop->alternate_vertical_scan_flag, 1);
+ }
+ }
+
+ if ((vol->sprite_enable == GST_MPEG4_SPRITE_STATIC ||
+ vol->sprite_enable == GST_MPEG4_SPRITE_GMG) &&
+ vop->coding_type == GST_MPEG4_S_VOP) {
+
+ /* only if @sprite_trajectory is not NULL we parse it */
+ if (sprite_trajectory && vol->no_of_sprite_warping_points)
+ parse_sprite_trajectory (&br, sprite_trajectory,
+ vol->no_of_sprite_warping_points);
+
+ if (vol->sprite_brightness_change) {
+ GST_WARNING ("sprite_brightness_change not supported");
+ goto failed;
+ }
+
+ if (vol->sprite_enable == GST_MPEG4_SPRITE_STATIC) {
+ GST_WARNING ("sprite enable static not supported");
+ goto failed;
+ }
+ }
+
+ if (vol->shape != GST_MPEG4_BINARY_ONLY) {
+ READ_UINT16 (&br, vop->quant, vol->quant_precision);
+
+ if (vol->shape == GST_MPEG4_GRAYSCALE) {
+ /* TODO implement grayscale support */
+ GST_WARNING ("Grayscale shapes no supported");
+
+ /* TODO implement me */
+ goto failed;
+ }
+
+ if (vop->coding_type != GST_MPEG4_I_VOP) {
+ READ_UINT8 (&br, vop->fcode_forward, 3);
+ CHECK_ALLOWED (vop->fcode_forward, 1, 7);
+ }
+
+ if (vop->coding_type == GST_MPEG4_B_VOP) {
+ READ_UINT8 (&br, vop->fcode_backward, 3);
+ CHECK_ALLOWED (vop->fcode_backward, 1, 7);
+ }
+ }
+
+ if (!vol->scalability) {
+ if (vol->shape != GST_MPEG4_RECTANGULAR)
+ READ_UINT8 (&br, vop->shape_coding_type, 1);
+
+ } else {
+ if (vol->enhancement_type) {
+ READ_UINT8 (&br, vop->load_backward_shape, 1);
+
+ if (vop->load_backward_shape) {
+ GST_WARNING ("Load backward shape not supported");
+ goto failed;
+ }
+
+ READ_UINT8 (&br, vop->ref_select_code, 2);
+ }
+ }
+
+ vop->size = gst_bit_reader_get_pos (&br);
+ /* More things to possibly parse ... */
+
+ return GST_MPEG4_PARSER_OK;
+
+failed:
+ GST_WARNING ("failed parsing \"Video Object Plane\"");
+ return GST_MPEG4_PARSER_ERROR;
+
+wrong_start_code:
+ GST_WARNING ("got buffer with wrong start code");
+ goto failed;
+}
+
+/**
+ * gst_mpeg4_parse_video_plane_with_short_header:
+ * @shorthdr: The #GstMpeg4VideoPlaneShortHdr to parse
+ * @data: The data to parse
+ * @size: The size of the @data to parse
+ */
+GstMpeg4ParseResult
+gst_mpeg4_parse_video_plane_short_header (GstMpeg4VideoPlaneShortHdr *
+ shorthdr, const guint8 * data, gsize size)
+{
+ guint8 zero_bits;
+ guint32 gob_resync;
+
+ GstBitReader br = GST_BIT_READER_INIT (data, size);
+
+ g_return_val_if_fail (shorthdr != NULL, GST_MPEG4_PARSER_ERROR);
+
+ if (gst_bit_reader_get_remaining (&br) < 26)
+ goto failed;
+
+ shorthdr->temporal_reference =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 8);
+ CHECK_MARKER (&br);
+ shorthdr->split_screen_indicator =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+ shorthdr->document_camera_indicator =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+ shorthdr->full_picture_freeze_release =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+ shorthdr->source_format = gst_bit_reader_get_bits_uint8_unchecked (&br, 3);
+
+ /* Set parameters/Table 6-25 */
+ switch (shorthdr->source_format) {
+ case 0x01:
+ shorthdr->vop_width = 128;
+ shorthdr->vop_height = 96;
+ shorthdr->num_macroblocks_in_gob = 8;
+ shorthdr->num_gobs_in_vop = 6;
+ case 0x02:
+ shorthdr->vop_width = 176;
+ shorthdr->vop_height = 144;
+ shorthdr->num_macroblocks_in_gob = 11;
+ shorthdr->num_gobs_in_vop = 9;
+ case 0x03:
+ shorthdr->vop_width = 352;
+ shorthdr->vop_height = 288;
+ shorthdr->num_macroblocks_in_gob = 22;
+ shorthdr->num_gobs_in_vop = 18;
+ case 0x04:
+ shorthdr->vop_width = 704;
+ shorthdr->vop_height = 576;
+ shorthdr->num_macroblocks_in_gob = 88;
+ shorthdr->num_gobs_in_vop = 18;
+ case 0x05:
+ shorthdr->vop_width = 1408;
+ shorthdr->vop_height = 1152;
+ shorthdr->num_macroblocks_in_gob = 352;
+ shorthdr->num_gobs_in_vop = 18;
+ default:
+ shorthdr->vop_width = 0;
+ shorthdr->vop_height = 0;
+ shorthdr->num_macroblocks_in_gob = 0;
+ shorthdr->num_gobs_in_vop = 0;
+ }
+
+ shorthdr->picture_coding_type =
+ gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+ zero_bits = gst_bit_reader_get_bits_uint8_unchecked (&br, 4);
+
+ if (zero_bits != 0x00)
+ goto failed;
+
+ shorthdr->vop_quant = gst_bit_reader_get_bits_uint8_unchecked (&br, 5);
+ zero_bits = gst_bit_reader_get_bits_uint8_unchecked (&br, 1);
+
+ if (zero_bits != 0x00)
+ goto failed;
+
+ do {
+ READ_UINT8 (&br, shorthdr->pei, 1);
+
+ if (shorthdr->pei == 1)
+ READ_UINT8 (&br, shorthdr->psupp, 8);
+
+ } while (shorthdr->pei == 1);
+
+ if (!gst_bit_reader_peek_bits_uint32 (&br, &gob_resync, 17))
+ goto failed;
+
+ /* gob_layer() */
+
+ /* Setting default values */
+ shorthdr->gob_header_empty = 1;
+ shorthdr->gob_number = 0;
+ shorthdr->gob_frame_id = 0;
+ shorthdr->quant_scale = 0;
+
+ if (gob_resync == 0x01) {
+ shorthdr->gob_header_empty = 0;
+
+ gst_bit_reader_skip_unchecked (&br, 17);
+ READ_UINT8 (&br, shorthdr->gob_number, 5);
+ READ_UINT8 (&br, shorthdr->gob_frame_id, 2);
+ READ_UINT8 (&br, shorthdr->quant_scale, 5);
+ }
+
+ shorthdr->size = gst_bit_reader_get_pos (&br);
+
+ return GST_MPEG4_PARSER_OK;
+
+failed:
+ GST_WARNING ("Could not parse the Plane short header");
+
+ return GST_MPEG4_PARSER_ERROR;
+}
+
+/**
+ * gst_mpeg4_parse_video_packet_header:
+ * @videopackethdr: The #GstMpeg4VideoPacketHdr structure to fill
+ * @vol: The last parsed #GstMpeg4VideoObjectLayer, will be updated
+ * with the informations found during the parsing
+ * @vop: The last parsed #GstMpeg4VideoObjectPlane, will be updated
+ * with the informations found during the parsing
+ * @sprite_trajectory: A #GstMpeg4SpriteTrajectory to fill or %NULL
+ * with the informations found during the parsing
+ * @data: The data to parse, should be set after the resync marker.
+ * @size: The size of the data to parse
+ *
+ * Parsers @data containing the video packet header
+ * and fills the @videopackethdr structure
+ */
+GstMpeg4ParseResult
+gst_mpeg4_parse_video_packet_header (GstMpeg4VideoPacketHdr * videopackethdr,
+ GstMpeg4VideoObjectLayer * vol, GstMpeg4VideoObjectPlane * vop,
+ GstMpeg4SpriteTrajectory * sprite_trajectory, const guint8 * data,
+ gsize size)
+{
+ guint8 markersize;
+ GstBitReader br = GST_BIT_READER_INIT (data, size);
+
+ g_return_val_if_fail (videopackethdr != NULL, GST_MPEG4_PARSER_ERROR);
+ g_return_val_if_fail (vol != NULL, GST_MPEG4_PARSER_ERROR);
+
+ markersize = compute_resync_marker_size (vop, NULL, NULL);
+
+ CHECK_REMAINING (&br, markersize);
+
+ if (gst_bit_reader_get_bits_uint32_unchecked (&br, markersize + 1) != 0x01)
+ goto failed;
+
+ if (vol->shape != GST_MPEG4_RECTANGULAR) {
+ READ_UINT8 (&br, videopackethdr->header_extension_code, 1);
+ if (vol->sprite_enable == GST_MPEG4_SPRITE_STATIC &&
+ vop->coding_type == GST_MPEG4_I_VOP) {
+
+ CHECK_REMAINING (&br, 56);
+
+ U_READ_UINT16 (&br, vop->width, 13);
+ CHECK_MARKER (&br);
+ U_READ_UINT16 (&br, vop->height, 13);
+ CHECK_MARKER (&br);
+ U_READ_UINT16 (&br, vop->horizontal_mc_spatial_ref, 13);
+ CHECK_MARKER (&br);
+ U_READ_UINT16 (&br, vop->vertical_mc_spatial_ref, 13);
+ CHECK_MARKER (&br);
+
+ /* Update macroblock infirmations */
+ vop->mb_height = (vop->height + 15) / 16;
+ vop->mb_width = (vop->width + 15) / 16;
+ vop->mb_num = vop->mb_height * vop->mb_width;
+ }
+ }
+
+ READ_UINT16 (&br, videopackethdr->macroblock_number,
+ g_bit_storage (vop->mb_num - 1));
+
+ if (vol->shape != GST_MPEG4_BINARY_ONLY)
+ READ_UINT16 (&br, videopackethdr->quant_scale, vol->quant_precision);
+
+ if (vol->shape == GST_MPEG4_RECTANGULAR)
+ READ_UINT8 (&br, videopackethdr->header_extension_code, 1);
+
+ if (videopackethdr->header_extension_code) {
+ guint timeincr = 0;
+ guint8 bit = 0, coding_type;
+
+ do {
+ READ_UINT8 (&br, bit, 1);
+ timeincr++;
+ } while (bit);
+
+ vol->vop_time_increment_bits = timeincr;
+
+ CHECK_MARKER (&br);
+ READ_UINT16 (&br, vop->time_increment, timeincr);
+ CHECK_MARKER (&br);
+ READ_UINT8 (&br, coding_type, 2);
+ vop->coding_type = coding_type;
+
+ if (vol->shape != GST_MPEG4_RECTANGULAR) {
+ READ_UINT8 (&br, vop->change_conv_ratio_disable, 1);
+ if (vop->coding_type != GST_MPEG4_I_VOP)
+ READ_UINT8 (&br, vop->shape_coding_type, 1);
+ }
+
+ if (vol->shape != GST_MPEG4_BINARY_ONLY) {
+ READ_UINT8 (&br, vop->intra_dc_vlc_thr, 3);
+
+ if (sprite_trajectory && vol->sprite_enable == GST_MPEG4_SPRITE_GMG &&
+ vop->coding_type == GST_MPEG4_S_VOP &&
+ vol->no_of_sprite_warping_points > 0) {
+
+ parse_sprite_trajectory (&br, sprite_trajectory,
+ vol->no_of_sprite_warping_points);
+ }
+
+ if (vol->reduced_resolution_vop_enable &&
+ vol->shape == GST_MPEG4_RECTANGULAR &&
+ (vop->coding_type == GST_MPEG4_P_VOP ||
+ vop->coding_type == GST_MPEG4_I_VOP))
+ READ_UINT8 (&br, vop->reduced_resolution, 1);
+
+ if (vop->coding_type != GST_MPEG4_I_VOP) {
+ READ_UINT8 (&br, vop->fcode_forward, 3);
+ CHECK_ALLOWED (vop->fcode_forward, 1, 7);
+ }
+
+ if (vop->coding_type == GST_MPEG4_B_VOP) {
+ READ_UINT8 (&br, vop->fcode_backward, 3);
+ CHECK_ALLOWED (vop->fcode_backward, 1, 7);
+ }
+ }
+ }
+
+ if (vol->newpred_enable) {
+ guint16 nbbits =
+ vol->vop_time_increment_bits + 3 < 15 ? vop->time_increment + 3 : 15;
+
+ READ_UINT16 (&br, vop->id, nbbits);
+ READ_UINT8 (&br, vop->id_for_prediction_indication, 1);
+ if (vop->id_for_prediction_indication) {
+ /* Would be nice if the standard actually told us... */
+ READ_UINT16 (&br, vop->id, nbbits);
+ CHECK_MARKER (&br);
+ }
+ }
+
+ videopackethdr->size = gst_bit_reader_get_pos (&br);
+
+failed:
+ GST_DEBUG ("Failed to parse video packet header");
+
+ return GST_MPEG4_PARSER_NO_PACKET;
+}
diff --git a/gst-libs/gst/codecparsers/gstmpeg4parser.h b/gst-libs/gst/codecparsers/gstmpeg4parser.h
new file mode 100644
index 000000000..22f52b5d3
--- /dev/null
+++ b/gst-libs/gst/codecparsers/gstmpeg4parser.h
@@ -0,0 +1,578 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_MPEG4UTIL_H__
+#define __GST_MPEG4UTIL_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbitreader.h>
+
+typedef struct _GstMpeg4VisualObjectSequence GstMpeg4VisualObjectSequence;
+typedef struct _GstMpeg4VisualObject GstMpeg4VisualObject;
+typedef struct _GstMpeg4VideoObjectLayer GstMpeg4VideoObjectLayer;
+typedef struct _GstMpeg4GroupOfVOP GstMpeg4GroupOfVOP;
+typedef struct _GstMpeg4VideoObjectPlane GstMpeg4VideoObjectPlane;
+typedef struct _GstMpeg4VideoSignalType GstMpeg4VideoSignalType;
+typedef struct _GstMpeg4VideoPlaneShortHdr GstMpeg4VideoPlaneShortHdr;
+typedef struct _GstMpeg4VideoPacketHdr GstMpeg4VideoPacketHdr;
+
+typedef struct _GstMpeg4SpriteTrajectory GstMpeg4SpriteTrajectory;
+
+typedef struct _GstMpeg4Packet GstMpeg4Packet;
+
+/**
+ * GstMpeg4StartCode:
+ *
+ * Defines the different startcodes present in the bitstream as
+ * defined in: Table 6-3 — Start code values
+ */
+typedef enum
+{
+ GST_MPEG4_VIDEO_OBJ_FIRST = 0x00,
+ GST_MPEG4_VIDEO_OBJ_LAST = 0x1f,
+ GST_MPEG4_VIDEO_LAYER_FIRST = 0x20,
+ GST_MPEG4_VIDEO_LAYER_LAST = 0x2f,
+ GST_MPEG4_VISUAL_OBJ_SEQ_START = 0xb0,
+ GST_MPEG4_VISUAL_OBJ_SEQ_END = 0xb1,
+ GST_MPEG4_USER_DATA = 0xb2,
+ GST_MPEG4_GROUP_OF_VOP = 0xb3,
+ GST_MPEG4_VIDEO_SESSION_ERR = 0xb4,
+ GST_MPEG4_VISUAL_OBJ = 0xb5,
+ GST_MPEG4_VIDEO_OBJ_PLANE = 0xb6,
+ GST_MPEG4_FBA = 0xba,
+ GST_MPEG4_FBA_PLAN = 0xbb,
+ GST_MPEG4_MESH = 0xbc,
+ GST_MPEG4_MESH_PLAN = 0xbd,
+ GST_MPEG4_STILL_TEXTURE_OBJ = 0xbe,
+ GST_MPEG4_TEXTURE_SPATIAL = 0xbf,
+ GST_MPEG4_TEXTURE_SNR_LAYER = 0xc0,
+ GST_MPEG4_TEXTURE_TILE = 0xc1,
+ GST_MPEG4_SHAPE_LAYER = 0xc2,
+ GST_MPEG4_STUFFING = 0xc3,
+ GST_MPEG4_SYSTEM_FIRST = 0xc6,
+ GST_MPEG4_SYSTEM_LAST = 0xff,
+ GST_MPEG4_RESYNC = 0xfff
+} GstMpeg4StartCode;
+
+/**
+ * GstMpeg4VisualObjectType:
+ *
+ * Defines the different visual object types as
+ * defined in: Table 6-5 -- Meaning of visual object type
+ */
+typedef enum {
+ GST_MPEG4_VIDEO_ID = 0x01,
+ GST_MPEG4_STILL_TEXTURE_ID = 0x02,
+ GST_MPEG4_STILL_MESH_ID = 0x03,
+ GST_MPEG4_STILL_FBA_ID = 0x04,
+ GST_MPEG4_STILL_3D_MESH_ID = 0x05,
+ /*... reserved */
+
+} GstMpeg4VisualObjectType;
+
+/**
+ * GstMpeg4AspectRatioInfo:
+ * @GST_MPEG4_SQUARE: 1:1 square
+ * @GST_MPEG4_625_TYPE_4_3: 12:11 (625-type for 4:3 picture)
+ * @GST_MPEG4_525_TYPE_4_3: 10:11 (525-type for 4:3 picture)
+ * @GST_MPEG4_625_TYPE_16_9: 16:11 (625-type stretched for 16:9 picture)
+ * @GST_MPEG4_525_TYPE_16_9: 40:33 (525-type stretched for 16:9 picture)
+ * @GST_MPEG4_EXTENDED_PAR: Extended par
+ *
+ * Defines the different pixel aspect ratios as
+ * defined in: Table 6-12 -- Meaning of pixel aspect ratio
+ */
+typedef enum {
+ GST_MPEG4_SQUARE = 0x01,
+ GST_MPEG4_625_TYPE_4_3 = 0x02,
+ GST_MPEG4_525_TYPE_4_3 = 0x03,
+ GST_MPEG4_625_TYPE_16_9 = 0x04,
+ GST_MPEG4_525_TYPE_16_9 = 0x05,
+ GST_MPEG4_EXTENDED_PAR = 0x0f,
+} GstMpeg4AspectRatioInfo;
+
+/**
+ * GstMpeg4ParseResult:
+ * @GST_MPEG4_PARSER_OK: The parsing went well
+ * @GST_MPEG4_PARSER_BROKEN_DATA: The bitstream was broken
+ * @GST_MPEG4_PARSER_NO_PACKET: There was no packet in the buffer
+ * @GST_MPEG4_PARSER_NO_PACKET_END: There was no packet end in the buffer
+ * @GST_MPEG4_PARSER_NO_PACKET_ERROR: An error accured durint the parsing
+ *
+ * Result type of any parsing function.
+ */
+typedef enum {
+ GST_MPEG4_PARSER_OK,
+ GST_MPEG4_PARSER_BROKEN_DATA,
+ GST_MPEG4_PARSER_NO_PACKET,
+ GST_MPEG4_PARSER_NO_PACKET_END,
+ GST_MPEG4_PARSER_ERROR,
+} GstMpeg4ParseResult;
+
+/**
+ * GstMpeg4VideoObjectCodingType:
+ * @GST_MPEG4_I_VOP: intra-coded (I)
+ * @GST_MPEG4_P_VOP: predictive-coded (P)
+ * @GST_MPEG4_B_VOP: bidirectionally-predictive-coded (B)
+ * @GST_MPEG4_S_VOP: sprite (S)
+ *
+ * The vop coding types as defined in:
+ * Table 6-20 -- Meaning of vop_coding_type
+ */
+typedef enum {
+ GST_MPEG4_I_VOP = 0x0,
+ GST_MPEG4_P_VOP = 0x1,
+ GST_MPEG4_B_VOP = 0x2,
+ GST_MPEG4_S_VOP = 0x3
+} GstMpeg4VideoObjectCodingType;
+
+/**
+ * GstMpeg4ChromaFormat
+ *
+ * The chroma format in use as
+ * defined in: Table 6-13 -- Meaning of chroma_format
+ */
+typedef enum {
+ /* Other value are reserved */
+ GST_MPEG4_CHROMA_4_2_0 = 0x01
+} GstMpeg4ChromaFormat;
+
+/**
+ * GstMpeg4VideoObjectLayerShape:
+ *
+ * The different video object layer shapes as defined in:
+ * Table 6-16 — Video Object Layer shape type
+ */
+typedef enum {
+ GST_MPEG4_RECTANGULAR,
+ GST_MPEG4_BINARY,
+ GST_MPEG4_BINARY_ONLY,
+ GST_MPEG4_GRAYSCALE
+} GstMpeg4VideoObjectLayerShape;
+
+/**
+ * GstMpeg4SpriteEnable:
+ *
+ * Indicates the usage of static sprite coding
+ * or global motion compensation (GMC) as defined in:
+ * Table V2 - 2 -- Meaning of sprite_enable codewords
+ */
+typedef enum {
+ GST_MPEG4_SPRITE_UNUSED,
+ GST_MPEG4_SPRITE_STATIC,
+ GST_MPEG4_SPRITE_GMG
+} GstMpeg4SpriteEnable;
+
+/**
+ * GstMpeg4Profile:
+ *
+ * Different defined profiles as defined in:
+ * 9- Profiles and levels
+ *
+ * It is computed using:
+ * Table G.1 — FLC table for profile_and_level_indication
+ */
+typedef enum {
+ GST_MPEG4_PROFILE_CORE,
+ GST_MPEG4_PROFILE_MAIN,
+ GST_MPEG4_PROFILE_N_BIT,
+ GST_MPEG4_PROFILE_SIMPLE,
+ GST_MPEG4_PROFILE_HYBRID,
+ GST_MPEG4_PROFILE_RESERVED,
+ GST_MPEG4_PROFILE_SIMPLE_FBA,
+ GST_MPEG4_PROFILE_CORE_STUDIO,
+ GST_MPEG4_PROFILE_SIMPLE_STUDIO,
+ GST_MPEG4_PROFILE_CORE_SCALABLE,
+ GST_MPEG4_PROFILE_ADVANCED_CORE,
+ GST_MPEG4_PROFILE_ADVANCED_SIMPLE,
+ GST_MPEG4_PROFILE_SIMPLE_SCALABLE,
+ GST_MPEG4_PROFILE_SCALABLE_TEXTURE,
+ GST_MPEG4_PROFILE_SIMPLE_FACE_ANIMATION,
+ GST_MPEG4_PROFILE_BASIC_ANIMATED_TEXTURE,
+ GST_MPEG4_PROFILE_ADVANCED_REALTIME_SIMPLE,
+ GST_MPEG4_PROFILE_ADVANCED_SCALABLE_TEXTURE,
+ GST_MPEG4_PROFILE_FINE_GRANULARITY_SCALABLE,
+ GST_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY
+} GstMpeg4Profile;
+
+/**
+ * GstMpeg4Level:
+ *
+ * Different levels as defined in:
+ * 9- Profiles and levels
+ *
+ * It is computed using:
+ * Table G.1 — FLC table for profile_and_level_indication
+ */
+typedef enum {
+ GST_MPEG4_LEVEL0,
+ GST_MPEG4_LEVEL1,
+ GST_MPEG4_LEVEL2,
+ GST_MPEG4_LEVEL3,
+ GST_MPEG4_LEVEL3b,
+ GST_MPEG4_LEVEL4,
+ GST_MPEG4_LEVEL5,
+ GST_MPEG4_LEVEL_RESERVED
+} GstMpeg4Level;
+
+/**
+ * GstMpeg4VisualObjectSequence:
+ *
+ * The visual object sequence structure as defined in:
+ * 6.2.2 Visual Object Sequence and Visual Object
+ */
+struct _GstMpeg4VisualObjectSequence {
+ guint8 profile_and_level_indication;
+
+ /* Computed according to:
+ * Table G.1 — FLC table for profile_and_level_indication */
+ GstMpeg4Level level;
+ GstMpeg4Profile profile;
+};
+
+/**
+ * The visual object structure as defined in:
+ * 6.2.2 Visual Object Sequence and Visual Object
+ */
+struct _GstMpeg4VisualObject {
+ guint8 is_identifier;
+ /* If is_identifier */
+ guint8 verid;
+ guint8 priority;
+
+ GstMpeg4VisualObjectType type;
+};
+
+/**
+ * GstMpeg4VideoSignalType:
+ *
+ * The video signal type structure as defined in:
+ * 6.2.2 Visual Object Sequence and Visual Object.
+ */
+struct _GstMpeg4VideoSignalType {
+ guint8 type;
+
+ guint8 format;
+ guint8 range;
+ guint8 color_description;
+ guint8 color_primaries;
+ guint8 transfer_characteristics;
+ guint8 matrix_coefficients;
+};
+
+/**
+ * GstMpeg4VideoPlaneShortHdr:
+ *
+ * The video plane short header structure as defined in:
+ * 6.2.5.2 Video Plane with Short Header
+ */
+struct _GstMpeg4VideoPlaneShortHdr {
+ guint8 temporal_reference;
+ guint8 split_screen_indicator;
+ guint8 document_camera_indicator;
+ guint8 full_picture_freeze_release;
+ guint8 source_format;
+ guint8 picture_coding_type;
+ guint8 vop_quant;
+ guint8 pei;
+ guint8 psupp;
+
+ /* Gob layer specific fields */
+ guint8 gob_header_empty;
+ guint8 gob_number;
+ guint8 gob_frame_id;
+ guint8 quant_scale;
+
+ /* Computed
+ * If all the values are set to 0, then it is reserved
+ * Table 6-25 -- Parameters Defined by source_format Field
+ */
+ guint16 vop_width;
+ guint16 vop_height;
+ guint16 num_macroblocks_in_gob;
+ guint8 num_gobs_in_vop;
+
+ /* The size in bits */
+ guint size;
+};
+
+/**
+ * GstMpeg4VideoObjectLayer:
+ *
+ * The video object layer structure as defined in:
+ * 6.2.3 Video Object Layer
+ */
+struct _GstMpeg4VideoObjectLayer {
+ guint8 random_accessible_vol;
+ guint8 video_object_type_indication;
+
+ guint8 is_object_layer_identifier;
+ /* if is_object_layer_identifier */
+ guint8 verid;
+ guint8 priority;
+
+ GstMpeg4AspectRatioInfo aspect_ratio_info;
+ guint8 par_width;
+ guint8 par_height;
+
+ guint8 control_parameters;
+ /* if control_parameters */
+ GstMpeg4ChromaFormat chroma_format;
+ guint8 low_delay;
+ guint8 vbv_parameters;
+ /* if vbv_parameters */
+ guint16 first_half_bitrate;
+ guint16 latter_half_bitrate;
+ guint16 first_half_vbv_buffer_size;
+ guint16 latter_half_vbv_buffer_size;
+ guint16 first_half_vbv_occupancy;
+ guint16 latter_half_vbv_occupancy;
+
+ /* Computed values */
+ guint32 bit_rate;
+ guint32 vbv_buffer_size;
+
+ GstMpeg4VideoObjectLayerShape shape;
+ /* if shape == GST_MPEG4_GRAYSCALE && verid =! 1 */
+ guint8 shape_extension;
+
+ guint16 vop_time_increment_resolution;
+ guint8 vop_time_increment_bits;
+ guint8 fixed_vop_rate;
+ /* if fixed_vop_rate */
+ guint16 fixed_vop_time_increment;
+
+ guint16 width;
+ guint16 height;
+ guint8 interlaced;
+ guint8 obmc_disable;
+
+ GstMpeg4SpriteEnable sprite_enable;
+ /* if vol->sprite_enable == SPRITE_GMG or SPRITE_STATIC*/
+ /* if vol->sprite_enable != GST_MPEG4_SPRITE_GMG */
+ guint16 sprite_width;
+ guint16 sprite_height;
+ guint16 sprite_left_coordinate;
+ guint16 sprite_top_coordinate;
+
+ guint8 no_of_sprite_warping_points;
+ guint8 sprite_warping_accuracy;
+ guint8 sprite_brightness_change;
+ /* if vol->sprite_enable != GST_MPEG4_SPRITE_GMG */
+ guint8 low_latency_sprite_enable;
+
+ /* if shape != GST_MPEG4_RECTANGULAR */
+ guint8 sadct_disable;
+
+ guint8 not_8_bit;
+
+ /* if no_8_bit */
+ guint8 quant_precision;
+ guint8 bits_per_pixel;
+
+ /* if shape == GRAYSCALE */
+ guint8 no_gray_quant_update;
+ guint8 composition_method;
+ guint8 linear_composition;
+
+ guint8 quant_type;
+ /* if quant_type */
+ guint8 load_intra_quant_mat;
+ guint8 intra_quant_mat[64];
+ guint8 load_non_intra_quant_mat;
+ guint8 non_intra_quant_mat[64];
+
+ guint8 quarter_sample;
+ guint8 complexity_estimation_disable;
+ guint8 resync_marker_disable;
+ guint8 data_partitioned;
+ guint8 reversible_vlc;
+ guint8 newpred_enable;
+ guint8 reduced_resolution_vop_enable;
+ guint8 scalability;
+ guint8 enhancement_type;
+
+ GstMpeg4VideoPlaneShortHdr short_hdr;
+};
+
+/**
+ * GstMpeg4SpriteTrajectory:
+ *
+ * The sprite trajectory structure as defined in:
+ * 7.8.4 Sprite reference point decoding and
+ * 6.2.5.4 Sprite coding
+ */
+struct _GstMpeg4SpriteTrajectory {
+ guint16 vop_ref_points[63]; /* Defined as "du" in 6.2.5.4 */
+ guint16 sprite_ref_points[63]; /* Defined as "dv" in 6.2.5.4 */
+};
+
+/**
+ * GstMpeg4GroupOfVOP:
+ *
+ * The group of video object plane structure as defined in:
+ * 6.2.4 Group of Video Object Plane
+ */
+struct _GstMpeg4GroupOfVOP {
+ guint8 hours;
+ guint8 minutes;
+ guint8 seconds;
+
+ guint8 closed;
+ guint8 broken_link;
+};
+
+/**
+ * GstMpeg4VideoObjectPlane:
+ *
+ * The Video object plane structure as defined in:
+ * 6.2.5 Video Object Plane and Video Plane with Short Header
+ */
+struct _GstMpeg4VideoObjectPlane {
+ GstMpeg4VideoObjectCodingType coding_type;
+
+ guint8 modulo_time_base;
+ guint16 time_increment;
+
+ guint8 coded;
+ /* if newpred_enable */
+ guint16 id;
+ guint8 id_for_prediction_indication;
+ guint16 id_for_prediction;
+
+ guint16 width;
+ guint16 height;
+ guint16 horizontal_mc_spatial_ref;
+ guint16 vertical_mc_spatial_ref;
+
+ guint8 rounding_type;
+ /*if vol->shape != GST_MPEG4_RECTANGULAR */
+ guint8 background_composition;
+ guint8 change_conv_ratio_disable;
+ guint8 constant_alpha;
+ guint8 constant_alpha_value;
+ guint8 reduced_resolution;
+
+ guint8 intra_dc_vlc_thr;
+
+
+ guint8 top_field_first;
+ guint8 alternate_vertical_scan_flag;
+
+ guint16 quant;
+
+ guint8 fcode_forward;
+ guint8 fcode_backward;
+
+ guint8 shape_coding_type;
+ guint8 load_backward_shape;
+ guint8 ref_select_code;
+
+ /* Computed macroblock informations */
+ guint16 mb_height;
+ guint16 mb_width;
+ guint mb_num;
+
+ /* The size of the header */
+ guint size;
+};
+
+/**
+ * GstMpeg4VideoPacketHdr:
+ * @size: Size of the header in bit.
+ *
+ * The video packet header structure as defined in:
+ * 6.2.5.2 Video Plane with Short Header
+ */
+struct _GstMpeg4VideoPacketHdr {
+ guint8 header_extension_code;
+ guint16 macroblock_number;
+ guint16 quant_scale;
+ guint size;
+};
+
+/**
+ * GstMpeg4Packet:
+ * @type: the type of the packet that start at @offset
+ * @data: the data of the packet, statring at @offset
+ * @offset: offset of the start of the packet (without the 3 bytes startcode), but
+ * including the #GstMpeg4StartCode byte.
+ * @size: The size in bytes of the packet or %G_MAXUINT if the end wasn't found.
+ * @marker_size: The size in bit of the resync marker.
+ *
+ * A structure that contains the type of a packet, its offset and its size
+ */
+struct _GstMpeg4Packet
+{
+ const guint8 *data;
+ guint offset;
+ gsize size;
+ guint marker_size;
+
+ GstMpeg4StartCode type;
+};
+
+GstMpeg4ParseResult gst_h263_parse (GstMpeg4Packet * packet,
+ const guint8 * data, guint offset,
+ gsize size);
+
+
+GstMpeg4ParseResult gst_mpeg4_parse (GstMpeg4Packet * packet,
+ gboolean skip_user_data,
+ GstMpeg4VideoObjectPlane *vop,
+ const guint8 * data, guint offset,
+ gsize size);
+
+GstMpeg4ParseResult
+gst_mpeg4_parse_video_object_plane (GstMpeg4VideoObjectPlane *vop,
+ GstMpeg4SpriteTrajectory *sprite_trajectory,
+ GstMpeg4VideoObjectLayer *vol,
+ const guint8 * data,
+ gsize size);
+
+GstMpeg4ParseResult
+gst_mpeg4_parse_group_of_vop (GstMpeg4GroupOfVOP *gov,
+ const guint8 * data, gsize size);
+
+GstMpeg4ParseResult
+gst_mpeg4_parse_video_object_layer (GstMpeg4VideoObjectLayer *vol,
+ GstMpeg4VisualObject *vo,
+ const guint8 * data, gsize size);
+
+GstMpeg4ParseResult
+gst_mpeg4_parse_visual_object (GstMpeg4VisualObject *vo,
+ GstMpeg4VideoSignalType *signal_type,
+ const guint8 * data, gsize size);
+
+GstMpeg4ParseResult
+gst_mpeg4_parse_visual_object_sequence (GstMpeg4VisualObjectSequence *vos,
+ const guint8 * data, gsize size);
+GstMpeg4ParseResult
+gst_mpeg4_parse_video_plane_short_header (GstMpeg4VideoPlaneShortHdr * shorthdr,
+ const guint8 * data, gsize size);
+
+GstMpeg4ParseResult
+gst_mpeg4_parse_video_packet_header (GstMpeg4VideoPacketHdr * videopackethdr,
+ GstMpeg4VideoObjectLayer * vol,
+ GstMpeg4VideoObjectPlane * vop,
+ GstMpeg4SpriteTrajectory * sprite_trajectory,
+ const guint8 * data, gsize size);
+
+#endif /* __GST_MPEG4UTIL_H__ */
diff --git a/gst-libs/gst/codecparsers/gstmpegvideoparser.c b/gst-libs/gst/codecparsers/gstmpegvideoparser.c
index 85d8b1d63..009c02bd1 100644
--- a/gst-libs/gst/codecparsers/gstmpegvideoparser.c
+++ b/gst-libs/gst/codecparsers/gstmpegvideoparser.c
@@ -40,6 +40,7 @@
#endif
#include "gstmpegvideoparser.h"
+#include "parserutils.h"
#include <string.h>
#include <gst/base/gstbitreader.h>
@@ -47,35 +48,8 @@
#define MARKER_BIT 0x1
-#define GET_BITS(b, num, bits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint32(b, bits, num)) \
- goto failed; \
- GST_TRACE ("parsed %d bits: %d", num, *(bits)); \
-} G_STMT_END
-
-#define READ_UINT8(br, val, nbits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint8 (br, &val, nbits)) { \
- GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
-#define READ_UINT16(br, val, nbits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint16 (br, &val, nbits)) { \
- GST_WARNING ("failed to read uint16, nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
-#define READ_UINT32(br, val, nbits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint32 (br, &val, nbits)) { \
- GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
/* default intra quant matrix, in zig-zag order */
-const guint8 default_intra_quantizer_matrix[64] = {
+static const guint8 default_intra_quantizer_matrix[64] = {
8,
16, 16,
19, 16, 19,
@@ -93,7 +67,7 @@ const guint8 default_intra_quantizer_matrix[64] = {
83
};
-const guint8 mpeg_zigzag_8x8[64] = {
+static const guint8 mpeg_zigzag_8x8[64] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34,
diff --git a/gst-libs/gst/codecparsers/gstvc1parser.c b/gst-libs/gst/codecparsers/gstvc1parser.c
index 7407b5585..8f2937f01 100644
--- a/gst-libs/gst/codecparsers/gstvc1parser.c
+++ b/gst-libs/gst/codecparsers/gstvc1parser.c
@@ -33,6 +33,7 @@
#endif
#include "gstvc1parser.h"
+#include "parserutils.h"
#include <gst/base/gstbytereader.h>
#include <gst/base/gstbitreader.h>
#include <string.h>
@@ -64,50 +65,7 @@ ensure_debug_category (void)
#endif /* GST_DISABLE_GST_DEBUG */
-/* ------------------------------------------------------------------------- */
-
-#define GET_BITS(b, num, bits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint32(b, bits, num)) \
- goto failed; \
- GST_TRACE ("parsed %d bits: %d", num, *(bits)); \
-} G_STMT_END
-
-#define READ_UINT8(br, val, nbits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint8 (br, &val, nbits)) { \
- GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
-#define READ_UINT16(br, val, nbits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint16 (br, &val, nbits)) { \
- GST_WARNING ("failed to read uint16, nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
-#define READ_UINT32(br, val, nbits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint32 (br, &val, nbits)) { \
- GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
-#define SKIP(br, nbits) G_STMT_START { \
- if (!gst_bit_reader_skip (br, nbits)) { \
- GST_WARNING ("Failed to skip nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
-typedef struct _VLCTable
-{
- guint value;
- guint cword;
- guint cbits;
-} VLCTable;
-
-const guint8 vc1_pquant_table[3][32] = {
+static const guint8 vc1_pquant_table[3][32] = {
{ /* Implicit quantizer */
0, 1, 2, 3, 4, 5, 6, 7, 8, 6, 7, 8, 9, 10, 11, 12,
13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 29, 31},
@@ -468,41 +426,6 @@ failed:
}
}
-static gboolean
-decode_vlc (GstBitReader * br, guint * res, const VLCTable * table,
- guint length)
-{
- guint8 i;
- guint cbits = 0;
- guint32 value = 0;
-
- for (i = 0; i < length; i++) {
- if (cbits != table[i].cbits) {
- cbits = table[i].cbits;
- if (!gst_bit_reader_peek_bits_uint32 (br, &value, cbits)) {
- goto failed;
- }
- }
-
- if (value == table[i].cword) {
- SKIP (br, cbits);
- if (res)
- *res = table[i].value;
-
- return TRUE;
- }
- }
-
- GST_DEBUG ("Did not find code");
-
-failed:
- {
- GST_WARNING ("Could not decode VLC returning");
-
- return FALSE;
- }
-}
-
/*** bitplanes decoding ***/
static gboolean
bitplane_decoding (GstBitReader * br, guint8 * data,
@@ -1734,8 +1657,6 @@ gst_vc1_identify_next_bdu (const guint8 * data, gsize size, GstVC1BDU * bdu)
g_return_val_if_fail (bdu != NULL, GST_VC1_PARSER_ERROR);
- ensure_debug_category ();
-
if (size < 4) {
GST_DEBUG ("Can't parse, buffer has too small size %" G_GSSIZE_FORMAT,
size);
@@ -1796,8 +1717,6 @@ gst_vc1_parse_sequence_layer (const guint8 * data, gsize size,
g_return_val_if_fail (seqlayer != NULL, GST_VC1_PARSER_ERROR);
- ensure_debug_category ();
-
READ_UINT32 (&br, tmp, 8);
if (tmp != 0xC5)
goto failed;
@@ -1807,7 +1726,7 @@ gst_vc1_parse_sequence_layer (const guint8 * data, gsize size,
READ_UINT32 (&br, tmp, 32);
if (tmp != 0x04)
goto failed;
-
+
if (parse_sequence_header_struct_c (&br, &seqlayer->struct_c) ==
GST_VC1_PARSER_ERROR)
goto failed;
@@ -1850,9 +1769,6 @@ gst_vc1_parse_sequence_header_struct_a (const guint8 * data,
g_return_val_if_fail (structa != NULL, GST_VC1_PARSER_ERROR);
- ensure_debug_category ();
-
-
return parse_sequence_header_struct_a (&br, structa);
}
@@ -1874,8 +1790,6 @@ gst_vc1_parse_sequence_header_struct_b (const guint8 * data,
g_return_val_if_fail (structb != NULL, GST_VC1_PARSER_ERROR);
- ensure_debug_category ();
-
return parse_sequence_header_struct_b (&br, structb);
}
@@ -1897,8 +1811,6 @@ gst_vc1_parse_sequence_header_struct_c (const guint8 * data, gsize size,
g_return_val_if_fail (structc != NULL, GST_VC1_PARSER_ERROR);
- ensure_debug_category ();
-
return parse_sequence_header_struct_c (&br, structc);
}
@@ -1920,8 +1832,6 @@ gst_vc1_parse_sequence_header (const guint8 * data, gsize size,
g_return_val_if_fail (seqhdr != NULL, GST_VC1_PARSER_ERROR);
- ensure_debug_category ();
-
if (parse_sequence_header_struct_c (&br, &seqhdr->struct_c) ==
GST_VC1_PARSER_ERROR)
goto failed;
@@ -1965,8 +1875,6 @@ gst_vc1_parse_entry_point_header (const guint8 * data, gsize size,
g_return_val_if_fail (entrypoint != NULL, GST_VC1_PARSER_ERROR);
- ensure_debug_category ();
-
gst_bit_reader_init (&br, data, size);
if (gst_bit_reader_get_remaining (&br) < 13)
@@ -2089,8 +1997,6 @@ gst_vc1_parse_frame_header (const guint8 * data, gsize size,
GstBitReader br;
GstVC1ParserResult result;
- ensure_debug_category ();
-
gst_bit_reader_init (&br, data, size);
if (seqhdr->profile == GST_VC1_PROFILE_ADVANCED)
@@ -2123,8 +2029,6 @@ gst_vc1_parse_field_header (const guint8 * data, gsize size,
GstBitReader br;
GstVC1ParserResult result;
- ensure_debug_category ();
-
gst_bit_reader_init (&br, data, size);
result = parse_frame_header_advanced (&br, fieldhdr, seqhdr, bitplanes, TRUE);
diff --git a/gst-libs/gst/codecparsers/parserutils.c b/gst-libs/gst/codecparsers/parserutils.c
new file mode 100644
index 000000000..a31fe48f5
--- /dev/null
+++ b/gst-libs/gst/codecparsers/parserutils.c
@@ -0,0 +1,57 @@
+/* Gstreamer
+ * Copyright (C) <2011> Intel Corporation
+ * Copyright (C) <2011> Collabora Ltd.
+ * Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include "parserutils.h"
+
+gboolean
+decode_vlc (GstBitReader * br, guint * res, const VLCTable * table,
+ guint length)
+{
+ guint8 i;
+ guint cbits = 0;
+ guint32 value = 0;
+
+ for (i = 0; i < length; i++) {
+ if (cbits != table[i].cbits) {
+ cbits = table[i].cbits;
+ if (!gst_bit_reader_peek_bits_uint32 (br, &value, cbits)) {
+ goto failed;
+ }
+ }
+
+ if (value == table[i].cword) {
+ SKIP (br, cbits);
+ if (res)
+ *res = table[i].value;
+
+ return TRUE;
+ }
+ }
+
+ GST_DEBUG ("Did not find code");
+
+failed:
+ {
+ GST_WARNING ("Could not decode VLC returning");
+
+ return FALSE;
+ }
+}
diff --git a/gst-libs/gst/codecparsers/parserutils.h b/gst-libs/gst/codecparsers/parserutils.h
new file mode 100644
index 000000000..009b250cf
--- /dev/null
+++ b/gst-libs/gst/codecparsers/parserutils.h
@@ -0,0 +1,108 @@
+/* Gstreamer
+ * Copyright (C) <2011> Intel
+ * Copyright (C) <2011> Collabora Ltd.
+ * Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __PARSER_UTILS__
+#define __PARSER_UTILS__
+
+#include <gst/gst.h>
+#include <gst/base/gstbitreader.h>
+
+/* Parsing utils */
+#define GET_BITS(b, num, bits) G_STMT_START { \
+ if (!gst_bit_reader_get_bits_uint32(b, bits, num)) \
+ goto failed; \
+ GST_TRACE ("parsed %d bits: %d", num, *(bits)); \
+} G_STMT_END
+
+#define CHECK_ALLOWED(val, min, max) G_STMT_START { \
+ if (val < min || val > max) { \
+ GST_WARNING ("value not in allowed range. value: %d, range %d-%d", \
+ val, min, max); \
+ goto failed; \
+ } \
+} G_STMT_END
+
+#define READ_UINT8(reader, val, nbits) G_STMT_START { \
+ if (!gst_bit_reader_get_bits_uint8 (reader, &val, nbits)) { \
+ GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
+ goto failed; \
+ } \
+} G_STMT_END
+
+#define READ_UINT16(reader, val, nbits) G_STMT_START { \
+ if (!gst_bit_reader_get_bits_uint16 (reader, &val, nbits)) { \
+ GST_WARNING ("failed to read uint16, nbits: %d", nbits); \
+ goto failed; \
+ } \
+} G_STMT_END
+
+#define READ_UINT32(reader, val, nbits) G_STMT_START { \
+ if (!gst_bit_reader_get_bits_uint32 (reader, &val, nbits)) { \
+ GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
+ goto failed; \
+ } \
+} G_STMT_END
+
+#define READ_UINT64(reader, val, nbits) G_STMT_START { \
+ if (!gst_bit_reader_get_bits_uint64 (reader, &val, nbits)) { \
+ GST_WARNING ("failed to read uint64, nbits: %d", nbits); \
+ goto failed; \
+ } \
+} G_STMT_END
+
+
+#define U_READ_UINT8(reader, val, nbits) G_STMT_START { \
+ val = gst_bit_reader_get_bits_uint8_unchecked (reader, nbits); \
+} G_STMT_END
+
+#define U_READ_UINT16(reader, val, nbits) G_STMT_START { \
+ val = gst_bit_reader_get_bits_uint16_unchecked (reader, nbits); \
+} G_STMT_END
+
+#define U_READ_UINT32(reader, val, nbits) G_STMT_START { \
+ val = gst_bit_reader_get_bits_uint32_unchecked (reader, nbits); \
+} G_STMT_END
+
+#define U_READ_UINT64(reader, val, nbits) G_STMT_START { \
+ val = gst_bit_reader_get_bits_uint64_unchecked (reader, nbits); \
+} G_STMT_END
+
+#define SKIP(reader, nbits) G_STMT_START { \
+ if (!gst_bit_reader_skip (reader, nbits)) { \
+ GST_WARNING ("failed to skip nbits: %d", nbits); \
+ goto failed; \
+ } \
+} G_STMT_END
+
+typedef struct _VLCTable VLCTable;
+
+struct _VLCTable
+{
+ guint value;
+ guint cword;
+ guint cbits;
+};
+
+gboolean
+decode_vlc (GstBitReader * br, guint * res, const VLCTable * table,
+ guint length);
+
+#endif /* __PARSER_UTILS__ */
diff --git a/gst-libs/gst/video/gstbasevideocodec.c b/gst-libs/gst/video/gstbasevideocodec.c
index b37b75178..816e62e35 100644
--- a/gst-libs/gst/video/gstbasevideocodec.c
+++ b/gst-libs/gst/video/gstbasevideocodec.c
@@ -49,14 +49,14 @@ static GstElementClass *parent_class = NULL;
G_DEFINE_BOXED_TYPE (GstVideoFrameState, gst_video_frame_state,
(GBoxedCopyFunc) gst_video_frame_state_ref,
- (GBoxedFreeFunc) gst_video_frame_state_unref)
+ (GBoxedFreeFunc) gst_video_frame_state_unref);
/* NOTE (Edward): Do not use G_DEFINE_* because we need to have
* a GClassInitFunc called with the target class (which the macros
- * don't handle). */
- static void gst_base_video_codec_class_init (GstBaseVideoCodecClass *
- klass);
- static void gst_base_video_codec_init (GstBaseVideoCodec * dec,
+ * don't handle).
+ */
+static void gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass);
+static void gst_base_video_codec_init (GstBaseVideoCodec * dec,
GstBaseVideoCodecClass * klass);
GType
diff --git a/gst-libs/gst/video/gstbasevideocodec.h b/gst-libs/gst/video/gstbasevideocodec.h
index f7e015ad1..fb4ee7fec 100644
--- a/gst-libs/gst/video/gstbasevideocodec.h
+++ b/gst-libs/gst/video/gstbasevideocodec.h
@@ -138,6 +138,7 @@ struct _GstVideoFrameState
GstClockTime deadline;
gboolean force_keyframe;
+ gboolean force_keyframe_headers;
/* Events that should be pushed downstream *before*
* the next src_buffer */
diff --git a/gst-libs/gst/video/gstbasevideoencoder.c b/gst-libs/gst/video/gstbasevideoencoder.c
index deb20df4a..f4b1ee2d1 100644
--- a/gst-libs/gst/video/gstbasevideoencoder.c
+++ b/gst-libs/gst/video/gstbasevideoencoder.c
@@ -113,6 +113,34 @@
GST_DEBUG_CATEGORY (basevideoencoder_debug);
#define GST_CAT_DEFAULT basevideoencoder_debug
+typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
+struct _ForcedKeyUnitEvent
+{
+ GstClockTime running_time;
+ gboolean pending; /* TRUE if this was requested already */
+ gboolean all_headers;
+ guint count;
+};
+
+static void
+forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
+{
+ g_slice_free (ForcedKeyUnitEvent, evt);
+}
+
+static ForcedKeyUnitEvent *
+forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
+ guint count)
+{
+ ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);
+
+ evt->running_time = running_time;
+ evt->all_headers = all_headers;
+ evt->count = count;
+
+ return evt;
+}
+
static void gst_base_video_encoder_finalize (GObject * object);
static GstCaps *gst_base_video_encoder_sink_getcaps (GstPad * pad,
@@ -132,8 +160,7 @@ static gboolean gst_base_video_encoder_src_query (GstPad * pad,
#define gst_base_video_encoder_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstBaseVideoEncoder, gst_base_video_encoder,
- GST_TYPE_BASE_VIDEO_CODEC, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL);
- );
+ GST_TYPE_BASE_VIDEO_CODEC, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL););
static void
gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass)
@@ -160,16 +187,17 @@ gst_base_video_encoder_reset (GstBaseVideoEncoder * base_video_encoder)
base_video_encoder->presentation_frame_number = 0;
base_video_encoder->distance_from_sync = 0;
- base_video_encoder->force_keyframe = FALSE;
+
+ g_list_foreach (base_video_encoder->force_key_unit,
+ (GFunc) forced_key_unit_event_free, NULL);
+ g_list_free (base_video_encoder->force_key_unit);
+ base_video_encoder->force_key_unit = NULL;
base_video_encoder->drained = TRUE;
base_video_encoder->min_latency = 0;
base_video_encoder->max_latency = 0;
- if (base_video_encoder->force_keyunit_event) {
- gst_event_unref (base_video_encoder->force_keyunit_event);
- base_video_encoder->force_keyunit_event = NULL;
- }
+ gst_buffer_replace (&base_video_encoder->headers, NULL);
g_list_foreach (base_video_encoder->current_frame_events,
(GFunc) gst_event_unref, NULL);
@@ -202,12 +230,21 @@ gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder)
gst_pad_set_event_function (pad,
GST_DEBUG_FUNCPTR (gst_base_video_encoder_src_event));
- base_video_encoder->a.at_eos = FALSE;
+ base_video_encoder->at_eos = FALSE;
+ base_video_encoder->headers = NULL;
/* encoder is expected to do so */
base_video_encoder->sink_clipping = TRUE;
}
+void
+gst_base_video_encoder_set_headers (GstBaseVideoEncoder * base_video_encoder,
+ GstBuffer * headers)
+{
+ GST_DEBUG_OBJECT (base_video_encoder, "new headers %p", headers);
+ gst_buffer_replace (&base_video_encoder->headers, headers);
+}
+
static gboolean
gst_base_video_encoder_drain (GstBaseVideoEncoder * enc)
{
@@ -447,8 +484,11 @@ gst_base_video_encoder_sink_query (GstPad * pad, GstObject * parent,
static void
gst_base_video_encoder_finalize (GObject * object)
{
+ GstBaseVideoEncoder *base_video_encoder = (GstBaseVideoEncoder *) object;
GST_DEBUG_OBJECT (object, "finalize");
+ gst_buffer_replace (&base_video_encoder->headers, NULL);
+
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@@ -477,7 +517,7 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
GstFlowReturn flow_ret;
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
- base_video_encoder->a.at_eos = TRUE;
+ base_video_encoder->at_eos = TRUE;
if (base_video_encoder_class->finish) {
flow_ret = base_video_encoder_class->finish (base_video_encoder);
@@ -508,7 +548,7 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
break;
}
- base_video_encoder->a.at_eos = FALSE;
+ base_video_encoder->at_eos = FALSE;
gst_segment_copy_into (segment, &GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment);
@@ -517,17 +557,26 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
}
case GST_EVENT_CUSTOM_DOWNSTREAM:
{
- const GstStructure *s;
-
- s = gst_event_get_structure (event);
-
- if (gst_structure_has_name (s, "GstForceKeyUnit")) {
- GST_OBJECT_LOCK (base_video_encoder);
- base_video_encoder->force_keyframe = TRUE;
- if (base_video_encoder->force_keyunit_event)
- gst_event_unref (base_video_encoder->force_keyunit_event);
- base_video_encoder->force_keyunit_event = gst_event_copy (event);
- GST_OBJECT_UNLOCK (base_video_encoder);
+ if (gst_video_event_is_force_key_unit (event)) {
+ GstClockTime running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (gst_video_event_parse_downstream_force_key_unit (event,
+ NULL, NULL, &running_time, &all_headers, &count)) {
+ ForcedKeyUnitEvent *fevt;
+
+ GST_OBJECT_LOCK (base_video_encoder);
+ fevt = forced_key_unit_event_new (running_time, all_headers, count);
+ base_video_encoder->force_key_unit =
+ g_list_append (base_video_encoder->force_key_unit, fevt);
+ GST_OBJECT_UNLOCK (base_video_encoder);
+
+ GST_DEBUG_OBJECT (base_video_encoder,
+ "force-key-unit event: running-time %" GST_TIME_FORMAT
+ ", all_headers %d, count %u",
+ GST_TIME_ARGS (running_time), all_headers, count);
+ }
gst_event_unref (event);
ret = TRUE;
}
@@ -605,15 +654,26 @@ gst_base_video_encoder_src_event (GstPad * pad, GstObject * parent,
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
{
- const GstStructure *s;
-
- s = gst_event_get_structure (event);
-
- if (gst_structure_has_name (s, "GstForceKeyUnit")) {
- GST_OBJECT_LOCK (base_video_encoder);
- base_video_encoder->force_keyframe = TRUE;
- GST_OBJECT_UNLOCK (base_video_encoder);
-
+ if (gst_video_event_is_force_key_unit (event)) {
+ GstClockTime running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (gst_video_event_parse_upstream_force_key_unit (event,
+ &running_time, &all_headers, &count)) {
+ ForcedKeyUnitEvent *fevt;
+
+ GST_OBJECT_LOCK (base_video_encoder);
+ fevt = forced_key_unit_event_new (running_time, all_headers, count);
+ base_video_encoder->force_key_unit =
+ g_list_append (base_video_encoder->force_key_unit, fevt);
+ GST_OBJECT_UNLOCK (base_video_encoder);
+
+ GST_DEBUG_OBJECT (base_video_encoder,
+ "force-key-unit event: running-time %" GST_TIME_FORMAT
+ ", all_headers %d, count %u",
+ GST_TIME_ARGS (running_time), all_headers, count);
+ }
gst_event_unref (event);
ret = TRUE;
} else {
@@ -716,7 +776,7 @@ gst_base_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
- if (base_video_encoder->a.at_eos) {
+ if (base_video_encoder->at_eos) {
ret = GST_FLOW_UNEXPECTED;
goto done;
}
@@ -751,8 +811,47 @@ gst_base_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
frame->presentation_frame_number =
base_video_encoder->presentation_frame_number;
base_video_encoder->presentation_frame_number++;
- frame->force_keyframe = base_video_encoder->force_keyframe;
- base_video_encoder->force_keyframe = FALSE;
+
+ GST_OBJECT_LOCK (base_video_encoder);
+ if (base_video_encoder->force_key_unit) {
+ ForcedKeyUnitEvent *fevt = NULL;
+ GstClockTime running_time;
+ GList *l;
+
+ running_time = gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
+ (base_video_encoder)->segment, GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (buf));
+
+ for (l = base_video_encoder->force_key_unit; l; l = l->next) {
+ ForcedKeyUnitEvent *tmp = l->data;
+
+ /* Skip pending keyunits */
+ if (tmp->pending)
+ continue;
+
+ /* Simple case, keyunit ASAP */
+ if (tmp->running_time == GST_CLOCK_TIME_NONE) {
+ fevt = tmp;
+ break;
+ }
+
+ /* Event for before this frame */
+ if (tmp->running_time <= running_time) {
+ fevt = tmp;
+ break;
+ }
+ }
+
+ if (fevt) {
+ GST_DEBUG_OBJECT (base_video_encoder,
+ "Forcing a key unit at running time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time));
+ frame->force_keyframe = TRUE;
+ frame->force_keyframe_headers = fevt->all_headers;
+ fevt->pending = TRUE;
+ }
+ }
+ GST_OBJECT_UNLOCK (base_video_encoder);
GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
g_list_append (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);
@@ -839,6 +938,7 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
GstFlowReturn ret = GST_FLOW_OK;
GstBaseVideoEncoderClass *base_video_encoder_class;
GList *l;
+ GstBuffer *headers = NULL;
base_video_encoder_class =
GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);
@@ -866,45 +966,78 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
break;
}
- if (frame->force_keyframe) {
- GstClockTime stream_time;
- GstClockTime running_time;
+ /* no buffer data means this frame is skipped/dropped */
+ if (!frame->src_buffer) {
+ GST_DEBUG_OBJECT (base_video_encoder, "skipping frame %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (frame->presentation_timestamp));
+ goto done;
+ }
+
+ if (frame->is_sync_point && base_video_encoder->force_key_unit) {
+ GstClockTime stream_time, running_time;
GstEvent *ev;
+ ForcedKeyUnitEvent *fevt = NULL;
+ GList *l;
- running_time =
- gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
- (base_video_encoder)->segment, GST_FORMAT_TIME,
- frame->presentation_timestamp);
- stream_time =
- gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
+ running_time = gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment, GST_FORMAT_TIME,
frame->presentation_timestamp);
/* re-use upstream event if any so it also conveys any additional
* info upstream arranged in there */
GST_OBJECT_LOCK (base_video_encoder);
- if (base_video_encoder->force_keyunit_event) {
- ev = base_video_encoder->force_keyunit_event;
- base_video_encoder->force_keyunit_event = NULL;
- } else {
- ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
- gst_structure_new_empty ("GstForceKeyUnit"));
+ for (l = base_video_encoder->force_key_unit; l; l = l->next) {
+ ForcedKeyUnitEvent *tmp = l->data;
+
+ /* Skip non-pending keyunits */
+ if (!tmp->pending)
+ continue;
+
+ /* Simple case, keyunit ASAP */
+ if (tmp->running_time == GST_CLOCK_TIME_NONE) {
+ fevt = tmp;
+ break;
+ }
+
+ /* Event for before this frame */
+ if (tmp->running_time <= running_time) {
+ fevt = tmp;
+ break;
+ }
+ }
+
+ if (fevt) {
+ base_video_encoder->force_key_unit =
+ g_list_remove (base_video_encoder->force_key_unit, fevt);
}
GST_OBJECT_UNLOCK (base_video_encoder);
- gst_structure_set (gst_event_writable_structure (ev),
- "timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
- "stream-time", G_TYPE_UINT64, stream_time,
- "running-time", G_TYPE_UINT64, running_time, NULL);
+ if (fevt) {
+ stream_time =
+ gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
+ (base_video_encoder)->segment, GST_FORMAT_TIME,
+ frame->presentation_timestamp);
- gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), ev);
- }
+ ev = gst_video_event_new_downstream_force_key_unit
+ (frame->presentation_timestamp, stream_time, running_time,
+ fevt->all_headers, fevt->count);
- /* no buffer data means this frame is skipped/dropped */
- if (!frame->src_buffer) {
- GST_DEBUG_OBJECT (base_video_encoder, "skipping frame %" GST_TIME_FORMAT,
- GST_TIME_ARGS (frame->presentation_timestamp));
- goto done;
+ gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
+ ev);
+
+ if (fevt->all_headers) {
+ if (base_video_encoder->headers) {
+ headers = gst_buffer_ref (base_video_encoder->headers);
+ headers = gst_buffer_make_writable (headers);
+ }
+ }
+
+ GST_DEBUG_OBJECT (base_video_encoder,
+ "Forced key unit: running-time %" GST_TIME_FORMAT
+ ", all_headers %d, count %u",
+ GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
+ forced_key_unit_event_free (fevt);
+ }
}
if (frame->is_sync_point) {
@@ -931,6 +1064,12 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;
+ if (G_UNLIKELY (headers)) {
+ GST_BUFFER_TIMESTAMP (headers) = frame->presentation_timestamp;
+ GST_BUFFER_DURATION (headers) = 0;
+ GST_BUFFER_OFFSET (headers) = frame->decode_timestamp;
+ }
+
/* update rate estimate */
GST_BASE_VIDEO_CODEC (base_video_encoder)->bytes +=
gst_buffer_get_size (frame->src_buffer);
diff --git a/gst-libs/gst/video/gstbasevideoencoder.h b/gst-libs/gst/video/gstbasevideoencoder.h
index 56340da98..547d0ac2d 100644
--- a/gst-libs/gst/video/gstbasevideoencoder.h
+++ b/gst-libs/gst/video/gstbasevideoencoder.h
@@ -84,26 +84,22 @@ struct _GstBaseVideoEncoder
guint64 presentation_frame_number;
int distance_from_sync;
- gboolean force_keyframe;
-
/*< private >*/
/* FIXME move to real private part ?
* (and introduce a context ?) */
gboolean drained;
+ gboolean at_eos;
gint64 min_latency;
gint64 max_latency;
- GstEvent *force_keyunit_event;
GList *current_frame_events;
- union {
- void *padding;
- gboolean at_eos;
- } a;
+ GstBuffer *headers;
- /* FIXME before moving to base */
- void *padding[GST_PADDING_LARGE-1];
+ GList *force_key_unit; /* List of pending forced keyunits */
+
+ void *padding[GST_PADDING_LARGE];
};
/**
@@ -178,7 +174,8 @@ void gst_base_video_encoder_set_latency (GstBaseVideoEncoder *
GstClockTime min_latency, GstClockTime max_latency);
void gst_base_video_encoder_set_latency_fields (GstBaseVideoEncoder *base_video_encoder,
int n_fields);
-
+void gst_base_video_encoder_set_headers (GstBaseVideoEncoder *base_video_encoder,
+ GstBuffer *headers);
G_END_DECLS
#endif
diff --git a/gst/asfmux/gstasfmux.c b/gst/asfmux/gstasfmux.c
index 42b8b753c..e7184f601 100644
--- a/gst/asfmux/gstasfmux.c
+++ b/gst/asfmux/gstasfmux.c
@@ -335,9 +335,9 @@ gst_asf_mux_init (GstAsfMux * asfmux)
gst_pad_use_fixed_caps (asfmux->srcpad);
gst_element_add_pad (GST_ELEMENT (asfmux), asfmux->srcpad);
- asfmux->collect = gst_collect_pads_new ();
- gst_collect_pads_set_function (asfmux->collect,
- (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_asf_mux_collected),
+ asfmux->collect = gst_collect_pads2_new ();
+ gst_collect_pads2_set_function (asfmux->collect,
+ (GstCollectPads2Function) GST_DEBUG_FUNCPTR (gst_asf_mux_collected),
asfmux);
asfmux->payloads = NULL;
@@ -1851,7 +1851,7 @@ gst_asf_mux_process_buffer (GstAsfMux * asfmux, GstAsfPad * pad,
AsfPayload *payload;
payload = g_malloc0 (sizeof (AsfPayload));
- payload->pad = (GstCollectData *) pad;
+ payload->pad = (GstCollectData2 *) pad;
payload->data = buf;
GST_LOG_OBJECT (asfmux,
@@ -1911,7 +1911,7 @@ gst_asf_mux_process_buffer (GstAsfMux * asfmux, GstAsfPad * pad,
}
static GstFlowReturn
-gst_asf_mux_collected (GstCollectPads * collect, gpointer data)
+gst_asf_mux_collected (GstCollectPads2 * collect, gpointer data)
{
GstAsfMux *asfmux = GST_ASF_MUX_CAST (data);
GstFlowReturn ret = GST_FLOW_OK;
@@ -1937,15 +1937,15 @@ gst_asf_mux_collected (GstCollectPads * collect, gpointer data)
walk = asfmux->collect->data;
while (walk) {
GstAsfPad *pad;
- GstCollectData *data;
+ GstCollectData2 *data;
GstClockTime time;
- data = (GstCollectData *) walk->data;
+ data = (GstCollectData2 *) walk->data;
pad = (GstAsfPad *) data;
walk = g_slist_next (walk);
- buf = gst_collect_pads_peek (collect, data);
+ buf = gst_collect_pads2_peek (collect, data);
if (buf == NULL) {
GST_LOG_OBJECT (asfmux, "Pad %s has no buffers",
GST_PAD_NAME (pad->collect.pad));
@@ -1980,7 +1980,7 @@ gst_asf_mux_collected (GstCollectPads * collect, gpointer data)
/* we have data */
GST_LOG_OBJECT (asfmux, "selected pad %s with time %" GST_TIME_FORMAT,
GST_PAD_NAME (best_pad->collect.pad), GST_TIME_ARGS (best_time));
- buf = gst_collect_pads_pop (collect, &best_pad->collect);
+ buf = gst_collect_pads2_pop (collect, &best_pad->collect);
ret = gst_asf_mux_process_buffer (asfmux, best_pad, buf);
} else {
/* no data, let's finish it up */
@@ -2251,16 +2251,16 @@ gst_asf_mux_request_new_pad (GstElement * element,
return NULL;
}
- if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) {
- name = g_strdup_printf ("audio_%u", asfmux->stream_number + 1);
+ if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
+ name = g_strdup_printf ("audio_%02d", asfmux->stream_number + 1);
GST_DEBUG_OBJECT (asfmux, "Adding new pad %s", name);
newpad = gst_pad_new_from_template (templ, name);
g_free (name);
is_audio = TRUE;
gst_pad_set_setcaps_function (newpad,
GST_DEBUG_FUNCPTR (gst_asf_mux_audio_set_caps));
- } else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) {
- name = g_strdup_printf ("video_%u", asfmux->stream_number + 1);
+ } else if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
+ name = g_strdup_printf ("video_%02d", asfmux->stream_number + 1);
GST_DEBUG_OBJECT (asfmux, "Adding new pad %s", name);
newpad = gst_pad_new_from_template (templ, name);
g_free (name);
@@ -2279,8 +2279,8 @@ gst_asf_mux_request_new_pad (GstElement * element,
collect_size = sizeof (GstAsfVideoPad);
}
collect_pad = (GstAsfPad *)
- gst_collect_pads_add_pad (asfmux->collect, newpad, collect_size,
- (GstCollectDataDestroyNotify) (gst_asf_mux_pad_reset));
+ gst_collect_pads2_add_pad_full (asfmux->collect, newpad, collect_size,
+ (GstCollectData2DestroyNotify) (gst_asf_mux_pad_reset), TRUE);
/* set up pad */
collect_pad->is_audio = is_audio;
@@ -2294,7 +2294,7 @@ gst_asf_mux_request_new_pad (GstElement * element,
collect_pad->stream_number = asfmux->stream_number;
/* FIXME: hacked way to override/extend the event function of
- * GstCollectPads; because it sets its own event function giving
+ * GstCollectPads2; because it sets its own event function giving
* the element no access to events.
*/
asfmux->collect_event = (GstPadEventFunction) GST_PAD_EVENTFUNC (newpad);
@@ -2391,12 +2391,12 @@ gst_asf_mux_change_state (GstElement * element, GstStateChange transition)
asfmux->packet_size = asfmux->prop_packet_size;
asfmux->preroll = asfmux->prop_preroll;
asfmux->merge_stream_tags = asfmux->prop_merge_stream_tags;
- gst_collect_pads_start (asfmux->collect);
+ gst_collect_pads2_start (asfmux->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_collect_pads_stop (asfmux->collect);
+ gst_collect_pads2_stop (asfmux->collect);
asfmux->state = GST_ASF_MUX_STATE_NONE;
break;
default:
diff --git a/gst/asfmux/gstasfmux.h b/gst/asfmux/gstasfmux.h
index 4036aa169..49bd92d8f 100644
--- a/gst/asfmux/gstasfmux.h
+++ b/gst/asfmux/gstasfmux.h
@@ -23,7 +23,7 @@
#include <gst/gst.h>
-#include <gst/base/gstcollectpads.h>
+#include <gst/base/gstcollectpads2.h>
#include <gst/riff/riff-media.h>
#include "gstasfobjects.h"
@@ -57,7 +57,7 @@ enum _GstAsfMuxState
struct _GstAsfPad
{
- GstCollectData collect;
+ GstCollectData2 collect;
gboolean is_audio;
guint8 stream_number;
@@ -143,7 +143,7 @@ struct _GstAsfMux
/* pads */
GstPad *srcpad;
- GstCollectPads *collect;
+ GstCollectPads2 *collect;
GstPadEventFunction collect_event;
};
diff --git a/gst/asfmux/gstasfobjects.h b/gst/asfmux/gstasfobjects.h
index 2a91a4c79..00546fff4 100644
--- a/gst/asfmux/gstasfobjects.h
+++ b/gst/asfmux/gstasfobjects.h
@@ -22,7 +22,7 @@
#include <glib.h>
#include <gst/gst.h>
#include <gst/base/gstbytereader.h>
-#include <gst/base/gstcollectpads.h>
+#include <gst/base/gstcollectpads2.h>
#define ASF_PAYLOAD_IS_KEYFRAME(pay) ((pay->stream_number & 0x80) != 0)
#define ASF_MILI_TO_100NANO(v) (v * 10000)
@@ -75,7 +75,7 @@ typedef struct _AsfPayload
guint32 presentation_time;
GstBuffer *data;
- GstCollectData *pad;
+ GstCollectData2 *pad;
/* simple index info */
gboolean has_packet_info;
diff --git a/gst/audiovisualizers/README b/gst/audiovisualizers/README
index d1a8c05eb..be2491e06 100644
--- a/gst/audiovisualizers/README
+++ b/gst/audiovisualizers/README
@@ -3,9 +3,8 @@ video-rate. It receives audio-data at the sampling-rate. It needs to render
video-frames at frame-rate. The rendering needs n audio samples (depends on
subclass). The baseclass takes care of that.
-Some effects could be enhanced by running geometrictransform elements
-afterwards. A blur and/or videozoom element would be great (vertigotv looks
-great but has some negotiation issues).
+Some effects could be enhanced by running geometrictransform/effecttc elements
+afterwards.
= Feedback =
* put 'Audio' to klass as well ?
@@ -36,17 +35,35 @@ spectrascope - done
spacescope - stereo wavescope
- left->x, right->y - done
- polar mapping
-multiscope :
-- like wave/space scope, but run the signal through two filters to split it into
- bass, mid and high (200 Hz, 2000 Hz)
-- draw 3 wave-scopes into red/gree/blue
-- when drawing only draw that component to mix colors
-- eventually use the spacescope-position to rotate/shift the wave
+wavescope
+- we could have a bouncing line as a base, like a quix:
+ - two dots moving on a linear path and getting a new random dx,dy when hitting
+ a border
+ - the abs(dx/dy) - speed of movement - could be scaled by the sound level
+ - we would need to rotate, stretch and clip the waveform drawing to fit the
+ line
+ - we could scratch the rotate part and just stretch/squeeze x and shift/clip y
+
+xxxscope
+- have a matrix of source and drawing-functions
+ - sources: audio, spectrum, audio-low, audio-mid, audio-hi
+ - drawing: waves (style, color), space (style,color)
+- have the usual shade and move operations
+- have a way to draw each operator in one or more color-channels
+- we could calculate the sound-level (like in level element) and modulate
+ colors/movements
+ - for filtered low/mid/hi audio we could use different peak-falloffs
= TODO =
- element maker template
- test for baseclass
+- we probably want a VisBin like the gnome video effects
+- this way we can specify pipeline fragments
+- VisBin can use a videomixer to switch effects based on time or song
+- VisBin can e.g. control a text-overlay to render the title into the
+ visualisation for a while
+
= Test it =
GST_DEBUG="*:2,*scope*:4"
@@ -55,6 +72,7 @@ GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-inspect scopes
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch audiotestsrc ! audioconvert ! wavescope ! colorspace ! ximagesink
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch filesrc location=$HOME/Music/1.mp3 ! decodebin2 ! audioconvert ! wavescope ! colorspace ! ximagesink
+GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch filesrc location=$HOME/Music/1.mp3 ! decodebin2 ! audioconvert ! wavescope style=lines shade-amount=0x00080402 ! edgetv ! vertigotv ! ximagesink
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch filesrc location=$HOME/Music/1.mp3 ! decodebin2 ! audioconvert ! spacescope style=lines shade-amount=0x00080402 ! ximagesink
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch filesrc location=$HOME/Music/1.mp3 ! decodebin2 ! audioconvert ! spacescope style=lines shade-amount=0x00080402 ! vertigotv ! ximagesink
diff --git a/gst/audiovisualizers/gstdrawhelpers.h b/gst/audiovisualizers/gstdrawhelpers.h
index 2c2cd36ef..815fae35e 100644
--- a/gst/audiovisualizers/gstdrawhelpers.h
+++ b/gst/audiovisualizers/gstdrawhelpers.h
@@ -24,6 +24,23 @@
_vd[(_y * _st) + _x] = _c; \
} G_STMT_END
+#define draw_dot_c(_vd, _x, _y, _st, _c) G_STMT_START { \
+ _vd[(_y * _st) + _x] |= _c; \
+} G_STMT_END
+
+#define draw_dot_aa(_vd, _x, _y, _st, _c, _f) G_STMT_START { \
+ guint32 _oc, _c1, _c2, _c3; \
+ \
+ _oc = _vd[(_y * _st) + _x]; \
+ _c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
+ _c3 = MIN(_c3, 255); \
+ _c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
+ _c2 = MIN(_c2, 255); \
+ _c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
+ _c1 = MIN(_c1, 255); \
+ _vd[(_y * _st) + _x] = (_c1 << 16) | (_c2 << 8) | _c3; \
+} G_STMT_END
+
#define draw_line(_vd, _x1, _x2, _y1, _y2, _st, _c) G_STMT_START { \
guint _i, _j, _x, _y; \
gint _dx = _x2 - _x1, _dy = _y2 - _y1; \
@@ -42,7 +59,6 @@
guint _i, _j, _x, _y; \
gint _dx = _x2 - _x1, _dy = _y2 - _y1; \
gfloat _f, _rx, _ry, _fx, _fy; \
- guint32 _oc, _nc, _c1, _c2, _c3; \
\
_j = abs (_dx) > abs (_dy) ? abs (_dx) : abs (_dy); \
for (_i = 0; _i < _j; _i++) { \
@@ -55,48 +71,16 @@
_fy = _ry - (gfloat)_y; \
\
_f = ((1.0 - _fx) + (1.0 - _fy)) / 2.0; \
- _oc = _vd[(_y * _st) + _x]; \
- _c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
- _c3 = MIN(_c3, 255); \
- _c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
- _c2 = MIN(_c2, 255); \
- _c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
- _c1 = MIN(_c1, 255); \
- _nc = 0x00 | (_c1 << 16) | (_c2 << 8) | _c3; \
- _vd[(_y * _st) + _x] = _nc; \
+ draw_dot_aa (_vd, _x, _y, _st, _c, _f); \
\
_f = (_fx + (1.0 - _fy)) / 2.0; \
- _oc = _vd[(_y * _st) + _x + 1]; \
- _c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
- _c3 = MIN(_c3, 255); \
- _c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
- _c2 = MIN(_c2, 255); \
- _c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
- _c1 = MIN(_c1, 255); \
- _nc = 0x00 | (_c1 << 16) | (_c2 << 8) | _c3; \
- _vd[(_y * _st) + _x + 1] = _nc; \
+ draw_dot_aa (_vd, (_x + 1), _y, _st, _c, _f); \
\
_f = ((1.0 - _fx) + _fy) / 2.0; \
- _oc = _vd[((_y + 1) * _st) + _x]; \
- _c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
- _c3 = MIN(_c3, 255); \
- _c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
- _c2 = MIN(_c2, 255); \
- _c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
- _c1 = MIN(_c1, 255); \
- _nc = 0x00 | (_c1 << 16) | (_c2 << 8) | _c3; \
- _vd[((_y + 1) * _st) + _x] = _nc; \
+ draw_dot_aa (_vd, _x, (_y + 1), _st, _c, _f); \
\
_f = (_fx + _fy) / 2.0; \
- _oc = _vd[((_y + 1) * _st) + _x + 1]; \
- _c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
- _c3 = MIN(_c3, 255); \
- _c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
- _c2 = MIN(_c2, 255); \
- _c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
- _c1 = MIN(_c1, 255); \
- _nc = 0x00 | (_c1 << 16) | (_c2 << 8) | _c3; \
- _vd[((_y + 1) * _st) + _x + 1] = _nc; \
+ draw_dot_aa (_vd, (_x + 1), (_y + 1), _st, _c, _f); \
} \
} G_STMT_END
diff --git a/gst/audiovisualizers/gstspacescope.c b/gst/audiovisualizers/gstspacescope.c
index 973dd74be..b9efaf08a 100644
--- a/gst/audiovisualizers/gstspacescope.c
+++ b/gst/audiovisualizers/gstspacescope.c
@@ -72,6 +72,8 @@ enum
{
STYLE_DOTS = 0,
STYLE_LINES,
+ STYLE_COLOR_DOTS,
+ STYLE_COLOR_LINES,
NUM_STYLES
};
@@ -85,6 +87,8 @@ gst_space_scope_style_get_type (void)
static const GEnumValue values[] = {
{STYLE_DOTS, "draw dots (default)", "dots"},
{STYLE_LINES, "draw lines", "lines"},
+ {STYLE_COLOR_DOTS, "draw color dots", "color-dots"},
+ {STYLE_COLOR_LINES, "draw color lines", "color-lines"},
{0, NULL, NULL}
};
@@ -98,9 +102,13 @@ static void gst_space_scope_set_property (GObject * object, guint prop_id,
static void gst_space_scope_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static void render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata,
+static void render_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples);
-static void render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata,
+static void render_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
+ gint16 * adata, guint num_samples);
+static void render_color_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
+ gint16 * adata, guint num_samples);
+static void render_color_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples);
static gboolean gst_space_scope_render (GstBaseAudioVisualizer * scope,
@@ -160,6 +168,12 @@ gst_space_scope_set_property (GObject * object, guint prop_id,
case STYLE_LINES:
scope->process = render_lines;
break;
+ case STYLE_COLOR_DOTS:
+ scope->process = render_color_dots;
+ break;
+ case STYLE_COLOR_LINES:
+ scope->process = render_color_lines;
+ break;
}
break;
default:
@@ -187,18 +201,19 @@ gst_space_scope_get_property (GObject * object, guint prop_id,
#include "gstdrawhelpers.h"
static void
-render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
+render_dots (GstBaseAudioVisualizer * base, guint32 * vdata, gint16 * adata,
guint num_samples)
{
guint i, s, x, y, ox, oy;
gfloat dx, dy;
- guint w = scope->width;
+ guint w = base->width;
+ guint h = base->height;
/* draw dots 1st channel x, 2nd channel y */
- dx = scope->width / 65536.0;
- ox = scope->width / 2;
- dy = scope->height / 65536.0;
- oy = scope->height / 2;
+ dx = w / 65536.0;
+ ox = w / 2;
+ dy = h / 65536.0;
+ oy = h / 2;
s = 0;
for (i = 0; i < num_samples; i++) {
x = (guint) (ox + (gfloat) adata[s++] * dx);
@@ -208,13 +223,13 @@ render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
}
static void
-render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
+render_lines (GstBaseAudioVisualizer * base, guint32 * vdata, gint16 * adata,
guint num_samples)
{
guint i, s, x, y, ox, oy;
gfloat dx, dy;
- guint w = scope->width;
- guint h = scope->height;
+ guint w = base->width;
+ guint h = base->height;
gint x2, y2;
/* draw lines 1st channel x, 2nd channel y */
@@ -234,6 +249,179 @@ render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
}
}
+#define CUTOFF_1 0.15
+#define CUTOFF_2 0.45
+#define RESONANCE (1.0/0.5)
+
+#define filter(il, ir) G_STMT_START { \
+ f1l_h = il - (f1l_m * RESONANCE) - f1l_l; \
+ f1l_m += (f1l_h * CUTOFF_1); \
+ f1l_l += (f1l_m * CUTOFF_1); \
+ \
+ f2l_h = (f1l_m + f1l_h) - (f2l_m * RESONANCE) - f2l_l; \
+ f2l_m += (f2l_h * CUTOFF_2); \
+ f2l_l += (f2l_m * CUTOFF_2); \
+ \
+ f1r_h = ir - (f1r_m * RESONANCE) - f1r_l; \
+ f1r_m += (f1r_h * CUTOFF_1); \
+ f1r_l += (f1r_m * CUTOFF_1); \
+ \
+ f2r_h = (f1r_m + f1r_h) - (f2r_m * RESONANCE) - f2r_l; \
+ f2r_m += (f2r_h * CUTOFF_2); \
+ f2r_l += (f2r_m * CUTOFF_2); \
+} G_STMT_END
+
+static void
+render_color_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
+ gint16 * adata, guint num_samples)
+{
+ GstSpaceScope *scope = (GstSpaceScope *) base;
+ guint i, s;
+ gint x, y, ox, oy;
+ gfloat dx, dy;
+ gint w = base->width, w1 = w - 2;
+ gint h = base->height, h1 = h - 2;
+ gdouble il, ir;
+ gdouble f1l_l = scope->f1l_l, f1l_m = scope->f1l_m, f1l_h = scope->f1l_h;
+ gdouble f1r_l = scope->f1r_l, f1r_m = scope->f1r_m, f1r_h = scope->f1r_h;
+ gdouble f2l_l = scope->f2l_l, f2l_m = scope->f2l_m, f2l_h = scope->f2l_h;
+ gdouble f2r_l = scope->f2r_l, f2r_m = scope->f2r_m, f2r_h = scope->f2r_h;
+
+ /* draw dots 1st channel x, 2nd channel y */
+ ox = w / 2;
+ oy = h / 2;
+ dx = w / 65536.0;
+ dy = h / 65536.0;
+ s = 0;
+ for (i = 0; i < num_samples; i++) {
+ il = (gdouble) adata[s++];
+ ir = (gdouble) adata[s++];
+
+ filter (il, ir);
+
+ x = (gint) (ox + f1l_l * dx);
+ y = (gint) (oy + f1r_l * dy);
+ x = CLAMP (x, 0, w1);
+ y = CLAMP (y, 0, h1);
+ draw_dot_c (vdata, x, y, w, 0x00FF0000);
+
+ x = (gint) (ox + f2l_l * dx);
+ y = (gint) (oy + f2r_l * dy);
+ x = CLAMP (x, 0, w1);
+ y = CLAMP (y, 0, h1);
+ draw_dot_c (vdata, x, y, w, 0x0000FF00);
+
+ x = (gint) (ox + (f2l_m + f2l_h) * dx);
+ y = (gint) (oy + (f2r_m + f2r_h) * dy);
+ x = CLAMP (x, 0, w1);
+ y = CLAMP (y, 0, h1);
+ draw_dot_c (vdata, x, y, w, 0x000000FF);
+ }
+
+ scope->f1l_l = f1l_l;
+ scope->f1l_m = f1l_m;
+ scope->f1l_h = f1l_h;
+ scope->f1r_l = f1r_l;
+ scope->f1r_m = f1r_m;
+ scope->f1r_h = f1r_h;
+ scope->f2l_l = f2l_l;
+ scope->f2l_m = f2l_m;
+ scope->f2l_h = f2l_h;
+ scope->f2r_l = f2r_l;
+ scope->f2r_m = f2r_m;
+ scope->f2r_h = f2r_h;
+}
+
+static void
+render_color_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
+ gint16 * adata, guint num_samples)
+{
+ GstSpaceScope *scope = (GstSpaceScope *) base;
+ guint i, s;
+ gint x, y, ox, oy;
+ gfloat dx, dy;
+ gint w = base->width, w1 = w - 2;
+ gint h = base->height, h1 = h - 2;
+ gdouble il, ir;
+ gdouble f1l_l = scope->f1l_l, f1l_m = scope->f1l_m, f1l_h = scope->f1l_h;
+ gdouble f1r_l = scope->f1r_l, f1r_m = scope->f1r_m, f1r_h = scope->f1r_h;
+ gdouble f2l_l = scope->f2l_l, f2l_m = scope->f2l_m, f2l_h = scope->f2l_h;
+ gdouble f2r_l = scope->f2r_l, f2r_m = scope->f2r_m, f2r_h = scope->f2r_h;
+ gint x2, y2, x3, y3, x4, y4;
+
+ /* draw lines 1st channel x, 2nd channel y */
+ ox = w / 2;
+ oy = h / 2;
+ dx = w / 65536.0;
+ dy = h / 65536.0;
+ s = 0;
+
+ /* do first pixels */
+ il = (gdouble) adata[s++];
+ ir = (gdouble) adata[s++];
+
+ filter (il, ir);
+
+ x = (gint) (ox + f1l_l * dx);
+ y = (gint) (oy + f1r_l * dy);
+ x2 = CLAMP (x, 0, w1);
+ y2 = CLAMP (y, 0, h1);
+
+ x = (gint) (ox + f2l_l * dx);
+ y = (gint) (oy + f2r_l * dy);
+ x3 = CLAMP (x, 0, w1);
+ y3 = CLAMP (y, 0, h1);
+
+ x = (gint) (ox + (f2l_m + f2l_h) * dx);
+ y = (gint) (oy + (f2r_m + f2r_h) * dy);
+ x4 = CLAMP (x, 0, w1);
+ y4 = CLAMP (y, 0, h1);
+
+ for (i = 1; i < num_samples; i++) {
+ il = (gdouble) adata[s++];
+ ir = (gdouble) adata[s++];
+
+ filter (il, ir);
+
+ x = (gint) (ox + f1l_l * dx);
+ y = (gint) (oy + f1r_l * dy);
+ x = CLAMP (x, 0, w1);
+ y = CLAMP (y, 0, h1);
+ draw_line_aa (vdata, x2, x, y2, y, w, 0x00FF0000);
+ x2 = x;
+ y2 = y;
+
+ x = (gint) (ox + f2l_l * dx);
+ y = (gint) (oy + f2r_l * dy);
+ x = CLAMP (x, 0, w1);
+ y = CLAMP (y, 0, h1);
+ draw_line_aa (vdata, x3, x, y3, y, w, 0x0000FF00);
+ x3 = x;
+ y3 = y;
+
+ x = (gint) (ox + (f2l_m + f2l_h) * dx);
+ y = (gint) (oy + (f2r_m + f2r_h) * dy);
+ x = CLAMP (x, 0, w1);
+ y = CLAMP (y, 0, h1);
+ draw_line_aa (vdata, x4, x, y4, y, w, 0x000000FF);
+ x4 = x;
+ y4 = y;
+ }
+
+ scope->f1l_l = f1l_l;
+ scope->f1l_m = f1l_m;
+ scope->f1l_h = f1l_h;
+ scope->f1r_l = f1r_l;
+ scope->f1r_m = f1r_m;
+ scope->f1r_h = f1r_h;
+ scope->f2l_l = f2l_l;
+ scope->f2l_m = f2l_m;
+ scope->f2l_h = f2l_h;
+ scope->f2r_l = f2r_l;
+ scope->f2r_m = f2r_m;
+ scope->f2r_h = f2r_h;
+}
+
static gboolean
gst_space_scope_render (GstBaseAudioVisualizer * base, GstBuffer * audio,
GstBuffer * video)
diff --git a/gst/audiovisualizers/gstspacescope.h b/gst/audiovisualizers/gstspacescope.h
index 62600f4b4..a2b06f777 100644
--- a/gst/audiovisualizers/gstspacescope.h
+++ b/gst/audiovisualizers/gstspacescope.h
@@ -42,6 +42,12 @@ struct _GstSpaceScope
/* < private > */
GstSpaceScopeProcessFunc process;
gint style;
+
+ /* filter specific data */
+ gdouble f1l_l, f1l_m, f1l_h;
+ gdouble f1r_l, f1r_m, f1r_h;
+ gdouble f2l_l, f2l_m, f2l_h;
+ gdouble f2r_l, f2r_m, f2r_h;
};
struct _GstSpaceScopeClass
diff --git a/gst/audiovisualizers/gstwavescope.c b/gst/audiovisualizers/gstwavescope.c
index 354072399..cd93a2683 100644
--- a/gst/audiovisualizers/gstwavescope.c
+++ b/gst/audiovisualizers/gstwavescope.c
@@ -72,6 +72,8 @@ enum
{
STYLE_DOTS = 0,
STYLE_LINES,
+ STYLE_COLOR_DOTS,
+ STYLE_COLOR_LINES,
NUM_STYLES
};
@@ -85,6 +87,8 @@ gst_wave_scope_style_get_type (void)
static const GEnumValue values[] = {
{STYLE_DOTS, "draw dots (default)", "dots"},
{STYLE_LINES, "draw lines", "lines"},
+ {STYLE_COLOR_DOTS, "draw color dots", "color-dots"},
+ {STYLE_COLOR_LINES, "draw color lines", "color-lines"},
{0, NULL, NULL}
};
@@ -97,15 +101,22 @@ static void gst_wave_scope_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_wave_scope_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
+static void gst_wave_scope_finalize (GObject * object);
static void render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata,
gint16 * adata, guint num_samples);
static void render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata,
gint16 * adata, guint num_samples);
+static void render_color_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
+ gint16 * adata, guint num_samples);
+static void render_color_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
+ gint16 * adata, guint num_samples);
+static gboolean gst_wave_scope_setup (GstBaseAudioVisualizer * scope);
static gboolean gst_wave_scope_render (GstBaseAudioVisualizer * base,
GstBuffer * audio, GstBuffer * video);
+#define gst_wave_scope_parent_class parent_class
G_DEFINE_TYPE (GstWaveScope, gst_wave_scope, GST_TYPE_BASE_AUDIO_VISUALIZER);
static void
@@ -118,6 +129,10 @@ gst_wave_scope_class_init (GstWaveScopeClass * g_class)
gobject_class->set_property = gst_wave_scope_set_property;
gobject_class->get_property = gst_wave_scope_get_property;
+ gobject_class->finalize = gst_wave_scope_finalize;
+
+ scope_class->setup = GST_DEBUG_FUNCPTR (gst_wave_scope_setup);
+ scope_class->render = GST_DEBUG_FUNCPTR (gst_wave_scope_render);
g_object_class_install_property (gobject_class, PROP_STYLE,
g_param_spec_enum ("style", "drawing style",
@@ -144,6 +159,32 @@ gst_wave_scope_init (GstWaveScope * scope)
}
static void
+gst_wave_scope_finalize (GObject * object)
+{
+ GstWaveScope *scope = GST_WAVE_SCOPE (object);
+
+ if (scope->flt) {
+ g_free (scope->flt);
+ scope->flt = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_wave_scope_setup (GstBaseAudioVisualizer * bscope)
+{
+ GstWaveScope *scope = GST_WAVE_SCOPE (bscope);
+
+ if (scope->flt)
+ g_free (scope->flt);
+
+ scope->flt = g_new0 (gdouble, 6 * bscope->channels);
+
+ return TRUE;
+}
+
+static void
gst_wave_scope_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
@@ -159,6 +200,12 @@ gst_wave_scope_set_property (GObject * object, guint prop_id,
case STYLE_LINES:
scope->process = render_lines;
break;
+ case STYLE_COLOR_DOTS:
+ scope->process = render_color_dots;
+ break;
+ case STYLE_COLOR_LINES:
+ scope->process = render_color_lines;
+ break;
}
break;
default:
@@ -186,18 +233,19 @@ gst_wave_scope_get_property (GObject * object, guint prop_id,
#include "gstdrawhelpers.h"
static void
-render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
+render_dots (GstBaseAudioVisualizer * base, guint32 * vdata, gint16 * adata,
guint num_samples)
{
- gint channels = scope->channels;
+ gint channels = base->channels;
guint i, c, s, x, y, oy;
gfloat dx, dy;
- guint w = scope->width;
+ guint w = base->width;
+ guint h = base->height;
/* draw dots */
dx = (gfloat) w / (gfloat) num_samples;
- dy = scope->height / 65536.0;
- oy = scope->height / 2;
+ dy = h / 65536.0;
+ oy = h / 2;
for (c = 0; c < channels; c++) {
s = c;
for (i = 0; i < num_samples; i++) {
@@ -210,14 +258,14 @@ render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
}
static void
-render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
+render_lines (GstBaseAudioVisualizer * base, guint32 * vdata, gint16 * adata,
guint num_samples)
{
- gint channels = scope->channels;
+ gint channels = base->channels;
guint i, c, s, x, y, oy;
gfloat dx, dy;
- guint w = scope->width;
- guint h = scope->height;
+ guint w = base->width;
+ guint h = base->height;
gint x2, y2;
/* draw lines */
@@ -239,6 +287,119 @@ render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
}
}
+#define CUTOFF_1 0.15
+#define CUTOFF_2 0.45
+#define RESONANCE (1.0/0.5)
+
+#define filter(in) G_STMT_START { \
+ flt[2] = in - (flt[1] * RESONANCE) - flt[0]; \
+ flt[1] += (flt[2] * CUTOFF_1); \
+ flt[0] += (flt[1] * CUTOFF_1); \
+ \
+ flt[5] = (flt[1] + flt[2]) - (flt[4] * RESONANCE) - flt[3]; \
+ flt[4] += (flt[5] * CUTOFF_2); \
+ flt[3] += (flt[4] * CUTOFF_2); \
+} G_STMT_END
+
+static void
+render_color_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
+ gint16 * adata, guint num_samples)
+{
+ GstWaveScope *scope = (GstWaveScope *) base;
+ gint channels = base->channels;
+ guint i, c, s, x, y, oy;
+ gfloat dx, dy;
+ guint w = base->width;
+ guint h = base->height, h1 = h - 2;
+ gdouble *flt = scope->flt;
+
+ /* draw dots */
+ dx = (gfloat) w / (gfloat) num_samples;
+ dy = h / 65536.0;
+ oy = h / 2;
+ for (c = 0; c < channels; c++) {
+ s = c;
+ for (i = 0; i < num_samples; i++) {
+ x = (guint) ((gfloat) i * dx);
+ filter ((gfloat) adata[s]);
+
+ y = (guint) (oy + flt[0] * dy);
+ y = CLAMP (y, 0, h1);
+ draw_dot_c (vdata, x, y, w, 0x00FF0000);
+
+ y = (guint) (oy + flt[3] * dy);
+ y = CLAMP (y, 0, h1);
+ draw_dot_c (vdata, x, y, w, 0x0000FF00);
+
+ y = (guint) (oy + (flt[4] + flt[5]) * dy);
+ y = CLAMP (y, 0, h1);
+ draw_dot_c (vdata, x, y, w, 0x000000FF);
+
+ s += channels;
+ }
+ flt += 6;
+ }
+}
+
+static void
+render_color_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
+ gint16 * adata, guint num_samples)
+{
+ GstWaveScope *scope = (GstWaveScope *) base;
+ gint channels = base->channels;
+ guint i, c, s, x, y, oy;
+ gfloat dx, dy;
+ guint w = base->width;
+ guint h = base->height, h1 = h - 2;
+ gdouble *flt = scope->flt;
+ gint x2, y2, y3, y4;
+
+ /* draw lines */
+ dx = (gfloat) (w - 1) / (gfloat) num_samples;
+ dy = (h - 1) / 65536.0;
+ oy = (h - 1) / 2;
+ for (c = 0; c < channels; c++) {
+ s = c;
+
+ /* do first pixels */
+ x2 = 0;
+ filter ((gfloat) adata[s]);
+
+ y = (guint) (oy + flt[0] * dy);
+ y2 = CLAMP (y, 0, h1);
+
+ y = (guint) (oy + flt[3] * dy);
+ y3 = CLAMP (y, 0, h1);
+
+ y = (guint) (oy + (flt[4] + flt[5]) * dy);
+ y4 = CLAMP (y, 0, h1);
+
+ for (i = 1; i < num_samples; i++) {
+ x = (guint) ((gfloat) i * dx);
+ filter ((gfloat) adata[s]);
+
+ y = (guint) (oy + flt[0] * dy);
+ y = CLAMP (y, 0, h1);
+ draw_line_aa (vdata, x2, x, y2, y, w, 0x00FF0000);
+ y2 = y;
+
+ y = (guint) (oy + flt[3] * dy);
+ y = CLAMP (y, 0, h1);
+ draw_line_aa (vdata, x2, x, y3, y, w, 0x0000FF00);
+ y3 = y;
+
+ y = (guint) (oy + (flt[4] + flt[5]) * dy);
+ y = CLAMP (y, 0, h1);
+ draw_line_aa (vdata, x2, x, y4, y, w, 0x000000FF);
+ y4 = y;
+
+ x2 = x;
+ s += channels;
+ }
+ flt += 6;
+ }
+}
+
static gboolean
gst_wave_scope_render (GstBaseAudioVisualizer * base, GstBuffer * audio,
GstBuffer * video)
diff --git a/gst/audiovisualizers/gstwavescope.h b/gst/audiovisualizers/gstwavescope.h
index ffe3411f5..42137f7cf 100644
--- a/gst/audiovisualizers/gstwavescope.h
+++ b/gst/audiovisualizers/gstwavescope.h
@@ -42,6 +42,9 @@ struct _GstWaveScope
/* < private > */
GstWaveScopeProcessFunc process;
gint style;
+
+ /* filter specific data */
+ gdouble *flt;
};
struct _GstWaveScopeClass
diff --git a/gst/autoconvert/gstautoconvert.c b/gst/autoconvert/gstautoconvert.c
index df094d85b..ef88c7038 100644
--- a/gst/autoconvert/gstautoconvert.c
+++ b/gst/autoconvert/gstautoconvert.c
@@ -45,8 +45,6 @@
GST_DEBUG_CATEGORY (autoconvert_debug);
#define GST_CAT_DEFAULT (autoconvert_debug)
-#define DEFAULT_INITIAL_IDENTITY FALSE
-
#define GST_AUTOCONVERT_LOCK(ac) GST_OBJECT_LOCK (ac)
#define GST_AUTOCONVERT_UNLOCK(ac) GST_OBJECT_UNLOCK (ac)
@@ -83,8 +81,7 @@ enum
enum
{
PROP_0,
- PROP_FACTORIES,
- PROP_INITIAL_IDENTITY
+ PROP_FACTORIES
};
static void gst_auto_convert_set_property (GObject * object,
@@ -103,6 +100,8 @@ static GstPad *gst_auto_convert_get_internal_sinkpad (GstAutoConvert *
static GstPad *gst_auto_convert_get_internal_srcpad (GstAutoConvert *
autoconvert);
+static GstIterator *gst_auto_convert_iterate_internal_links (GstPad * pad);
+
static gboolean gst_auto_convert_sink_setcaps (GstPad * pad, GstCaps * caps);
static GstCaps *gst_auto_convert_sink_getcaps (GstPad * pad);
static GstFlowReturn gst_auto_convert_sink_chain (GstPad * pad,
@@ -199,14 +198,6 @@ gst_auto_convert_class_init (GstAutoConvertClass * klass)
" elements), can only be set once",
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_INITIAL_IDENTITY,
- g_param_spec_boolean ("initial-identity",
- "Install identity initially",
- "If true, then the identity element will be installed initially "
- "and used for event passing until the first data buffer arrives ",
- DEFAULT_INITIAL_IDENTITY,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_auto_convert_change_state);
}
@@ -233,6 +224,8 @@ gst_auto_convert_init (GstAutoConvert * autoconvert,
GST_DEBUG_FUNCPTR (gst_auto_convert_sink_query_type));
gst_pad_set_bufferalloc_function (autoconvert->sinkpad,
GST_DEBUG_FUNCPTR (gst_auto_convert_sink_buffer_alloc));
+ gst_pad_set_iterate_internal_links_function (autoconvert->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_iterate_internal_links));
gst_pad_set_event_function (autoconvert->srcpad,
GST_DEBUG_FUNCPTR (gst_auto_convert_src_event));
@@ -240,13 +233,13 @@ gst_auto_convert_init (GstAutoConvert * autoconvert,
GST_DEBUG_FUNCPTR (gst_auto_convert_src_query));
gst_pad_set_query_type_function (autoconvert->srcpad,
GST_DEBUG_FUNCPTR (gst_auto_convert_src_query_type));
+ gst_pad_set_iterate_internal_links_function (autoconvert->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_iterate_internal_links));
gst_element_add_pad (GST_ELEMENT (autoconvert), autoconvert->sinkpad);
gst_element_add_pad (GST_ELEMENT (autoconvert), autoconvert->srcpad);
gst_segment_init (&autoconvert->sink_segment, GST_FORMAT_UNDEFINED);
-
- autoconvert->initial_identity = DEFAULT_INITIAL_IDENTITY;
}
static void
@@ -299,11 +292,6 @@ gst_auto_convert_set_property (GObject * object,
" have been set or auto-discovered");
GST_AUTOCONVERT_UNLOCK (autoconvert);
break;
- case PROP_INITIAL_IDENTITY:
- GST_AUTOCONVERT_LOCK (autoconvert);
- autoconvert->initial_identity = g_value_get_boolean (value);
- GST_AUTOCONVERT_UNLOCK (autoconvert);
- break;
}
}
@@ -322,11 +310,6 @@ gst_auto_convert_get_property (GObject * object,
g_value_set_pointer (value, &autoconvert->factories);
GST_AUTOCONVERT_UNLOCK (autoconvert);
break;
- case PROP_INITIAL_IDENTITY:
- GST_AUTOCONVERT_LOCK (autoconvert);
- g_value_set_boolean (value, autoconvert->initial_identity);
- GST_AUTOCONVERT_UNLOCK (autoconvert);
- break;
}
}
@@ -474,37 +457,12 @@ gst_auto_convert_get_subelement (GstAutoConvert * autoconvert,
gboolean query_only)
{
GstElement *element = NULL;
- gboolean initial_identity;
GST_AUTOCONVERT_LOCK (autoconvert);
if (autoconvert->current_subelement)
element = gst_object_ref (autoconvert->current_subelement);
- initial_identity = autoconvert->initial_identity;
GST_AUTOCONVERT_UNLOCK (autoconvert);
- if (G_UNLIKELY (!query_only && element == NULL && initial_identity)) {
- /* No current sub-element - create an identity and install it */
- GstElementFactory *identity_feature;
- GstElement *identity;
-
- GST_INFO_OBJECT (autoconvert,
- "No existing child element - instantiating identity");
- /* if the identity feature doesn't exist - something is very wrong */
- identity_feature =
- GST_ELEMENT_FACTORY_CAST (gst_default_registry_find_feature ("identity",
- GST_TYPE_ELEMENT_FACTORY));
- identity =
- gst_auto_convert_get_or_make_element_from_factory (autoconvert,
- identity_feature);
- if (identity
- && gst_auto_convert_activate_element (autoconvert, identity, NULL)) {
- GST_AUTOCONVERT_LOCK (autoconvert);
- if (autoconvert->current_subelement)
- element = gst_object_ref (autoconvert->current_subelement);
- GST_AUTOCONVERT_UNLOCK (autoconvert);
- }
- }
-
return element;
}
@@ -796,6 +754,32 @@ gst_auto_convert_activate_element (GstAutoConvert * autoconvert,
return TRUE;
}
+static GstIterator *
+gst_auto_convert_iterate_internal_links (GstPad * pad)
+{
+ GstAutoConvert *autoconvert = GST_AUTO_CONVERT (gst_pad_get_parent (pad));
+ GstIterator *it = NULL;
+ GstPad *internal;
+
+ if (!autoconvert)
+ return NULL;
+
+ if (pad == autoconvert->sinkpad)
+ internal = gst_auto_convert_get_internal_srcpad (autoconvert);
+ else
+ internal = gst_auto_convert_get_internal_sinkpad (autoconvert);
+
+ if (internal) {
+ it = gst_iterator_new_single (GST_TYPE_PAD, internal,
+ (GstCopyFunction) gst_object_ref, (GFreeFunc) gst_object_unref);
+ gst_object_unref (internal);
+ }
+
+ gst_object_unref (autoconvert);
+
+ return it;
+}
+
/*
* If there is already an internal element, it will try to call set_caps on it
*
@@ -1094,15 +1078,6 @@ gst_auto_convert_sink_event (GstPad * pad, GstEvent * event)
}
internal_srcpad = gst_auto_convert_get_internal_srcpad (autoconvert);
- if (internal_srcpad == NULL) {
- /* Query the subelement - triggers creation of an identity if necessary */
- GstElement *subelement =
- gst_auto_convert_get_subelement (autoconvert, FALSE);
- if (subelement)
- gst_object_unref (subelement);
- internal_srcpad = gst_auto_convert_get_internal_srcpad (autoconvert);
- }
-
if (internal_srcpad) {
ret = gst_pad_push_event (internal_srcpad, event);
gst_object_unref (internal_srcpad);
diff --git a/gst/autoconvert/gstautoconvert.h b/gst/autoconvert/gstautoconvert.h
index 612bcb18d..b2f3abfba 100644
--- a/gst/autoconvert/gstautoconvert.h
+++ b/gst/autoconvert/gstautoconvert.h
@@ -56,8 +56,6 @@ struct _GstAutoConvert
GList *cached_events;
GstSegment sink_segment;
gboolean drop_newseg;
-
- gboolean initial_identity;
};
struct _GstAutoConvertClass
diff --git a/gst/camerabin2/camerabingeneral.c b/gst/camerabin2/camerabingeneral.c
index ee39b9171..25b057e49 100644
--- a/gst/camerabin2/camerabingeneral.c
+++ b/gst/camerabin2/camerabingeneral.c
@@ -125,7 +125,8 @@ gst_camerabin_try_add_element (GstBin * bin, const gchar * srcpad,
GST_DEBUG_PAD_NAME (bin_pad));
bin_elem = gst_pad_get_parent_element (bin_pad);
gst_object_unref (bin_pad);
- if (!gst_element_link_pads (bin_elem, srcpad, new_elem, dstpad)) {
+ if (!gst_element_link_pads_full (bin_elem, srcpad, new_elem, dstpad,
+ GST_PAD_LINK_CHECK_CAPS)) {
gst_object_ref (new_elem);
gst_bin_remove (bin, new_elem);
ret = FALSE;
@@ -257,29 +258,3 @@ gst_camerabin_remove_elements_from_bin (GstBin * bin)
}
gst_iterator_free (iter);
}
-
-/**
- * gst_camerabin_drop_eos_probe:
- * @pad: pad receiving the event
- * @event: received event
- * @u_data: not used
- *
- * Event probe that drop all eos events.
- *
- * Returns: FALSE to drop the event, TRUE otherwise
- */
-gboolean
-gst_camerabin_drop_eos_probe (GstPad * pad, GstEvent * event, gpointer u_data)
-{
- gboolean ret = TRUE;
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_EOS:
- GST_DEBUG ("dropping eos in %s:%s", GST_DEBUG_PAD_NAME (pad));
- ret = FALSE;
- break;
- default:
- break;
- }
- return ret;
-}
diff --git a/gst/camerabin2/camerabingeneral.h b/gst/camerabin2/camerabingeneral.h
index 5c0815411..a7700457b 100644
--- a/gst/camerabin2/camerabingeneral.h
+++ b/gst/camerabin2/camerabingeneral.h
@@ -34,6 +34,4 @@ GstElement * gst_camerabin_setup_default_element (GstBin * bin, GstElement *user
void gst_camerabin_remove_elements_from_bin (GstBin * bin);
-gboolean gst_camerabin_drop_eos_probe (GstPad * pad, GstEvent * event, gpointer u_data);
-
#endif /* #ifndef __CAMERABIN_GENERAL_H_ */
diff --git a/gst/camerabin2/gstcamerabin2.c b/gst/camerabin2/gstcamerabin2.c
index 888ca92d8..12c52b0a2 100644
--- a/gst/camerabin2/gstcamerabin2.c
+++ b/gst/camerabin2/gstcamerabin2.c
@@ -430,6 +430,7 @@ gst_camera_bin_start_capture (GstCameraBin2 * camerabin)
g_signal_emit_by_name (camerabin->src, "start-capture", NULL);
if (camerabin->mode == MODE_VIDEO) {
+ camerabin->audio_send_newseg = TRUE;
if (camerabin->audio_src)
gst_element_set_state (camerabin->audio_src, GST_STATE_PLAYING);
@@ -1043,7 +1044,6 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
}
dec_counter = TRUE;
} else if (gst_structure_has_name (structure, "preview-image")) {
- GValue *value;
gchar *location = NULL;
g_mutex_lock (camerabin->preview_list_mutex);
@@ -1063,11 +1063,11 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
g_mutex_unlock (camerabin->preview_list_mutex);
if (location) {
- value = g_new0 (GValue, 1);
- g_value_init (value, G_TYPE_STRING);
- g_value_take_string (value, location);
+ GValue value = { 0 };
+ g_value_init (&value, G_TYPE_STRING);
+ g_value_take_string (&value, location);
gst_structure_take_value ((GstStructure *) structure, "location",
- value);
+ &value);
}
GST_LOG_OBJECT (bin, "received preview-image message");
@@ -1089,6 +1089,8 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
}
dec_counter = TRUE;
}
+ g_error_free (err);
+ g_free (debug);
}
break;
case GST_MESSAGE_EOS:{
@@ -1440,19 +1442,48 @@ gst_camera_bin_image_sink_event_probe (GstPad * pad, GstPadProbeInfo * info,
}
static GstPadProbeReturn
-gst_camera_bin_audio_src_event_probe (GstPad * pad, GstPadProbeInfo * info,
+gst_camera_bin_audio_src_data_probe (GstPad * pad, GstPadProbeInfo * info,
gpointer data)
{
GstCameraBin2 *camera = data;
gboolean ret = GST_PAD_PROBE_OK;
- GstEvent *event = GST_EVENT (info->data);
- if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
- /* we only let an EOS pass when the user is stopping a capture */
- if (camera->audio_drop_eos) {
+ if (GST_IS_BUFFER (data)) {
+ if (G_UNLIKELY (camera->audio_send_newseg)) {
+ GstBuffer *buf = GST_BUFFER_CAST (data);
+ GstClockTime ts = GST_BUFFER_TIMESTAMP (buf);
+ GstPad *peer;
+ GstSegment segment;
+
+ if (!GST_CLOCK_TIME_IS_VALID (ts)) {
+ ts = 0;
+ }
+
+ peer = gst_pad_get_peer (pad);
+ g_return_val_if_fail (peer != NULL, TRUE);
+
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ segment.start = ts;
+ gst_pad_send_event (peer, gst_event_new_segment (&segment));
+
+ gst_object_unref (peer);
+
+ camera->audio_send_newseg = FALSE;
+ }
+ } else {
+ GstEvent *event = GST_EVENT_CAST (data);
+ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+ /* we only let an EOS pass when the user is stopping a capture */
+ if (camera->audio_drop_eos) {
+ ret = GST_PAD_PROBE_DROP;
+ } else {
+ camera->audio_drop_eos = TRUE;
+ /* should already be false, but reinforce in case no buffers get
+ * pushed */
+ camera->audio_send_newseg = FALSE;
+ }
+ } else if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
ret = GST_PAD_PROBE_DROP;
- } else {
- camera->audio_drop_eos = TRUE;
}
}
@@ -1517,12 +1548,12 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
GstEncodingContainerProfile *prof;
GstCaps *caps;
- caps = gst_caps_new_simple ("application/ogg", NULL, NULL);
+ caps = gst_caps_new_empty_simple ("application/ogg");
prof = gst_encoding_container_profile_new ("ogg", "theora+vorbis+ogg",
caps, NULL);
gst_caps_unref (caps);
- caps = gst_caps_new_simple ("video/x-theora", NULL, NULL);
+ caps = gst_caps_new_empty_simple ("video/x-theora");
if (!gst_encoding_container_profile_add_profile (prof,
(GstEncodingProfile *) gst_encoding_video_profile_new (caps,
NULL, NULL, 1))) {
@@ -1530,7 +1561,7 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
}
gst_caps_unref (caps);
- caps = gst_caps_new_simple ("audio/x-vorbis", NULL, NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-vorbis");
if (!gst_encoding_container_profile_add_profile (prof,
(GstEncodingProfile *) gst_encoding_audio_profile_new (caps,
NULL, NULL, 1))) {
@@ -1569,7 +1600,7 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
GstEncodingVideoProfile *vprof;
GstCaps *caps;
- caps = gst_caps_new_simple ("image/jpeg", NULL, NULL);
+ caps = gst_caps_new_empty_simple ("image/jpeg");
vprof = gst_encoding_video_profile_new (caps, NULL, NULL, 1);
gst_encoding_video_profile_set_variableframerate (vprof, TRUE);
@@ -1803,15 +1834,17 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
gst_bin_add (GST_BIN_CAST (camera),
gst_object_ref (camera->audio_capsfilter));
- gst_element_link_many (camera->audio_src, camera->audio_volume,
- camera->audio_capsfilter, NULL);
+ gst_element_link_pads_full (camera->audio_src, "src",
+ camera->audio_volume, "sink", GST_PAD_LINK_CHECK_CAPS);
+ gst_element_link_pads_full (camera->audio_volume, "src",
+ camera->audio_capsfilter, "sink", GST_PAD_LINK_CHECK_CAPS);
srcpad = gst_element_get_static_pad (camera->audio_src, "src");
/* drop EOS for audiosrc elements that push them on state_changes
* (basesrc does this) */
- gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
- gst_camera_bin_audio_src_event_probe, gst_object_ref (camera),
+ gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM,
+ gst_camera_bin_audio_src_data_probe, gst_object_ref (camera),
gst_object_unref);
gst_object_unref (srcpad);
@@ -1870,6 +1903,7 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans)
case GST_STATE_CHANGE_READY_TO_PAUSED:
GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER (camera);
camera->audio_drop_eos = TRUE;
+ camera->audio_send_newseg = FALSE;
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
if (GST_STATE (camera->videosink) >= GST_STATE_PAUSED)
@@ -1939,6 +1973,9 @@ gst_camera_bin_send_event (GstElement * element, GstEvent * event)
GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (element);
gboolean res;
+ /* avoid losing our ref to send_event */
+ gst_event_ref (event);
+
res = GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
@@ -1964,6 +2001,7 @@ gst_camera_bin_send_event (GstElement * element, GstEvent * event)
break;
}
+ gst_event_unref (event);
return res;
}
diff --git a/gst/camerabin2/gstcamerabin2.h b/gst/camerabin2/gstcamerabin2.h
index 4c509bb90..29e1c9a58 100644
--- a/gst/camerabin2/gstcamerabin2.h
+++ b/gst/camerabin2/gstcamerabin2.h
@@ -126,6 +126,7 @@ struct _GstCameraBin2
gboolean image_profile_switch;
gboolean audio_drop_eos;
+ gboolean audio_send_newseg;
GMutex *video_capture_mutex;
GCond *video_state_cond;
diff --git a/gst/camerabin2/gstviewfinderbin.c b/gst/camerabin2/gstviewfinderbin.c
index 78d5a6e11..90c78d5ab 100644
--- a/gst/camerabin2/gstviewfinderbin.c
+++ b/gst/camerabin2/gstviewfinderbin.c
@@ -185,11 +185,11 @@ gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
/* remove the elements, user doesn't want them */
gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);
- csp = gst_bin_get_by_name (GST_BIN (vfbin), "vfbin-csp");
- videoscale = gst_bin_get_by_name (GST_BIN (vfbin), "vfbin-videoscale");
+ csp = gst_bin_get_by_name (GST_BIN_CAST (vfbin), "vfbin-csp");
+ videoscale = gst_bin_get_by_name (GST_BIN_CAST (vfbin), "vfbin-videoscale");
- gst_bin_remove (GST_BIN (vfbin), csp);
- gst_bin_remove (GST_BIN (vfbin), videoscale);
+ gst_bin_remove (GST_BIN_CAST (vfbin), csp);
+ gst_bin_remove (GST_BIN_CAST (vfbin), videoscale);
gst_object_unref (csp);
gst_object_unref (videoscale);
@@ -199,21 +199,22 @@ gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);
/* add the elements, user wants them */
- csp =
- gst_camerabin_create_and_add_element (GST_BIN (vfbin),
- "ffmpegcolorspace", "vfbin-csp");
+ csp = gst_element_factory_make ("ffmpegcolorspace", "vfbin-csp");
if (!csp) {
missing_element_name = "ffmpegcolorspace";
goto missing_element;
}
+ gst_bin_add (GST_BIN_CAST (vfbin), csp);
- videoscale =
- gst_camerabin_create_and_add_element (GST_BIN (vfbin), "videoscale",
- "vfbin-videoscale");
+ videoscale = gst_element_factory_make ("videoscale", "vfbin->videoscale");
if (!videoscale) {
missing_element_name = "videoscale";
goto missing_element;
}
+ gst_bin_add (GST_BIN_CAST (vfbin), videoscale);
+
+ gst_element_link_pads_full (csp, "src", videoscale, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
vfbin->elements_created = TRUE;
GST_DEBUG_OBJECT (vfbin, "Elements succesfully created and linked");
@@ -231,7 +232,8 @@ gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
unref = TRUE;
}
- if (!gst_element_link_pads (videoscale, "src", vfbin->video_sink, "sink")) {
+ if (!gst_element_link_pads_full (videoscale, "src", vfbin->video_sink,
+ "sink", GST_PAD_LINK_CHECK_CAPS)) {
GST_ELEMENT_ERROR (vfbin, CORE, NEGOTIATION, (NULL),
("linking videoscale and viewfindersink failed"));
}
diff --git a/gst/camerabin2/gstwrappercamerabinsrc.c b/gst/camerabin2/gstwrappercamerabinsrc.c
index 4ad5d2fad..e0f52df40 100644
--- a/gst/camerabin2/gstwrappercamerabinsrc.c
+++ b/gst/camerabin2/gstwrappercamerabinsrc.c
@@ -31,6 +31,8 @@
# include <config.h>
#endif
+#include <gst/interfaces/photography.h>
+
#include "gstwrappercamerabinsrc.h"
#include "camerabingeneral.h"
@@ -257,9 +259,19 @@ gst_wrapper_camera_bin_src_vidsrc_probe (GstPad * pad, GstPadProbeInfo * info,
if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_DONE) {
/* NOP */
} else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_STARTING) {
+ GstClockTime ts;
+ GstSegment segment;
+
GST_DEBUG_OBJECT (self, "Starting video recording");
self->video_rec_status = GST_VIDEO_RECORDING_STATUS_RUNNING;
+ ts = GST_BUFFER_TIMESTAMP (buffer);
+ if (!GST_CLOCK_TIME_IS_VALID (ts))
+ ts = 0;
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ segment.start = ts;
+ gst_pad_push_event (self->vidsrc, gst_event_new_segment (&segment));
+
/* post preview */
GST_DEBUG_OBJECT (self, "Posting preview for video");
gst_base_camera_src_post_preview (camerasrc, buffer);
@@ -706,7 +718,9 @@ static gboolean
start_image_capture (GstWrapperCameraBinSrc * self)
{
GstBaseCameraSrc *bcamsrc = GST_BASE_CAMERA_SRC (self);
- GstPhotography *photography = gst_base_camera_src_get_photography (bcamsrc);
+ GstPhotography *photography =
+ (GstPhotography *) gst_bin_get_by_interface (GST_BIN_CAST (bcamsrc),
+ GST_TYPE_PHOTOGRAPHY);
gboolean ret = FALSE;
GstCaps *caps;
@@ -747,7 +761,9 @@ static gboolean
gst_wrapper_camera_bin_src_set_mode (GstBaseCameraSrc * bcamsrc,
GstCameraBinMode mode)
{
- GstPhotography *photography = gst_base_camera_src_get_photography (bcamsrc);
+ GstPhotography *photography =
+ (GstPhotography *) gst_bin_get_by_interface (GST_BIN_CAST (bcamsrc),
+ GST_TYPE_PHOTOGRAPHY);
GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);
if (self->output_selector) {
diff --git a/gst/debugutils/gstcompare.c b/gst/debugutils/gstcompare.c
index 29feaaa8e..7e4a5c11a 100644
--- a/gst/debugutils/gstcompare.c
+++ b/gst/debugutils/gstcompare.c
@@ -26,7 +26,7 @@
#include <string.h>
#include <gst/gst.h>
-#include <gst/base/gstcollectpads.h>
+#include <gst/base/gstcollectpads2.h>
#include <gst/video/video.h>
#include "gstcompare.h"
@@ -109,7 +109,7 @@ static void gst_compare_get_property (GObject * object,
static void gst_compare_reset (GstCompare * overlay);
static GstCaps *gst_compare_getcaps (GstPad * pad);
-static GstFlowReturn gst_compare_collect_pads (GstCollectPads * cpads,
+static GstFlowReturn gst_compare_collect_pads (GstCollectPads2 * cpads,
GstCompare * comp);
static GstStateChangeReturn gst_compare_change_state (GstElement * element,
@@ -189,9 +189,9 @@ gst_compare_class_init (GstCompareClass * klass)
static void
gst_compare_init (GstCompare * comp, GstCompareClass * klass)
{
- comp->cpads = gst_collect_pads_new ();
- gst_collect_pads_set_function (comp->cpads,
- (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_compare_collect_pads),
+ comp->cpads = gst_collect_pads2_new ();
+ gst_collect_pads2_set_function (comp->cpads,
+ (GstCollectPads2Function) GST_DEBUG_FUNCPTR (gst_compare_collect_pads),
comp);
comp->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
@@ -203,10 +203,10 @@ gst_compare_init (GstCompare * comp, GstCompareClass * klass)
gst_pad_set_getcaps_function (comp->checkpad, gst_compare_getcaps);
gst_element_add_pad (GST_ELEMENT (comp), comp->checkpad);
- gst_collect_pads_add_pad (comp->cpads, comp->sinkpad,
- sizeof (GstCollectData), NULL);
- gst_collect_pads_add_pad (comp->cpads, comp->checkpad,
- sizeof (GstCollectData), NULL);
+ gst_collect_pads2_add_pad_full (comp->cpads, comp->sinkpad,
+ sizeof (GstCollectData2), NULL, TRUE);
+ gst_collect_pads2_add_pad_full (comp->cpads, comp->checkpad,
+ sizeof (GstCollectData2), NULL, TRUE);
comp->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
gst_pad_set_getcaps_function (comp->srcpad, gst_compare_getcaps);
@@ -539,14 +539,14 @@ gst_compare_buffers (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2)
}
static GstFlowReturn
-gst_compare_collect_pads (GstCollectPads * cpads, GstCompare * comp)
+gst_compare_collect_pads (GstCollectPads2 * cpads, GstCompare * comp)
{
GstBuffer *buf1, *buf2;
- buf1 = gst_collect_pads_pop (comp->cpads,
+ buf1 = gst_collect_pads2_pop (comp->cpads,
gst_pad_get_element_private (comp->sinkpad));
- buf2 = gst_collect_pads_pop (comp->cpads,
+ buf2 = gst_collect_pads2_pop (comp->cpads,
gst_pad_get_element_private (comp->checkpad));
if (!buf1 && !buf2) {
@@ -638,10 +638,10 @@ gst_compare_change_state (GstElement * element, GstStateChange transition)
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
case GST_STATE_CHANGE_READY_TO_PAUSED:
- gst_collect_pads_start (comp->cpads);
+ gst_collect_pads2_start (comp->cpads);
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_collect_pads_stop (comp->cpads);
+ gst_collect_pads2_stop (comp->cpads);
break;
default:
break;
diff --git a/gst/debugutils/gstcompare.h b/gst/debugutils/gstcompare.h
index bee5108e3..65a9658a1 100644
--- a/gst/debugutils/gstcompare.h
+++ b/gst/debugutils/gstcompare.h
@@ -52,7 +52,7 @@ struct _GstCompare {
GstPad *sinkpad;
GstPad *checkpad;
- GstCollectPads *cpads;
+ GstCollectPads2 *cpads;
gint count;
diff --git a/gst/frei0r/gstfrei0rmixer.c b/gst/frei0r/gstfrei0rmixer.c
index c337e07aa..fd8ff7e60 100644
--- a/gst/frei0r/gstfrei0rmixer.c
+++ b/gst/frei0r/gstfrei0rmixer.c
@@ -120,7 +120,7 @@ gst_frei0r_mixer_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
- gst_collect_pads_start (self->collect);
+ gst_collect_pads2_start (self->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
@@ -129,11 +129,11 @@ gst_frei0r_mixer_change_state (GstElement * element, GstStateChange transition)
}
/* Stop before calling the parent's state change function as
- * GstCollectPads might take locks and we would deadlock in that
+ * GstCollectPads2 might take locks and we would deadlock in that
* case
*/
if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
- gst_collect_pads_stop (self->collect);
+ gst_collect_pads2_stop (self->collect);
ret =
GST_ELEMENT_CLASS (g_type_class_peek_parent (klass))->change_state
@@ -488,7 +488,7 @@ gst_frei0r_mixer_src_event (GstPad * pad, GstEvent * event)
/* check if we are flushing */
if (flags & GST_SEEK_FLAG_FLUSH) {
/* make sure we accept nothing anymore and return WRONG_STATE */
- gst_collect_pads_set_flushing (self->collect, TRUE);
+ gst_collect_pads2_set_flushing (self->collect, TRUE);
/* flushing seek, start flush downstream, the flush will be done
* when all pads received a FLUSH_STOP. */
@@ -532,7 +532,7 @@ gst_frei0r_mixer_sink0_event (GstPad * pad, GstEvent * event)
break;
}
- /* now GstCollectPads can take care of the rest, e.g. EOS */
+ /* now GstCollectPads2 can take care of the rest, e.g. EOS */
ret = self->collect_event (pad, event);
gst_object_unref (self);
@@ -541,7 +541,7 @@ gst_frei0r_mixer_sink0_event (GstPad * pad, GstEvent * event)
}
static GstFlowReturn
-gst_frei0r_mixer_collected (GstCollectPads * pads, GstFrei0rMixer * self)
+gst_frei0r_mixer_collected (GstCollectPads2 * pads, GstFrei0rMixer * self)
{
GstBuffer *inbuf0 = NULL, *inbuf1 = NULL, *inbuf2 = NULL;
GstBuffer *outbuf = NULL;
@@ -575,15 +575,15 @@ gst_frei0r_mixer_collected (GstCollectPads * pads, GstFrei0rMixer * self)
return ret;
for (l = pads->data; l; l = l->next) {
- GstCollectData *cdata = l->data;
+ GstCollectData2 *cdata = l->data;
if (cdata->pad == self->sink0) {
- inbuf0 = gst_collect_pads_pop (pads, cdata);
+ inbuf0 = gst_collect_pads2_pop (pads, cdata);
segment = &cdata->segment;
} else if (cdata->pad == self->sink1) {
- inbuf1 = gst_collect_pads_pop (pads, cdata);
+ inbuf1 = gst_collect_pads2_pop (pads, cdata);
} else if (cdata->pad == self->sink2) {
- inbuf2 = gst_collect_pads_pop (pads, cdata);
+ inbuf2 = gst_collect_pads2_pop (pads, cdata);
}
}
@@ -675,22 +675,26 @@ gst_frei0r_mixer_class_init (GstFrei0rMixerClass * klass,
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (gstelement_class, templ);
+ gst_object_unref (templ);
templ =
gst_pad_template_new ("sink_0", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (gstelement_class, templ);
+ gst_object_unref (templ);
templ =
gst_pad_template_new ("sink_1", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (gstelement_class, templ);
+ gst_object_unref (templ);
if (klass->info->plugin_type == F0R_PLUGIN_TYPE_MIXER3) {
templ =
gst_pad_template_new ("sink_2", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (gstelement_class, templ);
+ gst_object_unref (templ);
}
gst_caps_unref (caps);
@@ -704,9 +708,9 @@ gst_frei0r_mixer_init (GstFrei0rMixer * self, GstFrei0rMixerClass * klass)
self->property_cache =
gst_frei0r_property_cache_init (klass->properties, klass->n_properties);
- self->collect = gst_collect_pads_new ();
- gst_collect_pads_set_function (self->collect,
- (GstCollectPadsFunction) gst_frei0r_mixer_collected, self);
+ self->collect = gst_collect_pads2_new ();
+ gst_collect_pads2_set_function (self->collect,
+ (GstCollectPads2Function) gst_frei0r_mixer_collected, self);
self->src =
gst_pad_new_from_template (gst_element_class_get_pad_template
@@ -730,8 +734,8 @@ gst_frei0r_mixer_init (GstFrei0rMixer * self, GstFrei0rMixerClass * klass)
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_set_caps));
gst_pad_set_query_function (self->sink0,
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_sink_query));
- gst_collect_pads_add_pad (self->collect, self->sink0,
- sizeof (GstCollectData), NULL);
+ gst_collect_pads2_add_pad (self->collect, self->sink0,
+ sizeof (GstCollectData2));
self->collect_event = (GstPadEventFunction) GST_PAD_EVENTFUNC (self->sink0);
gst_pad_set_event_function (self->sink0,
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_sink0_event));
@@ -746,8 +750,8 @@ gst_frei0r_mixer_init (GstFrei0rMixer * self, GstFrei0rMixerClass * klass)
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_set_caps));
gst_pad_set_query_function (self->sink0,
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_sink_query));
- gst_collect_pads_add_pad (self->collect, self->sink1,
- sizeof (GstCollectData), NULL);
+ gst_collect_pads2_add_pad (self->collect, self->sink1,
+ sizeof (GstCollectData2));
gst_element_add_pad (GST_ELEMENT_CAST (self), self->sink1);
if (klass->info->plugin_type == F0R_PLUGIN_TYPE_MIXER3) {
@@ -760,8 +764,8 @@ gst_frei0r_mixer_init (GstFrei0rMixer * self, GstFrei0rMixerClass * klass)
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_set_caps));
gst_pad_set_query_function (self->sink0,
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_sink_query));
- gst_collect_pads_add_pad (self->collect, self->sink2,
- sizeof (GstCollectData), NULL);
+ gst_collect_pads2_add_pad (self->collect, self->sink2,
+ sizeof (GstCollectData2));
gst_element_add_pad (GST_ELEMENT_CAST (self), self->sink2);
}
diff --git a/gst/frei0r/gstfrei0rmixer.h b/gst/frei0r/gstfrei0rmixer.h
index 7cb66005e..19bb2d987 100644
--- a/gst/frei0r/gstfrei0rmixer.h
+++ b/gst/frei0r/gstfrei0rmixer.h
@@ -22,7 +22,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
-#include <gst/base/gstcollectpads.h>
+#include <gst/base/gstcollectpads2.h>
#include "frei0r.h"
#include "gstfrei0r.h"
@@ -42,7 +42,7 @@ typedef struct _GstFrei0rMixerClass GstFrei0rMixerClass;
struct _GstFrei0rMixer {
GstElement parent;
- GstCollectPads *collect;
+ GstCollectPads2 *collect;
GstPad *src;
GstPad *sink0, *sink1, *sink2;
diff --git a/gst/liveadder/liveadder.c b/gst/liveadder/liveadder.c
index c56320e96..ec232a709 100644
--- a/gst/liveadder/liveadder.c
+++ b/gst/liveadder/liveadder.c
@@ -1091,10 +1091,11 @@ gst_live_live_adder_chain (GstPad * pad, GstBuffer * buffer)
if (skip) {
GstClockTime subbuffer_duration = GST_BUFFER_DURATION (buffer) - skip;
GstClockTime subbuffer_ts = GST_BUFFER_TIMESTAMP (buffer) + skip;
-
- buffer = gst_buffer_create_sub (buffer,
+ GstBuffer *new_buffer = gst_buffer_create_sub (buffer,
gst_live_adder_length_from_duration (adder, skip),
gst_live_adder_length_from_duration (adder, subbuffer_duration));
+ gst_buffer_unref (buffer);
+ buffer = new_buffer;
GST_BUFFER_TIMESTAMP (buffer) = subbuffer_ts;
GST_BUFFER_DURATION (buffer) = subbuffer_duration;
}
diff --git a/gst/mpeg4videoparse/Makefile.am b/gst/mpeg4videoparse/Makefile.am
deleted file mode 100644
index 8259ed54f..000000000
--- a/gst/mpeg4videoparse/Makefile.am
+++ /dev/null
@@ -1,24 +0,0 @@
-
-plugin_LTLIBRARIES = libgstmpeg4videoparse.la
-
-libgstmpeg4videoparse_la_SOURCES = mpeg4videoparse.c mpeg4parse.c
-libgstmpeg4videoparse_la_CFLAGS = $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-libgstmpeg4videoparse_la_LIBADD = $(GST_BASE_LIBS) $(GST_LIBS)
-libgstmpeg4videoparse_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
-libgstmpeg4videoparse_la_LIBTOOLFLAGS = --tag=disable-static
-
-noinst_HEADERS = mpeg4videoparse.h mpeg4parse.h
-
-Android.mk: Makefile.am $(BUILT_SOURCES)
- androgenizer \
- -:PROJECT libgstmpeg4videoparse -:SHARED libgstmpeg4videoparse \
- -:TAGS eng debug \
- -:REL_TOP $(top_srcdir) -:ABS_TOP $(abs_top_srcdir) \
- -:SOURCES $(libgstmpeg4videoparse_la_SOURCES) \
- -:CFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstmpeg4videoparse_la_CFLAGS) \
- -:LDFLAGS $(libgstmpeg4videoparse_la_LDFLAGS) \
- $(libgstmpeg4videoparse_la_LIBADD) \
- -ldl \
- -:PASSTHROUGH LOCAL_ARM_MODE:=arm \
- LOCAL_MODULE_PATH:='$$(TARGET_OUT)/lib/gstreamer-0.10' \
- > $@
diff --git a/gst/mpeg4videoparse/mpeg4parse.c b/gst/mpeg4videoparse/mpeg4parse.c
deleted file mode 100644
index 5c8ce9574..000000000
--- a/gst/mpeg4videoparse/mpeg4parse.c
+++ /dev/null
@@ -1,294 +0,0 @@
-/* GStreamer MPEG4-2 video Parser
- * Copyright (C) <2008> Mindfruit B.V.
- * @author Sjoerd Simons <sjoerd@luon.net>
- * Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-# include "config.h"
-#endif
-
-#include "mpeg4parse.h"
-
-#include <gst/base/gstbitreader.h>
-
-GST_DEBUG_CATEGORY_EXTERN (mpeg4v_parse_debug);
-#define GST_CAT_DEFAULT mpeg4v_parse_debug
-
-
-#define GET_BITS(b, num, bits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint32(b, bits, num)) \
- goto failed; \
- GST_TRACE ("parsed %d bits: %d", num, *(bits)); \
-} G_STMT_END
-
-#define MARKER_BIT(b) G_STMT_START { \
- guint32 i; \
- GET_BITS(b, 1, &i); \
- if (i != 0x1) \
- goto failed; \
-} G_STMT_END
-
-static inline gboolean
-next_start_code (GstBitReader * b)
-{
- guint32 bits = 0;
-
- GET_BITS (b, 1, &bits);
- if (bits != 0)
- goto failed;
-
- while (b->bit != 0) {
- GET_BITS (b, 1, &bits);
- if (bits != 0x1)
- goto failed;
- }
-
- return TRUE;
-
-failed:
- return FALSE;
-}
-
-static inline gboolean
-skip_user_data (GstBitReader * bs, guint32 * bits)
-{
- while (*bits == MPEG4_USER_DATA_STARTCODE_MARKER) {
- guint32 b = 0;
-
- do {
- GET_BITS (bs, 8, &b);
- *bits = (*bits << 8) | b;
- } while ((*bits >> 8) != MPEG4_START_MARKER);
- }
-
- return TRUE;
-
-failed:
- return FALSE;
-}
-
-
-static gint aspect_ratio_table[6][2] = {
- {-1, -1}, {1, 1}, {12, 11}, {10, 11}, {16, 11}, {40, 33}
-};
-
-static gboolean
-gst_mpeg4_params_parse_vo (MPEG4Params * params, GstBitReader * br)
-{
- guint32 bits;
- guint16 time_increment_resolution = 0;
- guint16 fixed_time_increment = 0;
- gint aspect_ratio_width = -1, aspect_ratio_height = -1;
- gint height = -1, width = -1;
-
- /* expecting a video object startcode */
- GET_BITS (br, 32, &bits);
- if (bits > 0x11F)
- goto failed;
-
- /* expecting a video object layer startcode */
- GET_BITS (br, 32, &bits);
- if (bits < 0x120 || bits > 0x12F)
- goto failed;
-
- /* ignore random accessible vol and video object type indication */
- GET_BITS (br, 9, &bits);
-
- GET_BITS (br, 1, &bits);
- if (bits) {
- /* skip video object layer verid and priority */
- GET_BITS (br, 7, &bits);
- }
-
- /* aspect ratio info */
- GET_BITS (br, 4, &bits);
- if (bits == 0)
- goto failed;
-
- /* check if aspect ratio info is extended par */
- if (bits == 0xf) {
- GET_BITS (br, 8, &bits);
- aspect_ratio_width = bits;
- GET_BITS (br, 8, &bits);
- aspect_ratio_height = bits;
- } else if (bits < 0x6) {
- aspect_ratio_width = aspect_ratio_table[bits][0];
- aspect_ratio_height = aspect_ratio_table[bits][1];
- }
- GST_DEBUG ("aspect ratio %d/%d", aspect_ratio_width, aspect_ratio_height);
-
- GET_BITS (br, 1, &bits);
- if (bits) {
- /* vol control parameters, skip chroma and low delay */
- GET_BITS (br, 3, &bits);
- GET_BITS (br, 1, &bits);
- if (bits) {
- /* skip vbv_parameters */
- if (!gst_bit_reader_skip (br, 79))
- goto failed;
- }
- }
-
- /* layer shape */
- GET_BITS (br, 2, &bits);
- /* only support rectangular */
- if (bits != 0)
- goto failed;
-
- MARKER_BIT (br);
- GET_BITS (br, 16, &bits);
- time_increment_resolution = bits;
- MARKER_BIT (br);
-
- GST_DEBUG ("time increment resolution %d", time_increment_resolution);
-
- GET_BITS (br, 1, &bits);
- if (bits) {
- /* fixed time increment */
- int n;
-
- /* Length of the time increment is the minimal number of bits needed to
- * represent time_increment_resolution-1 */
- for (n = 0; ((time_increment_resolution - 1) >> n) != 0; n++);
- GET_BITS (br, n, &bits);
-
- fixed_time_increment = bits;
- } else {
- /* When fixed_vop_rate is not set we can't guess any framerate */
- fixed_time_increment = 0;
- }
- GST_DEBUG ("fixed time increment %d", fixed_time_increment);
-
- /* assuming rectangular shape */
- MARKER_BIT (br);
- GET_BITS (br, 13, &bits);
- width = bits;
- MARKER_BIT (br);
- GET_BITS (br, 13, &bits);
- height = bits;
- MARKER_BIT (br);
- GST_DEBUG ("width x height: %d x %d", width, height);
-
- /* so we got it all, report back */
- params->width = width;
- params->height = height;
- params->time_increment_resolution = time_increment_resolution;
- params->fixed_time_increment = fixed_time_increment;
- params->aspect_ratio_width = aspect_ratio_width;
- params->aspect_ratio_height = aspect_ratio_height;
-
- return TRUE;
-
- /* ERRORS */
-failed:
- {
- GST_WARNING ("Failed to parse config data");
- return FALSE;
- }
-}
-
-static gboolean
-gst_mpeg4_params_parse_vos (MPEG4Params * params, GstBitReader * br)
-{
- guint32 bits = 0;
-
- GET_BITS (br, 32, &bits);
- if (bits != MPEG4_VOS_STARTCODE_MARKER)
- goto failed;
-
- GET_BITS (br, 8, &bits);
- params->profile = bits;
-
- /* invalid profile, warn but carry on */
- if (params->profile == 0) {
- GST_WARNING ("Invalid profile in VOS");
- }
-
- /* Expect Visual Object startcode */
- GET_BITS (br, 32, &bits);
-
- /* but skip optional user data */
- if (!skip_user_data (br, &bits))
- goto failed;
-
- if (bits != MPEG4_VISUAL_OBJECT_STARTCODE_MARKER)
- goto failed;
-
- GET_BITS (br, 1, &bits);
- if (bits == 0x1) {
- /* Skip visual_object_verid and priority */
- GET_BITS (br, 7, &bits);
- }
-
- GET_BITS (br, 4, &bits);
- /* Only support video ID */
- if (bits != 0x1)
- goto failed;
-
- /* video signal type */
- GET_BITS (br, 1, &bits);
-
- if (bits == 0x1) {
- /* video signal type, ignore format and range */
- GET_BITS (br, 4, &bits);
-
- GET_BITS (br, 1, &bits);
- if (bits == 0x1) {
- /* ignore color description */
- GET_BITS (br, 24, &bits);
- }
- }
-
- if (!next_start_code (br))
- goto failed;
-
- /* skip optional user data */
- GET_BITS (br, 32, &bits);
- if (!skip_user_data (br, &bits))
- goto failed;
-
- /* rewind to start code */
- gst_bit_reader_set_pos (br, gst_bit_reader_get_pos (br) - 32);
-
- return gst_mpeg4_params_parse_vo (params, br);
-
- /* ERRORS */
-failed:
- {
- GST_WARNING ("Failed to parse config data");
- return FALSE;
- }
-}
-
-gboolean
-gst_mpeg4_params_parse_config (MPEG4Params * params, const guint8 * data,
- guint size)
-{
- GstBitReader br;
-
- if (size < 4)
- return FALSE;
-
- gst_bit_reader_init (&br, data, size);
-
- if (data[3] == MPEG4_VOS_STARTCODE)
- return gst_mpeg4_params_parse_vos (params, &br);
- else
- return gst_mpeg4_params_parse_vo (params, &br);
-}
diff --git a/gst/mpeg4videoparse/mpeg4parse.h b/gst/mpeg4videoparse/mpeg4parse.h
deleted file mode 100644
index cf79e8872..000000000
--- a/gst/mpeg4videoparse/mpeg4parse.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/* GStreamer MPEG4-2 video Parser
- * Copyright (C) <2008> Mindfruit B.V.
- * @author Sjoerd Simons <sjoerd@luon.net>
- * Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_MPEG4_PARAMS_H__
-#define __GST_MPEG4_PARAMS_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define MPEG4_VIDEO_OBJECT_STARTCODE_MIN 0x00
-#define MPEG4_VIDEO_OBJECT_STARTCODE_MAX 0x1F
-#define MPEG4_VOS_STARTCODE 0xB0
-#define MPEG4_VOS_ENDCODE 0xB1
-#define MPEG4_USER_DATA_STARTCODE 0xB2
-#define MPEG4_GOP_STARTCODE 0xB3
-#define MPEG4_VISUAL_OBJECT_STARTCODE 0xB5
-#define MPEG4_VOP_STARTCODE 0xB6
-
-#define MPEG4_START_MARKER 0x000001
-#define MPEG4_VISUAL_OBJECT_STARTCODE_MARKER \
- ((MPEG4_START_MARKER << 8) + MPEG4_VISUAL_OBJECT_STARTCODE)
-#define MPEG4_VOS_STARTCODE_MARKER \
- ((MPEG4_START_MARKER << 8) + MPEG4_VOS_STARTCODE)
-#define MPEG4_USER_DATA_STARTCODE_MARKER \
- ((MPEG4_START_MARKER << 8) + MPEG4_USER_DATA_STARTCODE)
-
-
-typedef struct _MPEG4Params MPEG4Params;
-
-struct _MPEG4Params
-{
- gint profile;
-
- gint width, height;
- gint aspect_ratio_width, aspect_ratio_height;
- gint time_increment_resolution;
- gint fixed_time_increment;
-};
-
-GstFlowReturn gst_mpeg4_params_parse_config (MPEG4Params * params,
- const guint8 * data, guint size);
-
-G_END_DECLS
-#endif
diff --git a/gst/mpegdemux/gstmpegdemux.c b/gst/mpegdemux/gstmpegdemux.c
index a73f8f95f..55a567eb0 100644
--- a/gst/mpegdemux/gstmpegdemux.c
+++ b/gst/mpegdemux/gstmpegdemux.c
@@ -1607,7 +1607,7 @@ gst_flups_demux_parse_pack_start (GstFluPSDemux * demux)
/* adjustment of the SCR */
if (G_LIKELY (demux->current_scr != G_MAXUINT64)) {
- gint64 diff;
+ guint64 diff;
guint64 old_scr, old_mux_rate, bss, adjust = 0;
/* keep SCR of the previous packet */
diff --git a/gst/mpegdemux/gstmpegtsdemux.c b/gst/mpegdemux/gstmpegtsdemux.c
index 394d0b36d..5ee8daeb1 100644
--- a/gst/mpegdemux/gstmpegtsdemux.c
+++ b/gst/mpegdemux/gstmpegtsdemux.c
@@ -191,6 +191,7 @@ static gboolean gst_mpegts_demux_src_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_mpegts_demux_chain (GstPad * pad, GstBuffer * buffer);
static gboolean gst_mpegts_demux_sink_setcaps (GstPad * pad, GstCaps * caps);
+static gboolean gst_mpegts_demux_is_live (GstMpegTSDemux * demux);
static GstClock *gst_mpegts_demux_provide_clock (GstElement * element);
static gboolean gst_mpegts_demux_src_pad_query (GstPad * pad, GstQuery * query);
static const GstQueryType *gst_mpegts_demux_src_pad_query_type (GstPad * pad);
@@ -1127,7 +1128,7 @@ gst_mpegts_demux_add_all_streams (GstMpegTSDemux * demux, GstClockTime pts)
}
if (!gst_mpegts_demux_fill_stream (stream, stream->filter.id,
stream->stream_type)) {
- GST_ERROR ("Unknown type for PID 0x%04x", stream->PID);
+ GST_WARNING_OBJECT (demux, "Unknown type for PID 0x%04x", stream->PID);
/* ignore */
continue;
}
@@ -1279,12 +1280,14 @@ gst_mpegts_demux_data_cb (GstPESFilter * filter, gboolean first,
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (demux->in_gap))) {
if (GST_CLOCK_TIME_IS_VALID (demux->first_buf_ts)
- && GST_CLOCK_TIME_IS_VALID (filter->pts)) {
+ && GST_CLOCK_TIME_IS_VALID (filter->pts)
+ && gst_mpegts_demux_is_live (demux)) {
int i;
GstClockTime pts = GST_CLOCK_TIME_NONE;
for (i = 0; i < MPEGTS_MAX_PID + 1; i++) {
GstMpegTSStream *stream = demux->streams[i];
- if (stream && (pts == GST_CLOCK_TIME_NONE || stream->last_time < pts)) {
+ if (stream && stream->last_time > 0 && (pts == GST_CLOCK_TIME_NONE
+ || stream->last_time < pts)) {
pts = stream->last_time;
}
}
@@ -2952,14 +2955,12 @@ gst_mpegts_demux_sink_event (GstPad * pad, GstEvent * event)
}
static gboolean
-gst_mpegts_demux_provides_clock (GstElement * element)
+gst_mpegts_demux_is_live (GstMpegTSDemux * demux)
{
- GstMpegTSDemux *demux;
GstQuery *query;
gboolean is_live = FALSE;
GstPad *peer;
- demux = GST_MPEGTS_DEMUX (element);
query = gst_query_new_latency ();
peer = gst_pad_get_peer (demux->sinkpad);
@@ -2973,6 +2974,12 @@ gst_mpegts_demux_provides_clock (GstElement * element)
return is_live;
}
+static gboolean
+gst_mpegts_demux_provides_clock (GstElement * element)
+{
+ return gst_mpegts_demux_is_live (GST_MPEGTS_DEMUX (element));
+}
+
static GstClock *
gst_mpegts_demux_provide_clock (GstElement * element)
{
diff --git a/gst/mpegdemux/mpegtsparse.c b/gst/mpegdemux/mpegtsparse.c
index ca986c4d1..d77fd23ad 100644
--- a/gst/mpegdemux/mpegtsparse.c
+++ b/gst/mpegdemux/mpegtsparse.c
@@ -896,6 +896,14 @@ mpegts_parse_is_psi (MpegTSParse * parse, MpegTSPacketizerPacket * packet)
data = packet->data;
pointer = *data++;
data += pointer;
+ /* 'pointer' value may be invalid on malformed packet
+ * so we need to avoid out of range
+ */
+ if (!(data < packet->data_end)) {
+ GST_WARNING_OBJECT (parse,
+ "Wrong offset when retrieving table id: 0x%x", pointer);
+ return FALSE;
+ }
table_id = *data;
i = 0;
while (si_tables[i] != TABLE_ID_UNSET) {
diff --git a/gst/mpegpsmux/mpegpsmux.c b/gst/mpegpsmux/mpegpsmux.c
index 48c0828fb..3fff4be51 100644
--- a/gst/mpegpsmux/mpegpsmux.c
+++ b/gst/mpegpsmux/mpegpsmux.c
@@ -54,9 +54,11 @@ GST_DEBUG_CATEGORY (mpegpsmux_debug);
enum
{
- ARG_0
+ PROP_AGGREGATE_GOPS = 1
};
+#define DEFAULT_AGGREGATE_GOPS FALSE
+
static GstStaticPadTemplate mpegpsmux_sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink_%u",
GST_PAD_SINK,
@@ -94,7 +96,7 @@ static gboolean new_packet_cb (guint8 * data, guint len, void *user_data);
static void release_buffer_cb (guint8 * data, void *user_data);
static gboolean mpegpsdemux_prepare_srcpad (MpegPsMux * mux);
-static GstFlowReturn mpegpsmux_collected (GstCollectPads * pads,
+static GstFlowReturn mpegpsmux_collected (GstCollectPads2 * pads,
MpegPsMux * mux);
static GstPad *mpegpsmux_request_new_pad (GstElement * element,
GstPadTemplate * templ, const gchar * name);
@@ -135,6 +137,10 @@ mpegpsmux_class_init (MpegPsMuxClass * klass)
gstelement_class->release_pad = mpegpsmux_release_pad;
gstelement_class->change_state = mpegpsmux_change_state;
+ g_object_class_install_property (gobject_class, PROP_AGGREGATE_GOPS,
+ g_param_spec_boolean ("aggregate-gops", "Aggregate GOPs",
+ "Whether to aggregate GOPs and push them out as buffer lists",
+ DEFAULT_AGGREGATE_GOPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
static void
@@ -145,9 +151,9 @@ mpegpsmux_init (MpegPsMux * mux, MpegPsMuxClass * g_class)
gst_pad_use_fixed_caps (mux->srcpad);
gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);
- mux->collect = gst_collect_pads_new ();
- gst_collect_pads_set_function (mux->collect,
- (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (mpegpsmux_collected), mux);
+ mux->collect = gst_collect_pads2_new ();
+ gst_collect_pads2_set_function (mux->collect,
+ (GstCollectPads2Function) GST_DEBUG_FUNCPTR (mpegpsmux_collected), mux);
mux->psmux = psmux_new ();
psmux_set_write_func (mux->psmux, new_packet_cb, mux);
@@ -171,6 +177,11 @@ mpegpsmux_dispose (GObject * object)
mux->psmux = NULL;
}
+ if (mux->gop_list != NULL) {
+ gst_buffer_list_unref (mux->gop_list);
+ mux->gop_list = NULL;
+ }
+
GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
}
@@ -178,9 +189,12 @@ static void
gst_mpegpsmux_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
-/* MpegPsMux *mux = GST_MPEG_PSMUX (object); */
+ MpegPsMux *mux = GST_MPEG_PSMUX (object);
switch (prop_id) {
+ case PROP_AGGREGATE_GOPS:
+ mux->aggregate_gops = g_value_get_boolean (value);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@@ -191,9 +205,12 @@ static void
gst_mpegpsmux_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
-/* MpegPsMux *mux = GST_MPEG_PSMUX (object); */
+ MpegPsMux *mux = GST_MPEG_PSMUX (object);
switch (prop_id) {
+ case PROP_AGGREGATE_GOPS:
+ g_value_set_boolean (value, mux->aggregate_gops);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@@ -217,6 +234,7 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
GstFlowReturn ret = GST_FLOW_ERROR;
GstCaps *caps = gst_pad_get_negotiated_caps (pad);
GstStructure *s;
+ gboolean is_video = FALSE;
if (caps == NULL) {
GST_DEBUG_OBJECT (pad, "Sink pad caps were not set before pushing");
@@ -229,6 +247,7 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
if (gst_structure_has_name (s, "video/x-dirac")) {
GST_DEBUG_OBJECT (pad, "Creating Dirac stream");
ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_DIRAC);
+ is_video = TRUE;
} else if (gst_structure_has_name (s, "audio/x-ac3")) {
GST_DEBUG_OBJECT (pad, "Creating AC3 stream");
ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_PS_AUDIO_AC3);
@@ -252,6 +271,7 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
ps_data->codec_data = NULL;
}
ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_H264);
+ is_video = TRUE;
} else if (gst_structure_has_name (s, "audio/mpeg")) {
gint mpegversion;
if (!gst_structure_get_int (s, "mpegversion", &mpegversion)) {
@@ -312,6 +332,7 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
GST_DEBUG_OBJECT (pad, "Creating MPEG Video, version 4 stream");
ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_MPEG4);
}
+ is_video = TRUE;
}
if (ps_data->stream != NULL) {
@@ -327,6 +348,11 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
psmux_stream_set_buffer_release_func (ps_data->stream, release_buffer_cb);
ret = GST_FLOW_OK;
+
+ if (is_video && mux->video_stream_id == 0) {
+ mux->video_stream_id = ps_data->stream_id;
+ GST_INFO_OBJECT (mux, "video pad stream_id 0x%02x", mux->video_stream_id);
+ }
}
beach:
@@ -343,7 +369,7 @@ mpegpsmux_create_streams (MpegPsMux * mux)
/* Create the streams */
while (walk) {
- GstCollectData *c_data = (GstCollectData *) walk->data;
+ GstCollectData2 *c_data = (GstCollectData2 *) walk->data;
MpegPsPadData *ps_data = (MpegPsPadData *) walk->data;
walk = g_slist_next (walk);
@@ -368,11 +394,11 @@ mpegpsmux_choose_best_stream (MpegPsMux * mux)
/* Choose from which stream to mux with */
MpegPsPadData *best = NULL;
- GstCollectData *c_best = NULL;
+ GstCollectData2 *c_best = NULL;
GSList *walk;
for (walk = mux->collect->data; walk != NULL; walk = g_slist_next (walk)) {
- GstCollectData *c_data = (GstCollectData *) walk->data;
+ GstCollectData2 *c_data = (GstCollectData2 *) walk->data;
MpegPsPadData *ps_data = (MpegPsPadData *) walk->data;
if (ps_data->eos == FALSE) {
@@ -380,7 +406,7 @@ mpegpsmux_choose_best_stream (MpegPsMux * mux)
GstBuffer *buf;
ps_data->queued_buf = buf =
- gst_collect_pads_peek (mux->collect, c_data);
+ gst_collect_pads2_peek (mux->collect, c_data);
if (buf != NULL) {
if (ps_data->prepare_func) {
@@ -441,19 +467,34 @@ mpegpsmux_choose_best_stream (MpegPsMux * mux)
}
}
if (c_best) {
- gst_buffer_unref (gst_collect_pads_pop (mux->collect, c_best));
+ gst_buffer_unref (gst_collect_pads2_pop (mux->collect, c_best));
}
return best;
}
static GstFlowReturn
-mpegpsmux_collected (GstCollectPads * pads, MpegPsMux * mux)
+mpegpsmux_push_gop_list (MpegPsMux * mux)
+{
+ GstFlowReturn flow;
+
+ g_assert (mux->gop_list != NULL);
+
+ GST_DEBUG_OBJECT (mux, "Sending pending GOP of %u buffers",
+ gst_buffer_list_n_groups (mux->gop_list));
+ flow = gst_pad_push_list (mux->srcpad, mux->gop_list);
+ mux->gop_list = NULL;
+ return flow;
+}
+
+static GstFlowReturn
+mpegpsmux_collected (GstCollectPads2 * pads, MpegPsMux * mux)
{
/* main muxing function */
GstFlowReturn ret = GST_FLOW_OK;
MpegPsPadData *best = NULL;
+ gboolean keyunit;
GST_DEBUG_OBJECT (mux, "Pads collected");
@@ -496,9 +537,20 @@ mpegpsmux_collected (GstCollectPads * pads, MpegPsMux * mux)
G_GINT64_FORMAT, GST_TIME_ARGS (best->cur_ts), pts);
}
+ /* start of new GOP? */
+ keyunit = !GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (keyunit && best->stream_id == mux->video_stream_id
+ && mux->gop_list != NULL) {
+ ret = mpegpsmux_push_gop_list (mux);
+ if (ret != GST_FLOW_OK)
+ goto done;
+ }
+
/* give the buffer to libpsmux for processing */
psmux_stream_add_data (best->stream, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), buf, pts, -1);
+ GST_BUFFER_SIZE (buf), buf, pts, -1, keyunit);
+
best->queued_buf = NULL;
/* write the data from libpsmux to stream */
@@ -513,12 +565,17 @@ mpegpsmux_collected (GstCollectPads * pads, MpegPsMux * mux)
} else {
/* FIXME: Drain all remaining streams */
/* At EOS */
+ if (mux->gop_list != NULL)
+ mpegpsmux_push_gop_list (mux);
+
if (psmux_write_end_code (mux->psmux)) {
GST_WARNING_OBJECT (mux, "Writing MPEG PS Program end code failed.");
}
gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
}
+done:
+
return ret;
new_seg_fail:
return GST_FLOW_ERROR;
@@ -538,8 +595,8 @@ mpegpsmux_request_new_pad (GstElement * element,
pad = gst_pad_new_from_template (templ, name);
- pad_data = (MpegPsPadData *) gst_collect_pads_add_pad (mux->collect, pad,
- sizeof (MpegPsPadData), NULL);
+ pad_data = (MpegPsPadData *) gst_collect_pads2_add_pad (mux->collect, pad,
+ sizeof (MpegPsPadData));
if (pad_data == NULL)
goto pad_failure;
@@ -555,7 +612,7 @@ mpegpsmux_request_new_pad (GstElement * element,
could_not_add:
GST_ELEMENT_ERROR (element, STREAM, FAILED,
("Internal data stream error."), ("Could not add pad to element"));
- gst_collect_pads_remove_pad (mux->collect, pad);
+ gst_collect_pads2_remove_pad (mux->collect, pad);
gst_object_unref (pad);
return NULL;
pad_failure:
@@ -586,9 +643,31 @@ mpegpsmux_release_pad (GstElement * element, GstPad * pad)
pad_data->codec_data = NULL;
}
}
+ if (pad_data->stream_id == mux->video_stream_id)
+ mux->video_stream_id = 0;
GST_OBJECT_UNLOCK (pad);
- gst_collect_pads_remove_pad (mux->collect, pad);
+ gst_collect_pads2_remove_pad (mux->collect, pad);
+}
+
+static void
+add_buffer_to_goplist (MpegPsMux * mux, GstBuffer * buf)
+{
+ GstBufferListIterator *it;
+
+ if (mux->gop_list == NULL)
+ mux->gop_list = gst_buffer_list_new ();
+
+ it = gst_buffer_list_iterate (mux->gop_list);
+
+ /* move iterator to end */
+ while (gst_buffer_list_iterator_next_group (it)) {
+ /* .. */
+ }
+
+ gst_buffer_list_iterator_add_group (it);
+ gst_buffer_list_iterator_add (it, buf);
+ gst_buffer_list_iterator_free (it);
}
static gboolean
@@ -611,7 +690,14 @@ new_packet_cb (guint8 * data, guint len, void *user_data)
memcpy (GST_BUFFER_DATA (buf), data, len);
GST_BUFFER_TIMESTAMP (buf) = mux->last_ts;
+
+ if (mux->aggregate_gops) {
+ add_buffer_to_goplist (mux, buf);
+ return TRUE;
+ }
+
ret = gst_pad_push (mux->srcpad, buf);
+
if (G_UNLIKELY (ret != GST_FLOW_OK)) {
mux->last_flow_ret = ret;
return FALSE;
@@ -623,6 +709,9 @@ new_packet_cb (guint8 * data, guint len, void *user_data)
static gboolean
mpegpsdemux_prepare_srcpad (MpegPsMux * mux)
{
+ GValue val = { 0, };
+ GList *headers, *l;
+
/* prepare the source pad for output */
GstEvent *new_seg =
@@ -634,6 +723,21 @@ mpegpsdemux_prepare_srcpad (MpegPsMux * mux)
/* gst_static_pad_template_get_caps (&mpegpsmux_src_factory); */
+ headers = psmux_get_stream_headers (mux->psmux);
+ g_value_init (&val, GST_TYPE_ARRAY);
+ for (l = headers; l != NULL; l = l->next) {
+ GValue buf_val = { 0, };
+
+ g_value_init (&buf_val, GST_TYPE_BUFFER);
+ gst_value_take_buffer (&buf_val, GST_BUFFER (l->data));
+ l->data = NULL;
+ gst_value_array_append_value (&val, &buf_val);
+ g_value_unset (&buf_val);
+ }
+ gst_caps_set_value (caps, "streamheader", &val);
+ g_value_unset (&val);
+ g_list_free (headers);
+
/* Set caps on src pad from our template and push new segment */
gst_pad_set_caps (mux->srcpad, caps);
@@ -657,12 +761,12 @@ mpegpsmux_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
- gst_collect_pads_start (mux->collect);
+ gst_collect_pads2_start (mux->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_collect_pads_stop (mux->collect);
+ gst_collect_pads2_stop (mux->collect);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;
diff --git a/gst/mpegpsmux/mpegpsmux.h b/gst/mpegpsmux/mpegpsmux.h
index 4da1bea66..69de48a5c 100644
--- a/gst/mpegpsmux/mpegpsmux.h
+++ b/gst/mpegpsmux/mpegpsmux.h
@@ -46,7 +46,7 @@
#define __MPEGPSMUX_H__
#include <gst/gst.h>
-#include <gst/base/gstcollectpads.h>
+#include <gst/base/gstcollectpads2.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
@@ -68,7 +68,9 @@ struct MpegPsMux {
GstPad *srcpad;
- GstCollectPads *collect; // pads collector
+ guint video_stream_id; /* stream id of primary video stream */
+
+ GstCollectPads2 *collect; /* pads collector */
PsMux *psmux;
@@ -76,6 +78,9 @@ struct MpegPsMux {
GstFlowReturn last_flow_ret;
GstClockTime last_ts;
+
+ GstBufferList *gop_list;
+ gboolean aggregate_gops;
};
struct MpegPsMuxClass {
@@ -83,7 +88,7 @@ struct MpegPsMuxClass {
};
struct MpegPsPadData {
- GstCollectData collect; /* Parent */
+ GstCollectData2 collect; /* Parent */
guint8 stream_id;
guint8 stream_id_ext;
diff --git a/gst/mpegpsmux/psmux.c b/gst/mpegpsmux/psmux.c
index 0a714d038..44e32faf5 100644
--- a/gst/mpegpsmux/psmux.c
+++ b/gst/mpegpsmux/psmux.c
@@ -141,6 +141,12 @@ psmux_free (PsMux * mux)
}
g_list_free (mux->streams);
+ if (mux->sys_header != NULL)
+ gst_buffer_unref (mux->sys_header);
+
+ if (mux->psm != NULL)
+ gst_buffer_unref (mux->psm);
+
g_slice_free (PsMux, mux);
}
@@ -332,17 +338,23 @@ psmux_write_pack_header (PsMux * mux)
return psmux_packet_out (mux);
}
-static gboolean
-psmux_write_system_header (PsMux * mux)
+static void
+psmux_ensure_system_header (PsMux * mux)
{
+ GstBuffer *buf;
bits_buffer_t bw;
guint len = 12 + (mux->nb_streams +
(mux->nb_private_streams > 1 ? mux->nb_private_streams - 1 : 0)) * 3;
GList *cur;
gboolean private_hit = FALSE;
+ if (mux->sys_header != NULL)
+ return;
+
+ buf = gst_buffer_new_and_alloc (len);
+
/* system_header_start_code */
- bits_initwrite (&bw, len, mux->packet_buf);
+ bits_initwrite (&bw, len, GST_BUFFER_DATA (buf));
/* system_header start code */
bits_write (&bw, 24, PSMUX_START_CODE_PREFIX);
@@ -378,19 +390,36 @@ psmux_write_system_header (PsMux * mux)
private_hit = TRUE;
}
- mux->packet_bytes_written = len;
- return psmux_packet_out (mux);
+ GST_MEMDUMP ("System Header", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+
+ mux->sys_header = buf;
}
static gboolean
-psmux_write_program_stream_map (PsMux * mux)
+psmux_write_system_header (PsMux * mux)
+{
+ psmux_ensure_system_header (mux);
+
+ memcpy (mux->packet_buf, GST_BUFFER_DATA (mux->sys_header),
+ GST_BUFFER_SIZE (mux->sys_header));
+ mux->packet_bytes_written = GST_BUFFER_SIZE (mux->sys_header);
+
+ return psmux_packet_out (mux);
+}
+
+static void
+psmux_ensure_program_stream_map (PsMux * mux)
{
+ GstBuffer *buf;
gint psm_size = 16, es_map_size = 0;
bits_buffer_t bw;
GList *cur;
guint16 len;
guint8 *pos;
+ if (mux->psm != NULL)
+ return;
+
/* pre-write the descriptor loop */
pos = mux->es_info_buf;
for (cur = g_list_first (mux->streams); cur != NULL; cur = g_list_next (cur)) {
@@ -412,7 +441,10 @@ psmux_write_program_stream_map (PsMux * mux)
}
psm_size += es_map_size;
- bits_initwrite (&bw, psm_size, mux->packet_buf);
+
+ buf = gst_buffer_new_and_alloc (psm_size);
+
+ bits_initwrite (&bw, psm_size, GST_BUFFER_DATA (buf));
/* psm start code */
bits_write (&bw, 24, PSMUX_START_CODE_PREFIX);
@@ -429,15 +461,44 @@ psmux_write_program_stream_map (PsMux * mux)
/* program_stream_info empty */
bits_write (&bw, 16, es_map_size); /* elementary_stream_map_length */
+
memcpy (bw.p_data + bw.i_data, mux->es_info_buf, es_map_size);
/* CRC32 */
{
- guint32 crc = calc_crc32 (mux->packet_buf, psm_size - 4);
- guint8 *pos = mux->packet_buf + psm_size - 4;
+ guint32 crc = calc_crc32 (bw.p_data, psm_size - 4);
+ guint8 *pos = bw.p_data + psm_size - 4;
psmux_put32 (&pos, crc);
}
- mux->packet_bytes_written = psm_size;
+ GST_MEMDUMP ("Program Stream Map", GST_BUFFER_DATA (buf),
+ GST_BUFFER_SIZE (buf));
+
+ mux->psm = buf;
+}
+
+static gboolean
+psmux_write_program_stream_map (PsMux * mux)
+{
+ psmux_ensure_program_stream_map (mux);
+
+ memcpy (mux->packet_buf, GST_BUFFER_DATA (mux->psm),
+ GST_BUFFER_SIZE (mux->psm));
+ mux->packet_bytes_written = GST_BUFFER_SIZE (mux->psm);
+
return psmux_packet_out (mux);
}
+
+GList *
+psmux_get_stream_headers (PsMux * mux)
+{
+ GList *list;
+
+ psmux_ensure_system_header (mux);
+ psmux_ensure_program_stream_map (mux);
+
+ list = g_list_append (NULL, gst_buffer_ref (mux->sys_header));
+ list = g_list_append (list, gst_buffer_ref (mux->psm));
+
+ return list;
+}
diff --git a/gst/mpegpsmux/psmux.h b/gst/mpegpsmux/psmux.h
index 5aacf94ce..3daa90af7 100644
--- a/gst/mpegpsmux/psmux.h
+++ b/gst/mpegpsmux/psmux.h
@@ -93,6 +93,10 @@ struct PsMux {
guint8 audio_bound;
guint8 video_bound;
guint32 rate_bound;
+
+ /* stream headers */
+ GstBuffer *sys_header;
+ GstBuffer *psm;
};
/* create/free new muxer session */
@@ -109,6 +113,8 @@ PsMuxStream * psmux_create_stream (PsMux *mux, PsMuxStreamType stream_type);
gboolean psmux_write_stream_packet (PsMux *mux, PsMuxStream *stream);
gboolean psmux_write_end_code (PsMux *mux);
+GList * psmux_get_stream_headers (PsMux *mux);
+
G_END_DECLS
#endif
diff --git a/gst/mpegpsmux/psmuxstream.c b/gst/mpegpsmux/psmuxstream.c
index e91cc006b..a0bd38ee6 100644
--- a/gst/mpegpsmux/psmuxstream.c
+++ b/gst/mpegpsmux/psmuxstream.c
@@ -513,7 +513,7 @@ psmux_stream_write_pes_header (PsMuxStream * stream, guint8 * data)
*/
void
psmux_stream_add_data (PsMuxStream * stream, guint8 * data, guint len,
- void *user_data, gint64 pts, gint64 dts)
+ void *user_data, gint64 pts, gint64 dts, gboolean keyunit)
{
PsMuxStreamBuffer *packet;
@@ -524,6 +524,7 @@ psmux_stream_add_data (PsMuxStream * stream, guint8 * data, guint len,
packet->size = len;
packet->user_data = user_data;
+ packet->keyunit = keyunit;
packet->pts = pts;
packet->dts = dts;
diff --git a/gst/mpegpsmux/psmuxstream.h b/gst/mpegpsmux/psmuxstream.h
index 9860e6230..ff4c2892b 100644
--- a/gst/mpegpsmux/psmuxstream.h
+++ b/gst/mpegpsmux/psmuxstream.h
@@ -86,6 +86,8 @@ struct PsMuxStreamBuffer
guint8 *data;
guint32 size;
+ gboolean keyunit;
+
/* PTS & DTS associated with the contents of this buffer */
GstClockTime pts;
GstClockTime dts;
@@ -146,7 +148,8 @@ void psmux_stream_set_buffer_release_func (PsMuxStream *stream,
/* Add a new buffer to the pool of available bytes. If pts or dts are not -1, they
* indicate the PTS or DTS of the first access unit within this packet */
void psmux_stream_add_data (PsMuxStream *stream, guint8 *data, guint len,
- void *user_data, gint64 pts, gint64 dts);
+ void *user_data, gint64 pts, gint64 dts,
+ gboolean keyunit);
/* total bytes in buffer */
gint psmux_stream_bytes_in_buffer (PsMuxStream *stream);
diff --git a/gst/mpegtsmux/Makefile.am b/gst/mpegtsmux/Makefile.am
index c433af3c3..99adf803f 100644
--- a/gst/mpegtsmux/Makefile.am
+++ b/gst/mpegtsmux/Makefile.am
@@ -7,8 +7,9 @@ libgstmpegtsmux_la_SOURCES = \
mpegtsmux_h264.c \
mpegtsmux_aac.c
-libgstmpegtsmux_la_CFLAGS = $(GST_CFLAGS)
-libgstmpegtsmux_la_LIBADD = $(top_builddir)/gst/mpegtsmux/tsmux/libtsmux.la $(GST_LIBS) $(GST_BASE_LIBS)
+libgstmpegtsmux_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
+libgstmpegtsmux_la_LIBADD = $(top_builddir)/gst/mpegtsmux/tsmux/libtsmux.la \
+ $(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_MAJORMINOR@ $(GST_BASE_LIBS) $(GST_LIBS)
libgstmpegtsmux_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstmpegtsmux_la_LIBTOOLFLAGS = --tag=disable-static
diff --git a/gst/mpegtsmux/mpegtsmux.c b/gst/mpegtsmux/mpegtsmux.c
index b28ec33d9..3e3b3f9e1 100644
--- a/gst/mpegtsmux/mpegtsmux.c
+++ b/gst/mpegtsmux/mpegtsmux.c
@@ -88,6 +88,8 @@
#include <stdio.h>
#include <string.h>
+#include <gst/video/video.h>
+
#include "mpegtsmux.h"
#include "mpegtsmux_h264.h"
@@ -143,7 +145,7 @@ static gboolean new_packet_cb (guint8 * data, guint len, void *user_data,
static void release_buffer_cb (guint8 * data, void *user_data);
static void mpegtsdemux_prepare_srcpad (MpegTsMux * mux);
-static GstFlowReturn mpegtsmux_collected (GstCollectPads * pads,
+static GstFlowReturn mpegtsmux_collected (GstCollectPads2 * pads,
MpegTsMux * mux);
static GstPad *mpegtsmux_request_new_pad (GstElement * element,
GstPadTemplate * templ, const gchar * name);
@@ -151,6 +153,8 @@ static void mpegtsmux_release_pad (GstElement * element, GstPad * pad);
static GstStateChangeReturn mpegtsmux_change_state (GstElement * element,
GstStateChange transition);
static void mpegtsdemux_set_header_on_caps (MpegTsMux * mux);
+static gboolean mpegtsmux_sink_event (GstPad * pad, GstEvent * event);
+static gboolean mpegtsmux_src_event (GstPad * pad, GstEvent * event);
GST_BOILERPLATE (MpegTsMux, mpegtsmux, GstElement, GST_TYPE_ELEMENT);
@@ -215,11 +219,12 @@ mpegtsmux_init (MpegTsMux * mux, MpegTsMuxClass * g_class)
mux->srcpad =
gst_pad_new_from_static_template (&mpegtsmux_src_factory, "src");
gst_pad_use_fixed_caps (mux->srcpad);
+ gst_pad_set_event_function (mux->srcpad, mpegtsmux_src_event);
gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);
- mux->collect = gst_collect_pads_new ();
- gst_collect_pads_set_function (mux->collect,
- (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (mpegtsmux_collected), mux);
+ mux->collect = gst_collect_pads2_new ();
+ gst_collect_pads2_set_function (mux->collect,
+ (GstCollectPads2Function) GST_DEBUG_FUNCPTR (mpegtsmux_collected), mux);
mux->tsmux = tsmux_new ();
tsmux_set_write_func (mux->tsmux, new_packet_cb, mux);
@@ -238,6 +243,8 @@ mpegtsmux_init (MpegTsMux * mux, MpegTsMuxClass * g_class)
mux->prog_map = NULL;
mux->streamheader = NULL;
mux->streamheader_sent = FALSE;
+ mux->force_key_unit_event = NULL;
+ mux->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
}
static void
@@ -509,7 +516,7 @@ mpegtsmux_create_streams (MpegTsMux * mux)
/* Create the streams */
while (walk) {
- GstCollectData *c_data = (GstCollectData *) walk->data;
+ GstCollectData2 *c_data = (GstCollectData2 *) walk->data;
MpegTsPadData *ts_data = (MpegTsPadData *) walk->data;
gchar *name = NULL;
@@ -569,11 +576,11 @@ static MpegTsPadData *
mpegtsmux_choose_best_stream (MpegTsMux * mux)
{
MpegTsPadData *best = NULL;
- GstCollectData *c_best = NULL;
+ GstCollectData2 *c_best = NULL;
GSList *walk;
for (walk = mux->collect->data; walk != NULL; walk = g_slist_next (walk)) {
- GstCollectData *c_data = (GstCollectData *) walk->data;
+ GstCollectData2 *c_data = (GstCollectData2 *) walk->data;
MpegTsPadData *ts_data = (MpegTsPadData *) walk->data;
if (ts_data->eos == FALSE) {
@@ -581,7 +588,7 @@ mpegtsmux_choose_best_stream (MpegTsMux * mux)
GstBuffer *buf;
ts_data->queued_buf = buf =
- gst_collect_pads_peek (mux->collect, c_data);
+ gst_collect_pads2_peek (mux->collect, c_data);
if (buf != NULL) {
if (ts_data->prepare_func) {
@@ -643,17 +650,212 @@ mpegtsmux_choose_best_stream (MpegTsMux * mux)
}
if (c_best) {
GstBuffer *buffer;
- if ((buffer = gst_collect_pads_pop (mux->collect, c_best)))
+ if ((buffer = gst_collect_pads2_pop (mux->collect, c_best)))
gst_buffer_unref (buffer);
}
return best;
}
-#define COLLECT_DATA_PAD(collect_data) (((GstCollectData *)(collect_data))->pad)
+#define COLLECT_DATA_PAD(collect_data) (((GstCollectData2 *)(collect_data))->pad)
+
+static MpegTsPadData *
+find_pad_data (MpegTsMux * mux, GstPad * pad)
+{
+ GSList *walk;
+ MpegTsPadData *ts_data = NULL;
+
+ GST_COLLECT_PADS2_STREAM_LOCK (mux->collect);
+ walk = mux->collect->pad_list;
+ while (walk) {
+ if (((GstCollectData2 *) walk->data)->pad == pad) {
+ ts_data = (MpegTsPadData *) walk->data;
+ break;
+ }
+
+ walk = g_slist_next (walk);
+ }
+ GST_COLLECT_PADS2_STREAM_UNLOCK (mux->collect);
+
+ return ts_data;
+}
+
+static gboolean
+mpegtsmux_sink_event (GstPad * pad, GstEvent * event)
+{
+ MpegTsMux *mux = GST_MPEG_TSMUX (gst_pad_get_parent (pad));
+ MpegTsPadData *ts_data;
+ gboolean res = TRUE;
+ gboolean forward = TRUE;
+
+ ts_data = find_pad_data (mux, pad);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ {
+ GstClockTime timestamp, stream_time, running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (!gst_video_event_is_force_key_unit (event))
+ goto out;
+
+ forward = FALSE;
+
+ gst_video_event_parse_downstream_force_key_unit (event,
+ &timestamp, &stream_time, &running_time, &all_headers, &count);
+ GST_INFO_OBJECT (mux, "have downstream force-key-unit event on pad %s, "
+ "seqnum %d, running-time %" GST_TIME_FORMAT " count %d",
+ gst_pad_get_name (pad), gst_event_get_seqnum (event),
+ GST_TIME_ARGS (running_time), count);
+
+ if (mux->force_key_unit_event != NULL) {
+ GST_INFO_OBJECT (mux, "skipping downstream force key unit event "
+ "as an upstream force key unit is already queued");
+ goto out;
+ }
+
+ if (!all_headers)
+ goto out;
+
+ mux->pending_key_unit_ts = running_time;
+ gst_event_replace (&mux->force_key_unit_event, event);
+ break;
+ }
+ default:
+ break;
+ }
+
+out:
+ if (forward)
+ res = ts_data->eventfunc (pad, event);
+
+ gst_object_unref (mux);
+ return res;
+}
+
+static gboolean
+mpegtsmux_src_event (GstPad * pad, GstEvent * event)
+{
+ MpegTsMux *mux = GST_MPEG_TSMUX (gst_pad_get_parent (pad));
+ gboolean res = TRUE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ {
+ GstIterator *iter;
+ GstIteratorResult iter_ret;
+ GstPad *sinkpad;
+ GstClockTime running_time;
+ gboolean all_headers, done;
+ guint count;
+
+ if (!gst_video_event_is_force_key_unit (event))
+ break;
+
+ gst_video_event_parse_upstream_force_key_unit (event,
+ &running_time, &all_headers, &count);
+
+ GST_INFO_OBJECT (mux, "received upstream force-key-unit event, "
+ "seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
+ gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
+ all_headers, count);
+
+ if (!all_headers)
+ break;
+
+ mux->pending_key_unit_ts = running_time;
+ gst_event_replace (&mux->force_key_unit_event, event);
+
+ iter = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mux));
+ done = FALSE;
+ while (!done) {
+ gboolean res = FALSE, tmp;
+ iter_ret = gst_iterator_next (iter, (gpointer *) & sinkpad);
+
+ switch (iter_ret) {
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
+ case GST_ITERATOR_OK:
+ GST_INFO_OBJECT (mux, "forwarding to %s",
+ gst_pad_get_name (sinkpad));
+ tmp = gst_pad_push_event (sinkpad, gst_event_ref (event));
+ GST_INFO_OBJECT (mux, "result %d", tmp);
+ /* succeed if at least one pad succeeds */
+ res |= tmp;
+ gst_object_unref (sinkpad);
+ break;
+ case GST_ITERATOR_ERROR:
+ done = TRUE;
+ break;
+ case GST_ITERATOR_RESYNC:
+ break;
+ }
+ }
+
+ gst_event_unref (event);
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, event);
+ break;
+ }
+
+ gst_object_unref (mux);
+ return res;
+}
+
+static GstEvent *
+check_pending_key_unit_event (GstEvent * pending_event, GstSegment * segment,
+ GstClockTime timestamp, guint flags, GstClockTime pending_key_unit_ts)
+{
+ GstClockTime running_time, stream_time;
+ gboolean all_headers;
+ guint count;
+ GstEvent *event = NULL;
+
+ g_return_val_if_fail (pending_event != NULL, NULL);
+ g_return_val_if_fail (segment != NULL, NULL);
+
+ if (pending_event == NULL)
+ goto out;
+
+ if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
+ timestamp == GST_CLOCK_TIME_NONE)
+ goto out;
+
+ running_time = gst_segment_to_running_time (segment,
+ GST_FORMAT_TIME, timestamp);
+
+ GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
+ if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
+ running_time < pending_key_unit_ts)
+ goto out;
+
+ if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
+ GST_INFO ("pending force key unit, waiting for keyframe");
+ goto out;
+ }
+
+ stream_time = gst_segment_to_stream_time (segment,
+ GST_FORMAT_TIME, timestamp);
+
+ gst_video_event_parse_upstream_force_key_unit (pending_event,
+ NULL, &all_headers, &count);
+
+ event =
+ gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
+ running_time, all_headers, count);
+ gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
+
+out:
+ return event;
+}
static GstFlowReturn
-mpegtsmux_collected (GstCollectPads * pads, MpegTsMux * mux)
+mpegtsmux_collected (GstCollectPads2 * pads, MpegTsMux * mux)
{
GstFlowReturn ret = GST_FLOW_OK;
MpegTsPadData *best = NULL;
@@ -686,6 +888,42 @@ mpegtsmux_collected (GstCollectPads * pads, MpegTsMux * mux)
return GST_FLOW_ERROR;
}
+ if (mux->force_key_unit_event != NULL && best->stream->is_video_stream) {
+ GstEvent *event;
+
+ event = check_pending_key_unit_event (mux->force_key_unit_event,
+ &best->collect.segment, GST_BUFFER_TIMESTAMP (buf),
+ GST_BUFFER_FLAGS (buf), mux->pending_key_unit_ts);
+ if (event) {
+ GstClockTime running_time;
+ guint count;
+ GList *cur;
+
+ mux->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
+ gst_event_replace (&mux->force_key_unit_event, NULL);
+
+ gst_video_event_parse_downstream_force_key_unit (event,
+ NULL, NULL, &running_time, NULL, &count);
+
+ GST_INFO_OBJECT (mux, "pushing downstream force-key-unit event %d "
+ "%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
+ GST_TIME_ARGS (running_time), count);
+ gst_pad_push_event (mux->srcpad, event);
+
+ /* output PAT */
+ mux->tsmux->last_pat_ts = -1;
+
+ /* output PMT for each program */
+ for (cur = g_list_first (mux->tsmux->programs); cur != NULL;
+ cur = g_list_next (cur)) {
+ TsMuxProgram *program = (TsMuxProgram *) cur->data;
+
+ program->last_pmt_ts = -1;
+ }
+ tsmux_program_set_pcr_stream (prog, NULL);
+ }
+ }
+
if (G_UNLIKELY (prog->pcr_stream == NULL)) {
/* Take the first data stream for the PCR */
GST_DEBUG_OBJECT (COLLECT_DATA_PAD (best),
@@ -749,22 +987,25 @@ mpegtsmux_request_new_pad (GstElement * element,
GstPad *pad = NULL;
MpegTsPadData *pad_data = NULL;
- if (name != NULL && sscanf (name, "sink_%u", &pid) == 1) {
+ if (name != NULL && sscanf (name, "sink_%d", &pid) == 1) {
if (tsmux_find_stream (mux->tsmux, pid))
goto stream_exists;
} else {
pid = tsmux_get_new_pid (mux->tsmux);
}
- pad_name = g_strdup_printf ("sink_%u", pid);
+ pad_name = g_strdup_printf ("sink_%d", pid);
pad = gst_pad_new_from_template (templ, pad_name);
g_free (pad_name);
- pad_data = (MpegTsPadData *) gst_collect_pads_add_pad (mux->collect, pad,
- sizeof (MpegTsPadData), NULL);
+ pad_data = (MpegTsPadData *) gst_collect_pads2_add_pad (mux->collect, pad,
+ sizeof (MpegTsPadData));
if (pad_data == NULL)
goto pad_failure;
+ pad_data->eventfunc = pad->eventfunc;
+ gst_pad_set_event_function (pad, mpegtsmux_sink_event);
+
pad_data->pid = pid;
pad_data->last_ts = GST_CLOCK_TIME_NONE;
pad_data->codec_data = NULL;
@@ -786,7 +1027,7 @@ stream_exists:
could_not_add:
GST_ELEMENT_ERROR (element, STREAM, FAILED,
("Internal data stream error."), ("Could not add pad to element"));
- gst_collect_pads_remove_pad (mux->collect, pad);
+ gst_collect_pads2_remove_pad (mux->collect, pad);
gst_object_unref (pad);
return NULL;
pad_failure:
@@ -804,7 +1045,7 @@ mpegtsmux_release_pad (GstElement * element, GstPad * pad)
GST_DEBUG_OBJECT (mux, "Pad %" GST_PTR_FORMAT " being released", pad);
if (mux->collect) {
- gst_collect_pads_remove_pad (mux->collect, pad);
+ gst_collect_pads2_remove_pad (mux->collect, pad);
}
/* chain up */
@@ -1065,12 +1306,12 @@ mpegtsmux_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
- gst_collect_pads_start (mux->collect);
+ gst_collect_pads2_start (mux->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_collect_pads_stop (mux->collect);
+ gst_collect_pads2_stop (mux->collect);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
if (mux->adapter)
diff --git a/gst/mpegtsmux/mpegtsmux.h b/gst/mpegtsmux/mpegtsmux.h
index b45dd34aa..c31707b40 100644
--- a/gst/mpegtsmux/mpegtsmux.h
+++ b/gst/mpegtsmux/mpegtsmux.h
@@ -84,7 +84,7 @@
#define __MPEGTSMUX_H__
#include <gst/gst.h>
-#include <gst/base/gstcollectpads.h>
+#include <gst/base/gstcollectpads2.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
@@ -108,7 +108,7 @@ struct MpegTsMux {
GstPad *srcpad;
- GstCollectPads *collect;
+ GstCollectPads2 *collect;
TsMux *tsmux;
TsMuxProgram **programs;
@@ -128,6 +128,8 @@ struct MpegTsMux {
GList *streamheader;
gboolean streamheader_sent;
+ GstClockTime pending_key_unit_ts;
+ GstEvent *force_key_unit_event;
};
struct MpegTsMuxClass {
@@ -137,7 +139,7 @@ struct MpegTsMuxClass {
#define MPEG_TS_PAD_DATA(data) ((MpegTsPadData *)(data))
struct MpegTsPadData {
- GstCollectData collect; /* Parent */
+ GstCollectData2 collect; /* Parent */
gint pid;
TsMuxStream *stream;
@@ -157,7 +159,8 @@ struct MpegTsPadData {
gboolean eos;
gint prog_id; /* The program id to which it is attached to (not program pid) */
- TsMuxProgram *prog; /* The program to which this stream belongs to */
+ TsMuxProgram *prog; /* The program to which this stream belongs to */
+ GstPadEventFunction eventfunc;
};
GType mpegtsmux_get_type (void);
diff --git a/gst/mxf/mxfmux.c b/gst/mxf/mxfmux.c
index 70561a626..87f5dd569 100644
--- a/gst/mxf/mxfmux.c
+++ b/gst/mxf/mxfmux.c
@@ -146,9 +146,9 @@ gst_mxf_mux_init (GstMXFMux * mux, GstMXFMuxClass * g_class)
gst_caps_unref (caps);
gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);
- mux->collect = gst_collect_pads_new ();
- gst_collect_pads_set_function (mux->collect,
- (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_mxf_mux_collected), mux);
+ mux->collect = gst_collect_pads2_new ();
+ gst_collect_pads2_set_function (mux->collect,
+ (GstCollectPads2Function) GST_DEBUG_FUNCPTR (gst_mxf_mux_collected), mux);
gst_mxf_mux_reset (mux);
}
@@ -209,7 +209,7 @@ gst_mxf_mux_reset (GstMXFMux * mux)
g_object_unref (cpad->adapter);
g_free (cpad->mapping_data);
- gst_collect_pads_remove_pad (mux->collect, cpad->collect.pad);
+ gst_collect_pads2_remove_pad (mux->collect, cpad->collect.pad);
}
mux->state = GST_MXF_MUX_STATE_HEADER;
@@ -268,7 +268,7 @@ gst_mxf_mux_handle_sink_event (GstPad * pad, GstEvent * event)
break;
}
- /* now GstCollectPads can take care of the rest, e.g. EOS */
+ /* now GstCollectPads2 can take care of the rest, e.g. EOS */
if (ret)
ret = mux->collect_event (pad, event);
gst_object_unref (mux);
@@ -338,13 +338,13 @@ gst_mxf_mux_setcaps (GstPad * pad, GstCaps * caps)
for (i = 0; i < mux->preface->content_storage->n_packages; i++) {
MXFMetadataSourcePackage *package;
- if (!MXF_IS_METADATA_SOURCE_PACKAGE (mux->preface->content_storage->
- packages[i]))
+ if (!MXF_IS_METADATA_SOURCE_PACKAGE (mux->preface->
+ content_storage->packages[i]))
continue;
package =
- MXF_METADATA_SOURCE_PACKAGE (mux->preface->content_storage->
- packages[i]);
+ MXF_METADATA_SOURCE_PACKAGE (mux->preface->
+ content_storage->packages[i]);
if (!package->descriptor)
continue;
@@ -420,13 +420,13 @@ gst_mxf_mux_request_new_pad (GstElement * element,
pad = gst_pad_new_from_template (templ, name);
g_free (name);
cpad = (GstMXFMuxPad *)
- gst_collect_pads_add_pad (mux->collect, pad, sizeof (GstMXFMuxPad), NULL);
+ gst_collect_pads2_add_pad (mux->collect, pad, sizeof (GstMXFMuxPad));
cpad->last_timestamp = 0;
cpad->adapter = gst_adapter_new ();
cpad->writer = writer;
/* FIXME: hacked way to override/extend the event function of
- * GstCollectPads; because it sets its own event function giving the
+ * GstCollectPads2; because it sets its own event function giving the
* element no access to events.
*/
mux->collect_event = (GstPadEventFunction) GST_PAD_EVENTFUNC (pad);
@@ -450,7 +450,7 @@ gst_mxf_mux_release_pad (GstElement * element, GstPad * pad)
g_object_unref (cpad->adapter);
g_free (cpad->mapping_data);
- gst_collect_pads_remove_pad (mux->collect, pad);
+ gst_collect_pads2_remove_pad (mux->collect, pad);
gst_element_remove_pad (element, pad); */
}
@@ -706,8 +706,8 @@ gst_mxf_mux_create_metadata (GstMXFMux * mux)
if (p->parent.n_tracks == 1) {
p->descriptor = (MXFMetadataGenericDescriptor *) cpad->descriptor;
} else {
- MXF_METADATA_MULTIPLE_DESCRIPTOR (p->descriptor)->
- sub_descriptors[n] =
+ MXF_METADATA_MULTIPLE_DESCRIPTOR (p->
+ descriptor)->sub_descriptors[n] =
(MXFMetadataGenericDescriptor *) cpad->descriptor;
}
@@ -1099,9 +1099,8 @@ gst_mxf_mux_handle_buffer (GstMXFMux * mux, GstMXFMuxPad * cpad)
GstBuffer *packet;
GstFlowReturn ret = GST_FLOW_OK;
guint8 slen, ber[9];
- gboolean flush =
- (cpad->collect.abidata.ABI.eos && !cpad->have_complete_edit_unit
- && cpad->collect.buffer == NULL);
+ gboolean flush = ((cpad->collect.state & GST_COLLECT_PADS2_STATE_EOS)
+ && !cpad->have_complete_edit_unit && cpad->collect.buffer == NULL);
if (cpad->have_complete_edit_unit) {
GST_DEBUG_OBJECT (cpad->collect.pad,
@@ -1109,7 +1108,7 @@ gst_mxf_mux_handle_buffer (GstMXFMux * mux, GstMXFMuxPad * cpad)
cpad->source_track->parent.track_id, cpad->pos);
buf = NULL;
} else if (!flush) {
- buf = gst_collect_pads_pop (mux->collect, &cpad->collect);
+ buf = gst_collect_pads2_pop (mux->collect, &cpad->collect);
}
if (buf) {
@@ -1247,22 +1246,23 @@ gst_mxf_mux_handle_eos (GstMXFMux * mux)
/* Update durations */
cpad->source_track->parent.sequence->duration = cpad->pos;
- MXF_METADATA_SOURCE_CLIP (cpad->source_track->parent.sequence->
- structural_components[0])->parent.duration = cpad->pos;
+ MXF_METADATA_SOURCE_CLIP (cpad->source_track->parent.
+ sequence->structural_components[0])->parent.duration = cpad->pos;
for (i = 0; i < mux->preface->content_storage->packages[0]->n_tracks; i++) {
MXFMetadataTimelineTrack *track;
- if (!MXF_IS_METADATA_TIMELINE_TRACK (mux->preface->content_storage->
- packages[0]->tracks[i])
- || !MXF_IS_METADATA_SOURCE_CLIP (mux->preface->content_storage->
- packages[0]->tracks[i]->sequence->structural_components[0]))
+ if (!MXF_IS_METADATA_TIMELINE_TRACK (mux->preface->
+ content_storage->packages[0]->tracks[i])
+ || !MXF_IS_METADATA_SOURCE_CLIP (mux->preface->
+ content_storage->packages[0]->tracks[i]->sequence->
+ structural_components[0]))
continue;
track =
- MXF_METADATA_TIMELINE_TRACK (mux->preface->content_storage->
- packages[0]->tracks[i]);
- if (MXF_METADATA_SOURCE_CLIP (track->parent.sequence->
- structural_components[0])->source_track_id ==
+ MXF_METADATA_TIMELINE_TRACK (mux->preface->
+ content_storage->packages[0]->tracks[i]);
+ if (MXF_METADATA_SOURCE_CLIP (track->parent.
+ sequence->structural_components[0])->source_track_id ==
cpad->source_track->parent.track_id) {
track->parent.sequence->structural_components[0]->duration = cpad->pos;
track->parent.sequence->duration = cpad->pos;
@@ -1273,8 +1273,8 @@ gst_mxf_mux_handle_eos (GstMXFMux * mux)
/* Update timecode track duration */
{
MXFMetadataTimelineTrack *track =
- MXF_METADATA_TIMELINE_TRACK (mux->preface->content_storage->
- packages[0]->tracks[0]);
+ MXF_METADATA_TIMELINE_TRACK (mux->preface->
+ content_storage->packages[0]->tracks[0]);
MXFMetadataSequence *sequence = track->parent.sequence;
MXFMetadataTimecodeComponent *component =
MXF_METADATA_TIMECODE_COMPONENT (sequence->structural_components[0]);
@@ -1369,7 +1369,7 @@ _sort_mux_pads (gconstpointer a, gconstpointer b)
}
static GstFlowReturn
-gst_mxf_mux_collected (GstCollectPads * pads, gpointer user_data)
+gst_mxf_mux_collected (GstCollectPads2 * pads, gpointer user_data)
{
GstMXFMux *mux = GST_MXF_MUX (user_data);
GstMXFMuxPad *best = NULL;
@@ -1424,14 +1424,17 @@ gst_mxf_mux_collected (GstCollectPads * pads, gpointer user_data)
do {
for (sl = mux->collect->data; sl; sl = sl->next) {
+ gboolean pad_eos;
GstMXFMuxPad *cpad = sl->data;
GstClockTime next_gc_timestamp =
gst_util_uint64_scale ((mux->last_gc_position + 1) * GST_SECOND,
mux->min_edit_rate.d, mux->min_edit_rate.n);
- eos &= cpad->collect.abidata.ABI.eos;
+ pad_eos = cpad->collect.state & GST_COLLECT_PADS2_STATE_EOS;
+ if (!pad_eos)
+ eos = FALSE;
- if ((!cpad->collect.abidata.ABI.eos || cpad->have_complete_edit_unit ||
+ if ((!pad_eos || cpad->have_complete_edit_unit ||
gst_adapter_available (cpad->adapter) > 0 || cpad->collect.buffer)
&& cpad->last_timestamp < next_gc_timestamp) {
best = cpad;
@@ -1479,12 +1482,12 @@ gst_mxf_mux_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
- gst_collect_pads_start (mux->collect);
+ gst_collect_pads2_start (mux->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_collect_pads_stop (mux->collect);
+ gst_collect_pads2_stop (mux->collect);
break;
default:
break;
diff --git a/gst/mxf/mxfmux.h b/gst/mxf/mxfmux.h
index 94330c46b..20600ae79 100644
--- a/gst/mxf/mxfmux.h
+++ b/gst/mxf/mxfmux.h
@@ -22,7 +22,7 @@
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/base/gstcollectpads.h>
+#include <gst/base/gstcollectpads2.h>
#include "mxfessence.h"
@@ -41,7 +41,7 @@ G_BEGIN_DECLS
typedef struct
{
- GstCollectData collect;
+ GstCollectData2 collect;
guint64 pos;
GstClockTime last_timestamp;
@@ -71,7 +71,7 @@ typedef struct _GstMXFMux {
GstElement element;
GstPad *srcpad;
- GstCollectPads *collect;
+ GstCollectPads2 *collect;
/* <private> */
GstPadEventFunction collect_event;
diff --git a/gst/videomeasure/gstvideomeasure_ssim.c b/gst/videomeasure/gstvideomeasure_ssim.c
index 9242c002c..3a354bd67 100644
--- a/gst/videomeasure/gstvideomeasure_ssim.c
+++ b/gst/videomeasure/gstvideomeasure_ssim.c
@@ -114,7 +114,7 @@ static void gst_ssim_release_pad (GstElement * element, GstPad * pad);
static GstStateChangeReturn gst_ssim_change_state (GstElement * element,
GstStateChange transition);
-static GstFlowReturn gst_ssim_collected (GstCollectPads * pads,
+static GstFlowReturn gst_ssim_collected (GstCollectPads2 * pads,
gpointer user_data);
static GstElementClass *parent_class = NULL;
@@ -901,7 +901,7 @@ gst_ssim_src_event (GstPad * pad, GstEvent * event)
/* check if we are flushing */
if (flags & GST_SEEK_FLAG_FLUSH) {
/* make sure we accept nothing anymore and return WRONG_STATE */
- gst_collect_pads_set_flushing (ssim->collect, TRUE);
+ gst_collect_pads2_set_flushing (ssim->collect, TRUE);
/* flushing seek, start flush downstream, the flush will be done
* when all pads received a FLUSH_STOP. */
@@ -991,7 +991,7 @@ gst_ssim_sink_event (GstPad * pad, GstEvent * event)
break;
}
- /* now GstCollectPads can take care of the rest, e.g. EOS */
+ /* now GstCollectPads2 can take care of the rest, e.g. EOS */
GST_DEBUG ("Dispatching %s event on pad %s:%s", GST_EVENT_TYPE_NAME (event),
GST_DEBUG_PAD_NAME (pad));
ret = ssim->collect_event (pad, event);
@@ -1158,11 +1158,10 @@ gst_ssim_request_new_pad (GstElement * element, GstPadTemplate * templ,
gst_pad_set_getcaps_function (newpad,
GST_DEBUG_FUNCPTR (gst_ssim_sink_getcaps));
gst_pad_set_setcaps_function (newpad, GST_DEBUG_FUNCPTR (gst_ssim_setcaps));
- gst_collect_pads_add_pad (ssim->collect, newpad, sizeof (GstCollectData),
- NULL);
+ gst_collect_pads2_add_pad (ssim->collect, newpad, sizeof (GstCollectData2));
/* FIXME: hacked way to override/extend the event function of
- * GstCollectPads; because it sets its own event function giving the
+ * GstCollectPads2; because it sets its own event function giving the
* element no access to events
*/
GST_DEBUG_OBJECT (ssim, "Current collect_event is %p, changing to %p",
@@ -1234,7 +1233,7 @@ could_not_add_src:
could_not_add_sink:
{
GST_DEBUG_OBJECT (ssim, "could not add sink pad");
- gst_collect_pads_remove_pad (ssim->collect, newpad);
+ gst_collect_pads2_remove_pad (ssim->collect, newpad);
gst_object_unref (newpad);
return NULL;
}
@@ -1249,7 +1248,7 @@ gst_ssim_release_pad (GstElement * element, GstPad * pad)
GST_DEBUG_OBJECT (ssim, "release pad %s:%s", GST_DEBUG_PAD_NAME (pad));
- gst_collect_pads_remove_pad (ssim->collect, pad);
+ gst_collect_pads2_remove_pad (ssim->collect, pad);
gst_element_remove_pad (element, pad);
}
@@ -1268,8 +1267,8 @@ gst_ssim_init (GstSSim * ssim)
ssim->sinkcaps = NULL;
/* keep track of the sinkpads requested */
- ssim->collect = gst_collect_pads_new ();
- gst_collect_pads_set_function (ssim->collect,
+ ssim->collect = gst_collect_pads2_new ();
+ gst_collect_pads2_set_function (ssim->collect,
GST_DEBUG_FUNCPTR (gst_ssim_collected), ssim);
}
@@ -1408,7 +1407,7 @@ gst_ssim_regenerate_windows (GstSSim * ssim)
}
static GstFlowReturn
-gst_ssim_collected (GstCollectPads * pads, gpointer user_data)
+gst_ssim_collected (GstCollectPads2 * pads, gpointer user_data)
{
GstSSim *ssim;
GSList *collected;
@@ -1441,12 +1440,12 @@ gst_ssim_collected (GstCollectPads * pads, gpointer user_data)
}
for (collected = pads->data; collected; collected = g_slist_next (collected)) {
- GstCollectData *collect_data;
+ GstCollectData2 *collect_data;
GstBuffer *inbuf;
- collect_data = (GstCollectData *) collected->data;
+ collect_data = (GstCollectData2 *) collected->data;
- inbuf = gst_collect_pads_peek (pads, collect_data);
+ inbuf = gst_collect_pads2_peek (pads, collect_data);
if (inbuf == NULL) {
GST_LOG_OBJECT (ssim, "channel %p: no bytes available", collect_data);
@@ -1470,12 +1469,12 @@ gst_ssim_collected (GstCollectPads * pads, gpointer user_data)
for (collected = pads->data; collected;
collected = g_slist_next (collected)) {
- GstCollectData *collect_data;
+ GstCollectData2 *collect_data;
- collect_data = (GstCollectData *) collected->data;
+ collect_data = (GstCollectData2 *) collected->data;
if (collect_data->pad == ssim->orig) {
- orgbuf = gst_collect_pads_pop (pads, collect_data);;
+ orgbuf = gst_collect_pads2_pop (pads, collect_data);;
GST_DEBUG_OBJECT (ssim, "Original stream - flags(0x%x), timestamp(%"
GST_TIME_FORMAT "), duration(%" GST_TIME_FORMAT ")",
@@ -1492,14 +1491,14 @@ gst_ssim_collected (GstCollectPads * pads, gpointer user_data)
GST_LOG_OBJECT (ssim, "starting to cycle through streams");
for (collected = pads->data; collected; collected = g_slist_next (collected)) {
- GstCollectData *collect_data;
+ GstCollectData2 *collect_data;
GstBuffer *inbuf;
guint8 *indata;
- collect_data = (GstCollectData *) collected->data;
+ collect_data = (GstCollectData2 *) collected->data;
if (collect_data->pad != ssim->orig) {
- inbuf = gst_collect_pads_pop (pads, collect_data);
+ inbuf = gst_collect_pads2_pop (pads, collect_data);
indata = GST_BUFFER_DATA (inbuf);
@@ -1660,7 +1659,7 @@ gst_ssim_change_state (GstElement * element, GstStateChange transition)
ssim->segment_position = 0;
ssim->segment_rate = 1.0;
gst_segment_init (&ssim->segment, GST_FORMAT_UNDEFINED);
- gst_collect_pads_start (ssim->collect);
+ gst_collect_pads2_start (ssim->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
@@ -1668,7 +1667,7 @@ gst_ssim_change_state (GstElement * element, GstStateChange transition)
/* need to unblock the collectpads before calling the
* parent change_state so that streaming can finish
*/
- gst_collect_pads_stop (ssim->collect);
+ gst_collect_pads2_stop (ssim->collect);
break;
default:
break;
diff --git a/gst/videomeasure/gstvideomeasure_ssim.h b/gst/videomeasure/gstvideomeasure_ssim.h
index 57e0907fb..2760d032a 100644
--- a/gst/videomeasure/gstvideomeasure_ssim.h
+++ b/gst/videomeasure/gstvideomeasure_ssim.h
@@ -21,7 +21,7 @@
#define __GST_SSIM_H__
#include <gst/gst.h>
-#include <gst/base/gstcollectpads.h>
+#include <gst/base/gstcollectpads2.h>
#include <gst/video/video.h>
G_BEGIN_DECLS
@@ -85,7 +85,7 @@ struct _GstSSim {
gint padcount;
- GstCollectPads *collect;
+ GstCollectPads2 *collect;
GstPad *orig;
gint frame_rate;
diff --git a/gst/videoparsers/Makefile.am b/gst/videoparsers/Makefile.am
index ac2a51711..fb5497368 100644
--- a/gst/videoparsers/Makefile.am
+++ b/gst/videoparsers/Makefile.am
@@ -3,21 +3,24 @@ plugin_LTLIBRARIES = libgstvideoparsersbad.la
libgstvideoparsersbad_la_SOURCES = plugin.c \
h263parse.c gsth263parse.c \
gstdiracparse.c dirac_parse.c \
- gsth264parse.c gstmpegvideoparse.c
+ gsth264parse.c gstmpegvideoparse.c \
+ gstmpeg4videoparse.c
libgstvideoparsersbad_la_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
-DGST_USE_UNSTABLE_API \
$(GST_BASE_CFLAGS) $(GST_CFLAGS)
-libgstvideoparsersbad_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
+libgstvideoparsersbad_la_LIBADD = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-$(GST_MAJORMINOR).la \
+ $(GST_PLUGINS_BASE_LIBS) -lgstpbutils-$(GST_MAJORMINOR) -lgstvideo-$(GST_MAJORMINOR) \
$(GST_BASE_LIBS) $(GST_LIBS)
libgstvideoparsersbad_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvideoparsersbad_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gsth263parse.h h263parse.h \
gstdiracparse.h dirac_parse.h \
- gsth264parse.h gstmpegvideoparse.h
+ gsth264parse.h gstmpegvideoparse.h \
+ gstmpeg4videoparse.h
Android.mk: Makefile.am $(BUILT_SOURCES)
androgenizer \
diff --git a/gst/videoparsers/gsth264parse.c b/gst/videoparsers/gsth264parse.c
index ab6216800..90916a21e 100644
--- a/gst/videoparsers/gsth264parse.c
+++ b/gst/videoparsers/gsth264parse.c
@@ -29,6 +29,7 @@
#include <gst/base/gstbytereader.h>
#include <gst/base/gstbytewriter.h>
#include <gst/base/gstadapter.h>
+#include <gst/video/video.h>
#include "gsth264parse.h"
#include <string.h>
@@ -95,6 +96,9 @@ static GstCaps *gst_h264_parse_get_caps (GstBaseParse * parse,
GstCaps * filter);
static GstFlowReturn gst_h264_parse_chain (GstPad * pad, GstObject * parent,
GstBuffer * buffer);
+static gboolean gst_h264_parse_event (GstBaseParse * parse, GstEvent * event);
+static gboolean gst_h264_parse_src_event (GstBaseParse * parse,
+ GstEvent * event);
static void
gst_h264_parse_class_init (GstH264ParseClass * klass)
@@ -127,6 +131,8 @@ gst_h264_parse_class_init (GstH264ParseClass * klass)
GST_DEBUG_FUNCPTR (gst_h264_parse_pre_push_frame);
parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_set_caps);
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_get_caps);
+ parse_class->event = GST_DEBUG_FUNCPTR (gst_h264_parse_event);
+ parse_class->src_event = GST_DEBUG_FUNCPTR (gst_h264_parse_src_event);
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&srctemplate));
@@ -190,6 +196,11 @@ gst_h264_parse_reset (GstH264Parse * h264parse)
h264parse->height = 0;
h264parse->fps_num = 0;
h264parse->fps_den = 0;
+ h264parse->aspect_ratio_idc = 0;
+ h264parse->sar_width = 0;
+ h264parse->sar_height = 0;
+ h264parse->upstream_par_n = -1;
+ h264parse->upstream_par_d = -1;
gst_buffer_replace (&h264parse->codec_data, NULL);
h264parse->nal_length_size = 4;
h264parse->packetized = FALSE;
@@ -199,6 +210,11 @@ gst_h264_parse_reset (GstH264Parse * h264parse)
h264parse->last_report = GST_CLOCK_TIME_NONE;
h264parse->push_codec = FALSE;
+ h264parse->have_pps = FALSE;
+ h264parse->have_sps = FALSE;
+
+ h264parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
+ h264parse->force_key_unit_event = NULL;
gst_h264_parse_reset_frame (h264parse);
}
@@ -399,15 +415,11 @@ static void
gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
{
guint nal_type;
- GstH264SliceHdr slice;
GstH264PPS pps;
GstH264SPS sps;
GstH264SEIMessage sei;
-
- gboolean slcparsed = FALSE;
GstH264NalParser *nalparser = h264parse->nalparser;
-
/* nothing to do for broken input */
if (G_UNLIKELY (nalu->size < 2)) {
GST_DEBUG_OBJECT (h264parse, "not processing nal size %u", nalu->size);
@@ -427,8 +439,15 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
h264parse->update_caps = TRUE;
- /* found in stream, no need to forcibly push at start */
- h264parse->push_codec = FALSE;
+ h264parse->have_sps = TRUE;
+ if (h264parse->push_codec && h264parse->have_pps) {
+ /* SPS and PPS found in stream before the first pre_push_frame, no need
+ * to forcibly push at start */
+ GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
+ h264parse->push_codec = FALSE;
+ h264parse->have_sps = FALSE;
+ h264parse->have_pps = FALSE;
+ }
gst_h264_parser_store_nal (h264parse, sps.id, nal_type, nalu);
break;
@@ -437,8 +456,15 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
/* parameters might have changed, force caps check */
GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
h264parse->update_caps = TRUE;
- /* found in stream, no need to forcibly push at start */
- h264parse->push_codec = FALSE;
+ h264parse->have_pps = TRUE;
+ if (h264parse->push_codec && h264parse->have_sps) {
+ /* SPS and PPS found in stream before the first pre_push_frame, no need
+ * to forcibly push at start */
+ GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
+ h264parse->push_codec = FALSE;
+ h264parse->have_sps = FALSE;
+ h264parse->have_pps = FALSE;
+ }
gst_h264_parser_store_nal (h264parse, pps.id, nal_type, nalu);
break;
@@ -470,32 +496,33 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
case GST_H264_NAL_SLICE_DPA:
case GST_H264_NAL_SLICE_DPB:
case GST_H264_NAL_SLICE_DPC:
- slcparsed = TRUE;
- if (gst_h264_parser_parse_slice_hdr (nalparser, nalu,
- &slice, FALSE, FALSE) == GST_H264_PARSER_ERROR)
- return;
-
- /* real frame data */
- h264parse->frame_start |= (slice.first_mb_in_slice == 0);
+ case GST_H264_NAL_SLICE_IDR:
+ /* don't need to parse the whole slice (header) here */
+ if (*(nalu->data + nalu->offset + 1) & 0x80) {
+ /* means first_mb_in_slice == 0 */
+ /* real frame data */
+ GST_DEBUG_OBJECT (h264parse, "first_mb_in_slice = 0");
+ h264parse->frame_start = TRUE;
+ }
+ GST_DEBUG_OBJECT (h264parse, "frame start: %i", h264parse->frame_start);
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ GstH264SliceHdr slice;
+ GstH264ParserResult pres;
+
+ pres = gst_h264_parser_parse_slice_hdr (nalparser, nalu, &slice,
+ FALSE, FALSE);
+ GST_DEBUG_OBJECT (h264parse,
+ "parse result %d, first MB: %u, slice type: %u",
+ pres, slice.first_mb_in_slice, slice.type);
+ }
+#endif
+ if (G_LIKELY (nal_type != GST_H264_NAL_SLICE_IDR &&
+ !h264parse->push_codec))
+ break;
/* if we need to sneak codec NALs into the stream,
* this is a good place, so fake it as IDR
* (which should be at start anyway) */
- GST_DEBUG_OBJECT (h264parse, "frame start: %i first_mb_in_slice %i",
- h264parse->frame_start, slice.first_mb_in_slice);
- if (G_LIKELY (!h264parse->push_codec))
- break;
- /* fall-through */
- case GST_H264_NAL_SLICE_IDR:
- if (!slcparsed) {
- if (gst_h264_parser_parse_slice_hdr (nalparser, nalu,
- &slice, FALSE, FALSE) == GST_H264_PARSER_ERROR)
- return;
- GST_DEBUG_OBJECT (h264parse, "frame start: %i first_mb_in_slice %i",
- h264parse->frame_start, slice.first_mb_in_slice);
- }
- /* real frame data */
- h264parse->frame_start |= (slice.first_mb_in_slice == 0);
-
/* mark where config needs to go if interval expired */
/* mind replacement buffer if applicable */
if (h264parse->idr_pos == -1) {
@@ -506,9 +533,6 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
GST_DEBUG_OBJECT (h264parse, "marking IDR in frame at offset %d",
h264parse->idr_pos);
}
-
- GST_DEBUG_OBJECT (h264parse, "first MB: %u, slice type: %u",
- slice.first_mb_in_slice, slice.type);
break;
default:
gst_h264_parser_parse_nal (nalparser, nalu);
@@ -622,8 +646,18 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
GST_DEBUG_OBJECT (h264parse, "last parse position %u", current_off);
while (TRUE) {
- switch (gst_h264_parser_identify_nalu (nalparser, data, current_off,
- size, &nalu)) {
+ GstH264ParserResult pres;
+
+ if (h264parse->packetized)
+ pres =
+ gst_h264_parser_identify_nalu_unchecked (nalparser, data, current_off,
+ size, &nalu);
+ else
+ pres =
+ gst_h264_parser_identify_nalu (nalparser, data, current_off, size,
+ &nalu);
+
+ switch (pres) {
case GST_H264_PARSER_OK:
GST_DEBUG_OBJECT (h264parse, "complete nal found. "
"current offset: %u, Nal offset: %u, Nal Size: %u",
@@ -631,10 +665,12 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
GST_DEBUG_OBJECT (h264parse, "current off. %u",
nalu.offset + nalu.size);
+
if (!h264parse->nalu.size && !h264parse->nalu.valid)
h264parse->nalu = nalu;
+
/* need 2 bytes of next nal */
- if (nalu.offset + nalu.size + 4 + 2 > size) {
+ if (!h264parse->packetized && (nalu.offset + nalu.size + 4 + 2 > size)) {
if (GST_BASE_PARSE_DRAINING (parse)) {
drain = TRUE;
} else {
@@ -704,6 +740,12 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
/* if no next nal, we know it's complete here */
if (drain || gst_h264_parse_collect_nal (h264parse, data, size, &nalu))
break;
+
+ /* In packetized mode we know there's only on NALU in each input packet */
+ if (h264parse->packetized)
+ break;
+
+ GST_DEBUG_OBJECT (h264parse, "Looking for more");
}
end:
@@ -716,8 +758,8 @@ end:
parsing_error:
GST_DEBUG_OBJECT (h264parse, "error parsing Nal Unit");
-more:
+more:
/* ask for best next available */
*framesize = G_MAXUINT;
if (!h264parse->nalu.size) {
@@ -824,6 +866,98 @@ gst_h264_parse_make_codec_data (GstH264Parse * h264parse)
}
static void
+gst_h264_parse_get_par (GstH264Parse * h264parse, gint * num, gint * den)
+{
+ gint par_n, par_d;
+
+ if (h264parse->upstream_par_n != -1 && h264parse->upstream_par_d != -1) {
+ *num = h264parse->upstream_par_n;
+ *den = h264parse->upstream_par_d;
+ return;
+ }
+
+ par_n = par_d = 0;
+ switch (h264parse->aspect_ratio_idc) {
+ case 0:
+ par_n = par_d = 0;
+ break;
+ case 1:
+ par_n = 1;
+ par_d = 1;
+ break;
+ case 2:
+ par_n = 12;
+ par_d = 11;
+ break;
+ case 3:
+ par_n = 10;
+ par_d = 11;
+ break;
+ case 4:
+ par_n = 16;
+ par_d = 11;
+ break;
+ case 5:
+ par_n = 40;
+ par_d = 33;
+ break;
+ case 6:
+ par_n = 24;
+ par_d = 11;
+ break;
+ case 7:
+ par_n = 20;
+ par_d = 11;
+ break;
+ case 8:
+ par_n = 32;
+ par_d = 11;
+ break;
+ case 9:
+ par_n = 80;
+ par_d = 33;
+ break;
+ case 10:
+ par_n = 18;
+ par_d = 11;
+ break;
+ case 11:
+ par_n = 15;
+ par_d = 11;
+ break;
+ case 12:
+ par_n = 64;
+ par_d = 33;
+ break;
+ case 13:
+ par_n = 160;
+ par_d = 99;
+ break;
+ case 14:
+ par_n = 4;
+ par_d = 3;
+ break;
+ case 15:
+ par_n = 3;
+ par_d = 2;
+ break;
+ case 16:
+ par_n = 2;
+ par_d = 1;
+ break;
+ case 255:
+ par_n = h264parse->sar_width;
+ par_d = h264parse->sar_height;
+ break;
+ default:
+ par_n = par_d = 0;
+ }
+
+ *num = par_n;
+ *den = par_d;
+}
+
+static void
gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps)
{
GstH264SPS *sps;
@@ -875,33 +1009,80 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps)
caps = NULL;
if (G_UNLIKELY (!sps)) {
caps = gst_caps_copy (sink_caps);
- } else if (G_UNLIKELY (h264parse->width != sps->width ||
- h264parse->height != sps->height || h264parse->fps_num != sps->fps_num
- || h264parse->fps_den != sps->fps_den || modified)) {
- caps = gst_caps_copy (sink_caps);
- /* sps should give this */
- gst_caps_set_simple (caps, "width", G_TYPE_INT, sps->width,
- "height", G_TYPE_INT, sps->height, NULL);
- h264parse->height = sps->height;
- h264parse->width = sps->width;
- /* but not necessarily or reliably this */
- if ((!h264parse->fps_num || !h264parse->fps_den) &&
- sps->fps_num > 0 && sps->fps_den > 0) {
- gst_caps_set_simple (caps, "framerate",
- GST_TYPE_FRACTION, sps->fps_num, sps->fps_den, NULL);
- h264parse->fps_num = sps->fps_num;
- h264parse->fps_den = sps->fps_den;
- gst_base_parse_set_frame_rate (GST_BASE_PARSE (h264parse),
- h264parse->fps_num, h264parse->fps_den, 0, 0);
+ } else {
+ if (G_UNLIKELY (h264parse->width != sps->width ||
+ h264parse->height != sps->height)) {
+ GST_INFO_OBJECT (h264parse, "resolution changed %dx%d",
+ sps->width, sps->height);
+ h264parse->width = sps->width;
+ h264parse->height = sps->height;
+ modified = TRUE;
+ }
+
+ /* 0/1 is set as the default in the codec parser */
+ if (sps->vui_parameters.timing_info_present_flag &&
+ !(sps->fps_num == 0 && sps->fps_den == 1)) {
+ if (G_UNLIKELY (h264parse->fps_num != sps->fps_num
+ || h264parse->fps_den != sps->fps_den)) {
+ GST_INFO_OBJECT (h264parse, "framerate changed %d/%d",
+ sps->fps_num, sps->fps_den);
+ h264parse->fps_num = sps->fps_num;
+ h264parse->fps_den = sps->fps_den;
+ gst_base_parse_set_frame_rate (GST_BASE_PARSE (h264parse),
+ h264parse->fps_num, h264parse->fps_den, 0, 0);
+ modified = TRUE;
+ }
+ }
+
+ if (sps->vui_parameters.aspect_ratio_info_present_flag) {
+ if (G_UNLIKELY (h264parse->aspect_ratio_idc !=
+ sps->vui_parameters.aspect_ratio_idc)) {
+ h264parse->aspect_ratio_idc = sps->vui_parameters.aspect_ratio_idc;
+ GST_INFO_OBJECT (h264parse, "aspect ratio idc changed %d",
+ h264parse->aspect_ratio_idc);
+ modified = TRUE;
+ }
+
+ /* 255 means sar_width and sar_height present */
+ if (G_UNLIKELY (sps->vui_parameters.aspect_ratio_idc == 255 &&
+ (h264parse->sar_width != sps->vui_parameters.sar_width ||
+ h264parse->sar_height != sps->vui_parameters.sar_height))) {
+ h264parse->sar_width = sps->vui_parameters.sar_width;
+ h264parse->sar_height = sps->vui_parameters.sar_height;
+ GST_INFO_OBJECT (h264parse, "aspect ratio SAR changed %d/%d",
+ h264parse->sar_width, h264parse->sar_height);
+ modified = TRUE;
+ }
+ }
+
+ if (G_UNLIKELY (modified)) {
+ caps = gst_caps_copy (sink_caps);
+ /* sps should give this */
+ gst_caps_set_simple (caps, "width", G_TYPE_INT, sps->width,
+ "height", G_TYPE_INT, sps->height, NULL);
+ /* but not necessarily or reliably this */
+ if (h264parse->fps_num > 0 && h264parse->fps_den > 0)
+ gst_caps_set_simple (caps, "framerate",
+ GST_TYPE_FRACTION, sps->fps_num, sps->fps_den, NULL);
}
}
if (caps) {
+ gint par_n, par_d;
+
gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE,
"stream-format", G_TYPE_STRING,
gst_h264_parse_get_string (h264parse, TRUE, h264parse->format),
"alignment", G_TYPE_STRING,
gst_h264_parse_get_string (h264parse, FALSE, h264parse->align), NULL);
+
+ gst_h264_parse_get_par (h264parse, &par_n, &par_d);
+ if (par_n != 0 && par_d != 0) {
+ GST_INFO_OBJECT (h264parse, "PAR %d/%d", par_n, par_d);
+ gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ par_n, par_d, NULL);
+ }
+
if (buf) {
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_replace (&h264parse->codec_data, buf);
@@ -1104,15 +1285,109 @@ gst_h264_parse_push_codec_buffer (GstH264Parse * h264parse, GstBuffer * nal,
return gst_pad_push (GST_BASE_PARSE_SRC_PAD (h264parse), nal);
}
+static GstEvent *
+check_pending_key_unit_event (GstEvent * pending_event, GstSegment * segment,
+ GstClockTime timestamp, guint flags, GstClockTime pending_key_unit_ts)
+{
+ GstClockTime running_time, stream_time;
+ gboolean all_headers;
+ guint count;
+ GstEvent *event = NULL;
+
+ g_return_val_if_fail (segment != NULL, NULL);
+
+ if (pending_event == NULL)
+ goto out;
+
+ if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
+ timestamp == GST_CLOCK_TIME_NONE)
+ goto out;
+
+ running_time = gst_segment_to_running_time (segment,
+ GST_FORMAT_TIME, timestamp);
+
+ GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
+ if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
+ running_time < pending_key_unit_ts)
+ goto out;
+
+ if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
+ GST_DEBUG ("pending force key unit, waiting for keyframe");
+ goto out;
+ }
+
+ stream_time = gst_segment_to_stream_time (segment,
+ GST_FORMAT_TIME, timestamp);
+
+ gst_video_event_parse_upstream_force_key_unit (pending_event,
+ NULL, &all_headers, &count);
+
+ event =
+ gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
+ running_time, all_headers, count);
+ gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
+
+out:
+ return event;
+}
+
+static void
+gst_h264_parse_prepare_key_unit (GstH264Parse * parse, GstEvent * event)
+{
+ GstClockTime running_time;
+ guint count;
+ gboolean have_sps, have_pps;
+ gint i;
+
+ parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
+ gst_event_replace (&parse->force_key_unit_event, NULL);
+
+ gst_video_event_parse_downstream_force_key_unit (event,
+ NULL, NULL, &running_time, NULL, &count);
+
+ GST_INFO_OBJECT (parse, "pushing downstream force-key-unit event %d "
+ "%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
+ GST_TIME_ARGS (running_time), count);
+ gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
+
+ have_sps = have_pps = FALSE;
+ for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
+ if (parse->sps_nals[i] != NULL) {
+ have_sps = TRUE;
+ break;
+ }
+ }
+ for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
+ if (parse->pps_nals[i] != NULL) {
+ have_pps = TRUE;
+ break;
+ }
+ }
+
+ GST_INFO_OBJECT (parse, "preparing key unit, have sps %d have pps %d",
+ have_sps, have_pps);
+
+ /* set push_codec to TRUE so that pre_push_frame sends SPS/PPS again */
+ parse->push_codec = TRUE;
+}
+
static GstFlowReturn
gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
GstH264Parse *h264parse;
GstBuffer *buffer;
+ GstEvent *event;
h264parse = GST_H264_PARSE (parse);
buffer = frame->buffer;
+ if ((event = check_pending_key_unit_event (h264parse->force_key_unit_event,
+ &parse->segment, GST_BUFFER_TIMESTAMP (buffer),
+ GST_BUFFER_FLAGS (buffer), h264parse->pending_key_unit_ts))) {
+ gst_h264_parse_prepare_key_unit (h264parse, event);
+ }
+
/* periodic SPS/PPS sending */
if (h264parse->interval > 0 || h264parse->push_codec) {
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
@@ -1207,6 +1482,8 @@ gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
}
/* we pushed whatever we had */
h264parse->push_codec = FALSE;
+ h264parse->have_sps = FALSE;
+ h264parse->have_pps = FALSE;
}
}
@@ -1239,6 +1516,8 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
gst_structure_get_int (str, "height", &h264parse->height);
gst_structure_get_fraction (str, "framerate", &h264parse->fps_num,
&h264parse->fps_den);
+ gst_structure_get_fraction (str, "pixel-aspect-ratio",
+ &h264parse->upstream_par_n, &h264parse->upstream_par_d);
/* get upstream format and align from caps */
gst_h264_parse_format_from_caps (caps, &format, &align);
@@ -1348,6 +1627,8 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
/* arrange to insert codec-data in-stream if needed.
* src caps are only arranged for later on */
h264parse->push_codec = TRUE;
+ h264parse->have_sps = FALSE;
+ h264parse->have_pps = FALSE;
h264parse->split_packetized = TRUE;
h264parse->packetized = TRUE;
}
@@ -1416,6 +1697,87 @@ gst_h264_parse_get_caps (GstBaseParse * parse, GstCaps * filter)
return res;
}
+static gboolean
+gst_h264_parse_event (GstBaseParse * parse, GstEvent * event)
+{
+ gboolean handled = FALSE;
+ GstH264Parse *h264parse = GST_H264_PARSE (parse);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ {
+ GstClockTime timestamp, stream_time, running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (!gst_video_event_is_force_key_unit (event))
+ break;
+
+ gst_video_event_parse_downstream_force_key_unit (event,
+ &timestamp, &stream_time, &running_time, &all_headers, &count);
+
+ GST_INFO_OBJECT (h264parse, "received downstream force key unit event, "
+ "seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
+ gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
+ all_headers, count);
+ handled = TRUE;
+
+ if (h264parse->force_key_unit_event) {
+ GST_INFO_OBJECT (h264parse, "ignoring force key unit event "
+ "as one is already queued");
+ break;
+ }
+
+ h264parse->pending_key_unit_ts = running_time;
+ gst_event_replace (&h264parse->force_key_unit_event, event);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return handled;
+}
+
+static gboolean
+gst_h264_parse_src_event (GstBaseParse * parse, GstEvent * event)
+{
+ gboolean handled = FALSE;
+ GstH264Parse *h264parse = GST_H264_PARSE (parse);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ {
+ GstClockTime running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (!gst_video_event_is_force_key_unit (event))
+ break;
+
+ gst_video_event_parse_upstream_force_key_unit (event,
+ &running_time, &all_headers, &count);
+
+ GST_INFO_OBJECT (h264parse, "received upstream force-key-unit event, "
+ "seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
+ gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
+ all_headers, count);
+
+ if (!all_headers)
+ break;
+
+ h264parse->pending_key_unit_ts = running_time;
+ gst_event_replace (&h264parse->force_key_unit_event, event);
+ /* leave handled = FALSE so that the event gets propagated upstream */
+ break;
+ }
+ default:
+ break;
+ }
+
+ return handled;
+}
+
static GstFlowReturn
gst_h264_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
diff --git a/gst/videoparsers/gsth264parse.h b/gst/videoparsers/gsth264parse.h
index e013a3fac..4800092b3 100644
--- a/gst/videoparsers/gsth264parse.h
+++ b/gst/videoparsers/gsth264parse.h
@@ -58,6 +58,9 @@ struct _GstH264Parse
/* stream */
gint width, height;
gint fps_num, fps_den;
+ gint aspect_ratio_idc;
+ gint sar_width, sar_height;
+ gint upstream_par_n, upstream_par_d;
GstBuffer *codec_data;
guint nal_length_size;
gboolean packetized;
@@ -71,6 +74,8 @@ struct _GstH264Parse
GstClockTime last_report;
gboolean push_codec;
+ gboolean have_sps;
+ gboolean have_pps;
/* collected SPS and PPS NALUs */
GstBuffer *sps_nals[GST_H264_MAX_SPS_COUNT];
@@ -102,6 +107,9 @@ struct _GstH264Parse
/* props */
gboolean split_packetized;
guint interval;
+
+ GstClockTime pending_key_unit_ts;
+ GstEvent *force_key_unit_event;
};
struct _GstH264ParseClass
diff --git a/gst/mpeg4videoparse/mpeg4videoparse.c b/gst/videoparsers/gstmpeg4videoparse.c
index 5440ac291..53f97debe 100644
--- a/gst/mpeg4videoparse/mpeg4videoparse.c
+++ b/gst/videoparsers/gstmpeg4videoparse.c
@@ -3,8 +3,10 @@
* @author Sjoerd Simons <sjoerd@luon.net>
* Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
* Copyright (C) <2011> Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
- * Copyright (C) <2011> Collabora Multimedia
* Copyright (C) <2011> Nokia Corporation
+ * Copyright (C) <2011> Intel
+ * Copyright (C) <2011> Collabora Ltd.
+ * Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -28,7 +30,10 @@
#include <string.h>
#include <gst/base/gstbytereader.h>
-#include "mpeg4videoparse.h"
+#include <gst/pbutils/codec-utils.h>
+#include <gst/video/video.h>
+
+#include "gstmpeg4videoparse.h"
GST_DEBUG_CATEGORY (mpeg4v_parse_debug);
#define GST_CAT_DEFAULT mpeg4v_parse_debug
@@ -49,7 +54,7 @@ GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK,
);
/* Properties */
-#define DEFAULT_PROP_DROP TRUE
+#define DEFAULT_PROP_DROP TRUE
#define DEFAULT_CONFIG_INTERVAL (0)
enum
@@ -79,12 +84,15 @@ static void gst_mpeg4vparse_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_mpeg4vparse_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
+static gboolean gst_mpeg4vparse_event (GstBaseParse * parse, GstEvent * event);
+static gboolean gst_mpeg4vparse_src_event (GstBaseParse * parse,
+ GstEvent * event);
static void
gst_mpeg4vparse_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
- GstMpeg4VParse *parse = GST_MPEG4VIDEOPARSE (object);
+ GstMpeg4VParse *parse = GST_MPEG4VIDEO_PARSE (object);
switch (property_id) {
case PROP_DROP:
@@ -102,7 +110,7 @@ static void
gst_mpeg4vparse_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
- GstMpeg4VParse *parse = GST_MPEG4VIDEOPARSE (object);
+ GstMpeg4VParse *parse = GST_MPEG4VIDEO_PARSE (object);
switch (property_id) {
case PROP_DROP:
@@ -120,8 +128,8 @@ static void
gst_mpeg4vparse_class_init (GstMpeg4VParseClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
@@ -142,16 +150,19 @@ gst_mpeg4vparse_class_init (GstMpeg4VParseClass * klass)
0, 3600, DEFAULT_CONFIG_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gst_element_class_add_pad_template (gstelement_class,
+ gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template));
- gst_element_class_add_pad_template (gstelement_class,
+ gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
- gst_element_class_set_details_simple (gstelement_class,
+ gst_element_class_set_details_simple (element_class,
"MPEG 4 video elementary stream parser", "Codec/Parser/Video",
"Parses MPEG-4 Part 2 elementary video streams",
"Julien Moutte <julien@fluendo.com>");
+ GST_DEBUG_CATEGORY_INIT (mpeg4v_parse_debug, "mpeg4videoparse", 0,
+ "MPEG-4 video parser");
+
/* Override BaseParse vfuncs */
parse_class->start = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_stop);
@@ -162,6 +173,8 @@ gst_mpeg4vparse_class_init (GstMpeg4VParseClass * klass)
GST_DEBUG_FUNCPTR (gst_mpeg4vparse_pre_push_frame);
parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_set_caps);
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_get_caps);
+ parse_class->event = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_event);
+ parse_class->src_event = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_src_event);
}
static void
@@ -177,25 +190,28 @@ gst_mpeg4vparse_reset_frame (GstMpeg4VParse * mp4vparse)
/* done parsing; reset state */
mp4vparse->last_sc = -1;
mp4vparse->vop_offset = -1;
- mp4vparse->vos_offset = -1;
- mp4vparse->vo_offset = -1;
+ mp4vparse->vo_found = FALSE;
+ mp4vparse->vol_offset = -1;
}
static void
gst_mpeg4vparse_reset (GstMpeg4VParse * mp4vparse)
{
gst_mpeg4vparse_reset_frame (mp4vparse);
- mp4vparse->profile = 0;
mp4vparse->update_caps = TRUE;
+ mp4vparse->profile = NULL;
+ mp4vparse->level = NULL;
+ mp4vparse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
+ mp4vparse->force_key_unit_event = NULL;
gst_buffer_replace (&mp4vparse->config, NULL);
- memset (&mp4vparse->params, 0, sizeof (mp4vparse->params));
+ memset (&mp4vparse->vol, 0, sizeof (mp4vparse->vol));
}
static gboolean
gst_mpeg4vparse_start (GstBaseParse * parse)
{
- GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
+ GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GST_DEBUG_OBJECT (parse, "start");
@@ -209,7 +225,7 @@ gst_mpeg4vparse_start (GstBaseParse * parse)
static gboolean
gst_mpeg4vparse_stop (GstBaseParse * parse)
{
- GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
+ GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GST_DEBUG_OBJECT (parse, "stop");
@@ -219,29 +235,41 @@ gst_mpeg4vparse_stop (GstBaseParse * parse)
}
static gboolean
-gst_mpeg4vparse_process_config (GstMpeg4VParse * mp4vparse, const guint8 * data,
- gsize size)
+gst_mpeg4vparse_process_config (GstMpeg4VParse * mp4vparse,
+ const guint8 * data, guint offset, gsize size)
{
/* only do stuff if something new */
- if (mp4vparse->config && size == gst_buffer_get_size (mp4vparse->config) &&
- gst_buffer_memcmp (mp4vparse->config, 0, data, size) == 0)
+ if (!gst_buffer_memcmp (mp4vparse->config, offset, data, size))
return TRUE;
- if (!gst_mpeg4_params_parse_config (&mp4vparse->params, data, size)) {
- GST_DEBUG_OBJECT (mp4vparse, "failed to parse config data (size %"
- G_GSSIZE_FORMAT ")", size);
+ if (mp4vparse->vol_offset < 0) {
+ GST_WARNING ("No video object Layer parsed in this frame, cannot accept "
+ "config");
return FALSE;
}
+ /* If the parsing fail, we accept the config only if we don't have
+ * any config yet. */
+ if (gst_mpeg4_parse_video_object_layer (&mp4vparse->vol,
+ NULL, data + mp4vparse->vol_offset,
+ size - mp4vparse->vol_offset) != GST_MPEG4_PARSER_OK &&
+ mp4vparse->config)
+ return FALSE;
+
+ GST_LOG_OBJECT (mp4vparse, "Width/Height: %u/%u, "
+ "time increment resolution: %u fixed time increment: %u",
+ mp4vparse->vol.width, mp4vparse->vol.height,
+ mp4vparse->vol.vop_time_increment_resolution,
+ mp4vparse->vol.fixed_vop_time_increment);
+
+
GST_LOG_OBJECT (mp4vparse, "accepting parsed config size %" G_GSSIZE_FORMAT,
size);
- /* parsing ok, so accept it as new config */
if (mp4vparse->config != NULL)
gst_buffer_unref (mp4vparse->config);
- mp4vparse->config = gst_buffer_new_and_alloc (size);
- gst_buffer_fill (mp4vparse->config, 0, data, size);
+ mp4vparse->config = gst_buffer_new_wrapped (g_memdup (data, size), size);
/* trigger src caps update */
mp4vparse->update_caps = TRUE;
@@ -251,74 +279,80 @@ gst_mpeg4vparse_process_config (GstMpeg4VParse * mp4vparse, const guint8 * data,
/* caller guarantees at least start code in @buf at @off */
static gboolean
-gst_mpeg4vparse_process_sc (GstMpeg4VParse * mp4vparse, GstBuffer * buf,
- gint off)
+gst_mpeg4vparse_process_sc (GstMpeg4VParse * mp4vparse, GstMpeg4Packet * packet,
+ gsize size)
{
- guint8 *data;
- gsize size;
- guint code;
- g_return_val_if_fail (buf && gst_buffer_get_size (buf) >= off + 4, FALSE);
-
- data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
- code = data[off + 3];
-
- GST_LOG_OBJECT (mp4vparse, "process startcode %x", code);
+ GST_LOG_OBJECT (mp4vparse, "process startcode %x", packet->type);
/* if we found a VOP, next start code ends it,
* except for final VOS end sequence code included in last VOP-frame */
- if (mp4vparse->vop_offset >= 0 && code != MPEG4_VOS_ENDCODE) {
- if (G_LIKELY (size > mp4vparse->vop_offset + 4)) {
+ if (mp4vparse->vop_offset >= 0 &&
+ packet->type != GST_MPEG4_VISUAL_OBJ_SEQ_END) {
+ if (G_LIKELY (size > mp4vparse->vop_offset + 1)) {
mp4vparse->intra_frame =
- ((data[mp4vparse->vop_offset + 4] >> 6 & 0x3) == 0);
+ ((packet->data[mp4vparse->vop_offset + 1] >> 6 & 0x3) == 0);
} else {
GST_WARNING_OBJECT (mp4vparse, "no data following VOP startcode");
mp4vparse->intra_frame = FALSE;
}
- GST_LOG_OBJECT (mp4vparse, "ending frame of size %d, is intra %d", off,
- mp4vparse->intra_frame);
- gst_buffer_unmap (buf, data, size);
+ GST_LOG_OBJECT (mp4vparse, "ending frame of size %d, is intra %d",
+ packet->offset - 3, mp4vparse->intra_frame);
return TRUE;
}
- switch (code) {
- case MPEG4_VOP_STARTCODE:
- case MPEG4_GOP_STARTCODE:
+ switch (packet->type) {
+ case GST_MPEG4_VIDEO_OBJ_PLANE:
+ case GST_MPEG4_GROUP_OF_VOP:
{
- gint offset;
- if (code == MPEG4_VOP_STARTCODE) {
+ if (packet->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
GST_LOG_OBJECT (mp4vparse, "startcode is VOP");
- mp4vparse->vop_offset = off;
+ mp4vparse->vop_offset = packet->offset;
} else {
GST_LOG_OBJECT (mp4vparse, "startcode is GOP");
}
/* parse config data ending here if proper startcodes found earlier;
* preferably start at VOS (visual object sequence),
* otherwise at VO (video object) */
- offset = mp4vparse->vos_offset >= 0 ?
- mp4vparse->vos_offset : mp4vparse->vo_offset;
- if (offset >= 0) {
- gst_mpeg4vparse_process_config (mp4vparse, data, off);
+ if (mp4vparse->vo_found) {
+
+ /*Do not take care startcode into account */
+ gst_mpeg4vparse_process_config (mp4vparse,
+ packet->data, packet->offset, packet->offset - 3);
+
/* avoid accepting again for a VOP sc following a GOP sc */
- mp4vparse->vos_offset = -1;
- mp4vparse->vo_offset = -1;
+ mp4vparse->vo_found = FALSE;
}
break;
}
- case MPEG4_VOS_STARTCODE:
- GST_LOG_OBJECT (mp4vparse, "startcode is VOS");
- mp4vparse->vos_offset = off;
+ case GST_MPEG4_VISUAL_OBJ_SEQ_START:
+ GST_LOG_OBJECT (mp4vparse, "Visual Sequence Start");
+ mp4vparse->vo_found = TRUE;
+ mp4vparse->profile = gst_codec_utils_mpeg4video_get_profile (packet->data
+ + packet->offset + 1, packet->offset);
+ mp4vparse->level = gst_codec_utils_mpeg4video_get_level (packet->data
+ + packet->offset + 1, packet->offset);
break;
+ case GST_MPEG4_VISUAL_OBJ:
+ GST_LOG_OBJECT (mp4vparse, "Visual Object");
default:
- /* VO (video object) cases */
- if (code <= 0x1f) {
- GST_LOG_OBJECT (mp4vparse, "startcode is VO");
- mp4vparse->vo_offset = off;
+ if (packet->type >= GST_MPEG4_VIDEO_LAYER_FIRST &&
+ packet->type <= GST_MPEG4_VIDEO_LAYER_LAST) {
+
+ GST_LOG_OBJECT (mp4vparse, "Video Object Layer");
+
+ /* wee keep track of the offset to parse later on */
+ if (mp4vparse->vol_offset < 0)
+ mp4vparse->vol_offset = packet->offset;
+
+ /* VO (video object) cases */
+ } else if (packet->type <= GST_MPEG4_VIDEO_OBJ_LAST) {
+ GST_LOG_OBJECT (mp4vparse, "Video object");
+ mp4vparse->vo_found = TRUE;
}
break;
}
- gst_buffer_unmap (buf, data, size);
/* at least need to have a VOP in a frame */
return FALSE;
@@ -332,22 +366,19 @@ static gboolean
gst_mpeg4vparse_check_valid_frame (GstBaseParse * parse,
GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
- GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
- GstBuffer *buf = frame->buffer;
- GstByteReader reader;
+ GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
+ GstMpeg4Packet packet;
+ guint8 *data = NULL;
+ gsize size;
gint off = 0;
gboolean ret = FALSE;
- guint code;
- guint8 *data;
- gsize size;
- data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
- gst_byte_reader_init (&reader, data, size);
+ data = gst_buffer_map (frame->buffer, &size, NULL, GST_MAP_READ);
retry:
/* at least start code and subsequent byte */
if (G_UNLIKELY (size - off < 5))
- goto done;
+ goto out;
/* avoid stale cached parsing state */
if (!(frame->flags & GST_BASE_PARSE_FRAME_FLAG_PARSING)) {
@@ -364,36 +395,33 @@ retry:
goto next;
}
- off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffff00, 0x00000100,
- off, size - off);
-
- GST_LOG_OBJECT (mp4vparse, "possible sync at buffer offset %d", off);
-
/* didn't find anything that looks like a sync word, skip */
- if (G_UNLIKELY (off < 0)) {
- *skipsize = size - 3;
- goto done;
+ switch (gst_mpeg4_parse (&packet, TRUE, NULL, data, off, size)) {
+ case (GST_MPEG4_PARSER_NO_PACKET):
+ case (GST_MPEG4_PARSER_ERROR):
+ *skipsize = size - 3;
+ goto out;
+ default:
+ break;
}
+ off = packet.offset;
/* possible frame header, but not at offset 0? skip bytes before sync */
- if (G_UNLIKELY (off > 0)) {
- *skipsize = off;
- goto done;
+ if (G_UNLIKELY (off > 3)) {
+ *skipsize = off - 3;
+ goto out;
}
- /* ensure start code looks like a real starting start code */
- code = data[3];
- switch (code) {
- case MPEG4_VOP_STARTCODE:
- case MPEG4_VOS_STARTCODE:
- case MPEG4_GOP_STARTCODE:
+ switch (packet.type) {
+ case GST_MPEG4_GROUP_OF_VOP:
+ case GST_MPEG4_VISUAL_OBJ_SEQ_START:
+ case GST_MPEG4_VIDEO_OBJ_PLANE:
break;
default:
- if (code <= 0x1f)
+ if (packet.type <= GST_MPEG4_VIDEO_OBJ_LAST)
break;
/* undesirable sc */
GST_LOG_OBJECT (mp4vparse, "start code is no VOS, VO, VOP or GOP");
- off++;
goto retry;
}
@@ -401,44 +429,52 @@ retry:
mp4vparse->last_sc = 0;
/* examine start code, which should not end frame at present */
- gst_mpeg4vparse_process_sc (mp4vparse, buf, 0);
+ gst_mpeg4vparse_process_sc (mp4vparse, &packet, size);
next:
+ GST_LOG_OBJECT (mp4vparse, "Looking for frame end");
+
/* start is fine as of now */
*skipsize = 0;
/* position a bit further than last sc */
off++;
- /* so now we have start code at start of data; locate next start code */
- off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffff00, 0x00000100,
- off, size - off);
-
- GST_LOG_OBJECT (mp4vparse, "next start code at %d", off);
- if (off < 0) {
- /* if draining, take all */
- if (GST_BASE_PARSE_DRAINING (parse)) {
- off = size;
- ret = TRUE;
- } else {
- /* resume scan where we left it */
- mp4vparse->last_sc = size - 4;
- /* request best next available */
- *framesize = G_MAXUINT;
- goto done;
- }
- } else {
- /* decide whether this startcode ends a frame */
- ret = gst_mpeg4vparse_process_sc (mp4vparse, buf, off);
+
+ /* so now we have start code at start of data; locate next packet */
+ switch (gst_mpeg4_parse (&packet, TRUE, NULL, data, off, size)) {
+ case (GST_MPEG4_PARSER_NO_PACKET_END):
+ ret = gst_mpeg4vparse_process_sc (mp4vparse, &packet, size);
+ if (ret)
+ break;
+ case (GST_MPEG4_PARSER_NO_PACKET):
+ case (GST_MPEG4_PARSER_ERROR):
+ /* if draining, take all */
+ if (GST_BASE_PARSE_DRAINING (parse)) {
+ *framesize = size;
+ ret = TRUE;
+ } else {
+ /* resume scan where we left it */
+ mp4vparse->last_sc = size - 3;
+ /* request best next available */
+ *framesize = G_MAXUINT;
+ }
+ goto out;
+ break;
+ default:
+ /* decide whether this startcode ends a frame */
+ ret = gst_mpeg4vparse_process_sc (mp4vparse, &packet, size);
+ break;
}
+ off = packet.offset;
+
if (ret) {
- *framesize = off;
+ *framesize = off - 3;
} else {
goto next;
}
-done:
- gst_buffer_unmap (buf, data, size);
-
+out:
+ gst_buffer_unmap (frame->buffer, data, size);
return ret;
}
@@ -447,6 +483,8 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
{
GstCaps *caps = NULL;
+ GST_LOG_OBJECT (mp4vparse, "Updating caps");
+
/* only update if no src caps yet or explicitly triggered */
if (G_LIKELY (gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (mp4vparse)) &&
!mp4vparse->update_caps))
@@ -455,7 +493,9 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
/* carry over input caps as much as possible; override with our own stuff */
caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (mp4vparse));
if (caps) {
- caps = gst_caps_make_writable (caps);
+ GstCaps *tmp = gst_caps_copy (caps);
+ gst_caps_unref (caps);
+ caps = tmp;
} else {
caps = gst_caps_new_simple ("video/mpeg",
"mpegversion", G_TYPE_INT, 4, NULL);
@@ -464,14 +504,9 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
gst_caps_set_simple (caps, "systemstream", G_TYPE_BOOLEAN, FALSE,
"parsed", G_TYPE_BOOLEAN, TRUE, NULL);
- if (mp4vparse->profile != 0) {
- gchar *profile = NULL;
-
- /* FIXME does it make sense to expose the profile in the caps ? */
- profile = g_strdup_printf ("%d", mp4vparse->profile);
- gst_caps_set_simple (caps, "profile-level-id",
- G_TYPE_STRING, profile, NULL);
- g_free (profile);
+ if (mp4vparse->profile && mp4vparse->level) {
+ gst_caps_set_simple (caps, "profile", G_TYPE_STRING, mp4vparse->profile,
+ "level", G_TYPE_STRING, mp4vparse->level, NULL);
}
if (mp4vparse->config != NULL) {
@@ -479,15 +514,15 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
GST_TYPE_BUFFER, mp4vparse->config, NULL);
}
- if (mp4vparse->params.width > 0 && mp4vparse->params.height > 0) {
- gst_caps_set_simple (caps, "width", G_TYPE_INT, mp4vparse->params.width,
- "height", G_TYPE_INT, mp4vparse->params.height, NULL);
+ if (mp4vparse->vol.width > 0 && mp4vparse->vol.height > 0) {
+ gst_caps_set_simple (caps, "width", G_TYPE_INT, mp4vparse->vol.width,
+ "height", G_TYPE_INT, mp4vparse->vol.height, NULL);
}
/* perhaps we have a framerate */
- if (mp4vparse->params.fixed_time_increment != 0) {
- gint fps_num = mp4vparse->params.time_increment_resolution;
- gint fps_den = mp4vparse->params.fixed_time_increment;
+ if (mp4vparse->vol.fixed_vop_time_increment != 0) {
+ gint fps_num = mp4vparse->vol.vop_time_increment_resolution;
+ gint fps_den = mp4vparse->vol.fixed_vop_time_increment;
GstClockTime latency = gst_util_uint64_scale (GST_SECOND, fps_den, fps_num);
gst_caps_set_simple (caps, "framerate",
@@ -498,11 +533,10 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
}
/* or pixel-aspect-ratio */
- if (mp4vparse->params.aspect_ratio_width > 0 &&
- mp4vparse->params.aspect_ratio_height > 0) {
+ if (mp4vparse->vol.par_width > 0 && mp4vparse->vol.par_height > 0) {
gst_caps_set_simple (caps, "pixel-aspect-ratio",
- GST_TYPE_FRACTION, mp4vparse->params.aspect_ratio_width,
- mp4vparse->params.aspect_ratio_height, NULL);
+ GST_TYPE_FRACTION, mp4vparse->vol.par_width,
+ mp4vparse->vol.par_height, NULL);
}
gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (mp4vparse), caps);
@@ -512,7 +546,7 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
static GstFlowReturn
gst_mpeg4vparse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
- GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
+ GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GstBuffer *buffer = frame->buffer;
gst_mpeg4vparse_update_src_caps (mp4vparse);
@@ -523,34 +557,95 @@ gst_mpeg4vparse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
if (G_UNLIKELY (mp4vparse->drop && !mp4vparse->config)) {
- GST_DEBUG_OBJECT (mp4vparse, "dropping frame as no config yet");
+ GST_LOG_OBJECT (mp4vparse, "dropping frame as no config yet");
return GST_BASE_PARSE_FLOW_DROPPED;
} else
return GST_FLOW_OK;
}
-static gint
-compare_buffers (GstBuffer * buf1, GstBuffer * buf2)
+static GstEvent *
+check_pending_key_unit_event (GstEvent * pending_event, GstSegment * segment,
+ GstClockTime timestamp, guint flags, GstClockTime pending_key_unit_ts)
{
- gpointer data;
- gsize size;
- gint ret;
+ GstClockTime running_time, stream_time;
+ gboolean all_headers;
+ guint count;
+ GstEvent *event = NULL;
- data = gst_buffer_map (buf2, &size, NULL, GST_MAP_READ);
- ret = gst_buffer_memcmp (buf1, 0, data, size);
- gst_buffer_unmap (buf2, data, size);
+ g_return_val_if_fail (segment != NULL, NULL);
- return ret;
+ if (pending_event == NULL)
+ goto out;
+
+ if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
+ timestamp == GST_CLOCK_TIME_NONE)
+ goto out;
+
+ running_time = gst_segment_to_running_time (segment,
+ GST_FORMAT_TIME, timestamp);
+
+ GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
+ if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
+ running_time < pending_key_unit_ts)
+ goto out;
+
+ if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
+ GST_DEBUG ("pending force key unit, waiting for keyframe");
+ goto out;
+ }
+
+ stream_time = gst_segment_to_stream_time (segment,
+ GST_FORMAT_TIME, timestamp);
+
+ gst_video_event_parse_upstream_force_key_unit (pending_event,
+ NULL, &all_headers, &count);
+
+ event =
+ gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
+ running_time, all_headers, count);
+ gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
+
+out:
+ return event;
}
+static void
+gst_mpeg4vparse_prepare_key_unit (GstMpeg4VParse * parse, GstEvent * event)
+{
+ GstClockTime running_time;
+ guint count;
+
+ parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
+ gst_event_replace (&parse->force_key_unit_event, NULL);
+
+ gst_video_event_parse_downstream_force_key_unit (event,
+ NULL, NULL, &running_time, NULL, &count);
+
+ GST_INFO_OBJECT (parse, "pushing downstream force-key-unit event %d "
+ "%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
+ GST_TIME_ARGS (running_time), count);
+ gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
+}
+
+
static GstFlowReturn
gst_mpeg4vparse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
- GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
+ GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GstBuffer *buffer = frame->buffer;
+ gboolean push_codec = FALSE;
+ GstEvent *event = NULL;
+
+ if ((event = check_pending_key_unit_event (mp4vparse->force_key_unit_event,
+ &parse->segment, GST_BUFFER_TIMESTAMP (buffer),
+ GST_BUFFER_FLAGS (buffer), mp4vparse->pending_key_unit_ts))) {
+ gst_mpeg4vparse_prepare_key_unit (mp4vparse, event);
+ push_codec = TRUE;
+ }
- /* periodic SPS/PPS sending */
- if (mp4vparse->interval > 0) {
+ /* periodic config sending */
+ if (mp4vparse->interval > 0 || push_codec) {
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
guint64 diff;
@@ -572,23 +667,30 @@ gst_mpeg4vparse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
GST_LOG_OBJECT (mp4vparse,
"interval since last config %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
- if (GST_TIME_AS_SECONDS (diff) >= mp4vparse->interval) {
+ if (GST_TIME_AS_SECONDS (diff) >= mp4vparse->interval || push_codec) {
+ guint8 *cdata;
+ gsize csize;
+ gboolean diffconf;
+
/* we need to send config now first */
- GST_LOG_OBJECT (parse, "inserting config in stream");
+ GST_INFO_OBJECT (parse, "inserting config in stream");
+ cdata = gst_buffer_map (mp4vparse->config, &csize, NULL, GST_MAP_READ);
+ diffconf = (gst_buffer_get_size (buffer) < csize)
+ || gst_buffer_memcmp (buffer, 0, cdata, csize);
+ gst_buffer_unmap (mp4vparse->config, cdata, csize);
/* avoid inserting duplicate config */
- if ((gst_buffer_get_size (buffer) <
- gst_buffer_get_size (mp4vparse->config))
- || compare_buffers (buffer, mp4vparse->config)) {
+ if (diffconf) {
GstBuffer *superbuf;
/* insert header */
superbuf = gst_buffer_merge (mp4vparse->config, buffer);
- gst_buffer_copy_into (superbuf, buffer, GST_BUFFER_COPY_ALL, 0, -1);
+ gst_buffer_copy_into (superbuf, buffer, GST_BUFFER_COPY_METADATA, 0,
+ csize);
gst_buffer_replace (&frame->buffer, superbuf);
gst_buffer_unref (superbuf);
} else {
- GST_LOG_OBJECT (parse, "... but avoiding duplication");
+ GST_INFO_OBJECT (parse, "... but avoiding duplication");
}
if (G_UNLIKELY (timestamp != -1)) {
@@ -604,10 +706,15 @@ gst_mpeg4vparse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
static gboolean
gst_mpeg4vparse_set_caps (GstBaseParse * parse, GstCaps * caps)
{
- GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
+ GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GstStructure *s;
const GValue *value;
GstBuffer *buf;
+ guint8 *data;
+ gsize size;
+
+ GstMpeg4Packet packet;
+ GstMpeg4ParseResult res;
GST_DEBUG_OBJECT (parse, "setcaps called with %" GST_PTR_FORMAT, caps);
@@ -615,14 +722,23 @@ gst_mpeg4vparse_set_caps (GstBaseParse * parse, GstCaps * caps)
if ((value = gst_structure_get_value (s, "codec_data")) != NULL
&& (buf = gst_value_get_buffer (value))) {
- guint8 *data;
- gsize size;
-
- data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* best possible parse attempt,
* src caps are based on sink caps so it will end up in there
* whether sucessful or not */
- gst_mpeg4vparse_process_config (mp4vparse, data, size);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ res = gst_mpeg4_parse (&packet, TRUE, NULL, data, 0, size);
+
+ while (res == GST_MPEG4_PARSER_OK || res == GST_MPEG4_PARSER_NO_PACKET_END) {
+
+ if (packet.type >= GST_MPEG4_VIDEO_LAYER_FIRST &&
+ packet.type <= GST_MPEG4_VIDEO_LAYER_LAST)
+ mp4vparse->vol_offset = packet.offset;
+
+ res = gst_mpeg4_parse (&packet, TRUE, NULL, data, packet.offset, size);
+ }
+
+ /* And take it as config */
+ gst_mpeg4vparse_process_config (mp4vparse, data, 3, size);
gst_buffer_unmap (buf, data, size);
}
@@ -630,6 +746,7 @@ gst_mpeg4vparse_set_caps (GstBaseParse * parse, GstCaps * caps)
return TRUE;
}
+
static GstCaps *
gst_mpeg4vparse_get_caps (GstBaseParse * parse, GstCaps * filter)
{
@@ -660,24 +777,94 @@ gst_mpeg4vparse_get_caps (GstBaseParse * parse, GstCaps * filter)
(parse)));
}
+ if (filter) {
+ GstCaps *tmp = gst_caps_intersect_full (res, filter,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (res);
+ res = tmp;
+ }
+
+
return res;
}
static gboolean
-plugin_init (GstPlugin * plugin)
+gst_mpeg4vparse_event (GstBaseParse * parse, GstEvent * event)
{
- GST_DEBUG_CATEGORY_INIT (mpeg4v_parse_debug, "mpeg4videoparse", 0,
- "MPEG-4 video parser");
+ gboolean handled = FALSE;
+ GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
- if (!gst_element_register (plugin, "mpeg4videoparse", GST_RANK_PRIMARY + 1,
- gst_mpeg4vparse_get_type ()))
- return FALSE;
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ {
+ GstClockTime timestamp, stream_time, running_time;
+ gboolean all_headers;
+ guint count;
- return TRUE;
+ if (!gst_video_event_is_force_key_unit (event))
+ break;
+
+ gst_video_event_parse_downstream_force_key_unit (event,
+ &timestamp, &stream_time, &running_time, &all_headers, &count);
+
+ GST_INFO_OBJECT (mp4vparse, "received downstream force key unit event, "
+ "seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
+ gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
+ all_headers, count);
+ handled = TRUE;
+
+ if (mp4vparse->force_key_unit_event) {
+ GST_INFO_OBJECT (mp4vparse, "ignoring force key unit event "
+ "as one is already queued");
+ break;
+ }
+
+ mp4vparse->pending_key_unit_ts = running_time;
+ gst_event_replace (&mp4vparse->force_key_unit_event, event);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return handled;
}
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "mpeg4videoparse",
- "MPEG-4 video parser",
- plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
+static gboolean
+gst_mpeg4vparse_src_event (GstBaseParse * parse, GstEvent * event)
+{
+ gboolean handled = FALSE;
+ GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ {
+ GstClockTime running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (!gst_video_event_is_force_key_unit (event))
+ break;
+
+ gst_video_event_parse_upstream_force_key_unit (event,
+ &running_time, &all_headers, &count);
+
+ GST_INFO_OBJECT (mp4vparse, "received upstream force-key-unit event, "
+ "seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
+ gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
+ all_headers, count);
+
+ if (!all_headers)
+ break;
+
+ mp4vparse->pending_key_unit_ts = running_time;
+ gst_event_replace (&mp4vparse->force_key_unit_event, event);
+ /* leave handled = FALSE so that the event gets propagated upstream */
+ break;
+ }
+ default:
+ break;
+ }
+
+ return handled;
+}
diff --git a/gst/mpeg4videoparse/mpeg4videoparse.h b/gst/videoparsers/gstmpeg4videoparse.h
index 05d81e8a9..75d7d6c0f 100644
--- a/gst/mpeg4videoparse/mpeg4videoparse.h
+++ b/gst/videoparsers/gstmpeg4videoparse.h
@@ -16,28 +16,28 @@
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
-
-#ifndef __MPEG4VIDEOPARSE_H__
-#define __MPEG4VIDEOPARSE_H__
+
+#ifndef __MPEG4VIDEO_PARSE_H__
+#define __MPEG4VIDEO_PARSE_H__
#include <gst/gst.h>
#include <gst/base/gstbaseparse.h>
-#include "mpeg4parse.h"
+#include <gst/codecparsers/gstmpeg4parser.h>
G_BEGIN_DECLS
-#define GST_TYPE_MPEG4VIDEOPARSE (gst_mpeg4vparse_get_type())
-#define GST_MPEG4VIDEOPARSE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),\
- GST_TYPE_MPEG4VIDEOPARSE, GstMpeg4VParse))
-#define GST_MPEG4VIDEOPARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),\
- GST_TYPE_MPEG4VIDEOPARSE, GstMpeg4VParseClass))
-#define GST_MPEG4VIDEOPARSE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),\
- GST_TYPE_MPEG4VIDEOPARSE, GstMpeg4VParseClass))
-#define GST_IS_MPEG4VIDEOPARSE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),\
- GST_TYPE_MPEG4VIDEOPARSE))
-#define GST_IS_MPEG4VIDEOPARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),\
- GST_TYPE_MPEG4VIDEOPARSE))
+#define GST_TYPE_MPEG4VIDEO_PARSE (gst_mpeg4vparse_get_type())
+#define GST_MPEG4VIDEO_PARSE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),\
+ GST_TYPE_MPEG4VIDEO_PARSE, GstMpeg4VParse))
+#define GST_MPEG4VIDEO_PARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),\
+ GST_TYPE_MPEG4VIDEO_PARSE, GstMpeg4VParseClass))
+#define GST_MPEG4VIDEO_PARSE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),\
+ GST_TYPE_MPEG4VIDEO_PARSE, GstMpeg4VParseClass))
+#define GST_IS_MPEG4VIDEO_PARSE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),\
+ GST_TYPE_MPEG4VIDEO_PARSE))
+#define GST_IS_MPEG4VIDEO_PARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),\
+ GST_TYPE_MPEG4VIDEO_PARSE))
typedef struct _GstMpeg4VParse GstMpeg4VParse;
typedef struct _GstMpeg4VParseClass GstMpeg4VParseClass;
@@ -50,18 +50,21 @@ struct _GstMpeg4VParse {
/* parse state */
gint last_sc;
gint vop_offset;
- gint vos_offset;
- gint vo_offset;
+ gboolean vo_found;
gboolean intra_frame;
gboolean update_caps;
GstBuffer *config;
- guint8 profile;
- MPEG4Params params;
+ GstMpeg4VideoObjectLayer vol;
+ gboolean vol_offset;
+ const gchar *profile;
+ const gchar *level;
/* properties */
gboolean drop;
guint interval;
+ GstClockTime pending_key_unit_ts;
+ GstEvent *force_key_unit_event;
};
struct _GstMpeg4VParseClass {
@@ -72,4 +75,4 @@ GType gst_mpeg4vparse_get_type (void);
G_END_DECLS
-#endif /* __MPEG4VIDEOPARSE_H__ */
+#endif /* __MPEG4VIDEO_PARSE_H__ */
diff --git a/gst/videoparsers/plugin.c b/gst/videoparsers/plugin.c
index 3f2f73a2f..111bb1a87 100644
--- a/gst/videoparsers/plugin.c
+++ b/gst/videoparsers/plugin.c
@@ -26,6 +26,7 @@
#include "gsth264parse.h"
#include "gstdiracparse.h"
#include "gstmpegvideoparse.h"
+#include "gstmpeg4videoparse.h"
static gboolean
plugin_init (GstPlugin * plugin)
@@ -40,6 +41,8 @@ plugin_init (GstPlugin * plugin)
GST_RANK_NONE, GST_TYPE_DIRAC_PARSE);
ret |= gst_element_register (plugin, "mpegvideoparse",
GST_RANK_PRIMARY + 1, GST_TYPE_MPEGVIDEO_PARSE);
+ ret |= gst_element_register (plugin, "mpeg4videoparse",
+ GST_RANK_PRIMARY + 1, GST_TYPE_MPEG4VIDEO_PARSE);
return ret;
}
diff --git a/pkgconfig/gstreamer-plugins-bad.pc.in b/pkgconfig/gstreamer-plugins-bad.pc.in
index 1fad71747..5e2130168 100644
--- a/pkgconfig/gstreamer-plugins-bad.pc.in
+++ b/pkgconfig/gstreamer-plugins-bad.pc.in
@@ -7,7 +7,5 @@ Name: GStreamer Bad Plugin libraries
Description: Currently includes the photography interface library
Requires: gstreamer-@GST_MAJORMINOR@ gstreamer-base-@GST_MAJORMINOR@
Version: @VERSION@
-Libs: -L${libdir} -lgstphotography-@GST_MAJORMINOR@\
- -L${libdir} -lgstcodecparsers-@GST_MAJORMINOR@\
+Libs: -L${libdir} -lgstphotography-@GST_MAJORMINOR@
Cflags: -I${includedir}
-
diff --git a/po/LINGUAS b/po/LINGUAS
index e1a6c64a9..592640a7b 100644
--- a/po/LINGUAS
+++ b/po/LINGUAS
@@ -1 +1 @@
-af az bg ca cs da de el en_GB es eu fi fr gl hu id it ja ky lt lv mt nb nl or pl pt_BR ro ru sk sl sq sr sv tr uk vi zh_CN
+af az bg ca cs da de el en_GB eo es eu fi fr gl hu id it ja ky lt lv mt nb nl or pl pt_BR ro ru sk sl sq sr sv tr uk vi zh_CN
diff --git a/po/eo.po b/po/eo.po
new file mode 100644
index 000000000..168061c3a
--- /dev/null
+++ b/po/eo.po
@@ -0,0 +1,71 @@
+# Esperanto translation for gst-plugins-bad.
+# Copyright (C) 2011 Free Software Foundation, Inc.
+# This file is distributed under the same license as the gst-plugins-bad package.
+# Kristjan SCHMIDT <kristjan.schmidt@googlemail.com>, 2011.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: gst-plugins-bad 0.10.21.2\n"
+"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
+"POT-Creation-Date: 2011-12-11 00:54+0000\n"
+"PO-Revision-Date: 2011-06-04 22:18+0200\n"
+"Last-Translator: Kristjan SCHMIDT <kristjan.schmidt@googlemail.com>\n"
+"Language-Team: Esperanto <translation-team-eo@lists.sourceforge.net>\n"
+"Language: eo\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+
+msgid "Could not read title information for DVD."
+msgstr "Ne eblis legi la titol-informojn de la DVD."
+
+#, c-format
+msgid "Failed to open DVD device '%s'."
+msgstr "Fiaskis malfermi la DVD-aparaton \"%s\"."
+
+msgid "Failed to set PGC based seeking."
+msgstr "PGC-bazita serĉo fiaskis."
+
+msgid ""
+"Could not read DVD. This may be because the DVD is encrypted and a DVD "
+"decryption library is not installed."
+msgstr ""
+"Ne eblis legi la DVD-n. Eble la DVD estas ĉifrita sed biblioteko por "
+"malĉifrado ne estas instalite."
+
+msgid "Could not read DVD."
+msgstr "Ne eblis legi la DVD-n."
+
+msgid "No file name specified for writing."
+msgstr "Neniu dosiernomo estas specifite por skribi."
+
+#, c-format
+msgid "Could not open file \"%s\" for writing."
+msgstr "Ne eblis malfermi la dosieron \"%s\" por skribi."
+
+msgid "Internal data stream error."
+msgstr "Interna datum-flu-eraro."
+
+#, c-format
+msgid "Could not write to file \"%s\"."
+msgstr "Ne eblis skribi al dosiero \"%s\"."
+
+msgid "Internal data flow error."
+msgstr "Interna datum-flu-eraro."
+
+#, c-format
+msgid "Device \"%s\" does not exist."
+msgstr "Aparato \"%s\" ne ekzistas."
+
+#, fuzzy, c-format
+msgid "Could not open frontend device \"%s\"."
+msgstr "Ne eblis malfermi la \"Frontend\"-aparaton \"%s\"."
+
+#, fuzzy, c-format
+msgid "Could not get settings from frontend device \"%s\"."
+msgstr "Ne eblis akiri la agordojn de la \"Frontend\"-aparato \"%s\"."
+
+#, c-format
+msgid "Could not open file \"%s\" for reading."
+msgstr "Ne eblis malfermi la dosieron \"%s\" por legi."
diff --git a/po/es.po b/po/es.po
index 0fc56d121..08d924d8a 100644
--- a/po/es.po
+++ b/po/es.po
@@ -1,21 +1,21 @@
-# translation of gst-plugins-bad-0.10.13.2.po to Español
+# translation of gst-plugins-bad-0.10.21.2.po to Español
# spanish translation for gst-plugins-bad
# This file is put in the public domain.
+# Jorge González <jorgegonz@svn.gnome.org>, 2007, 2008, 2009, 2011.
#
-# Jorge González González <aloriel@gmail.com>, 2007, 2008, 2009.
msgid ""
msgstr ""
-"Project-Id-Version: gst-plugins-bad 0.10.13.2\n"
+"Project-Id-Version: gst-plugins-bad 0.10.21.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2010-10-19 23:33+0100\n"
-"PO-Revision-Date: 2009-09-29 23:12+0200\n"
+"POT-Creation-Date: 2011-12-11 00:54+0000\n"
+"PO-Revision-Date: 2011-10-02 15:47+0200\n"
"Last-Translator: Jorge González González <aloriel@gmail.com>\n"
"Language-Team: Spanish <es@li.org>\n"
"Language: es\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+"Plural-Forms: nplurals=2; plural=(n!=1);\n"
msgid "Could not read title information for DVD."
msgstr "No se pudo leer la información del título para el DVD."
@@ -31,10 +31,11 @@ msgid ""
"Could not read DVD. This may be because the DVD is encrypted and a DVD "
"decryption library is not installed."
msgstr ""
+"No se pudo leer el DVD. Puede ser debido a que el DVD está cifrado y la "
+"biblioteca de descifrado del DVD no está instalada."
-#, fuzzy
msgid "Could not read DVD."
-msgstr "No se pudo leer la información del título para el DVD."
+msgstr "No se pudo leer el DVD."
msgid "No file name specified for writing."
msgstr "No se especificó un nombre de archivo para su escritura."
@@ -69,11 +70,359 @@ msgstr "No se pudieron obtener los ajustes del dispositivo frontend «%s»."
msgid "Could not open file \"%s\" for reading."
msgstr "No se pudo abrir el archivo «%s» para leer."
+#~ msgid "Internal clock error."
+#~ msgstr "Error en el reloj interno."
+
+#~ msgid "Could not open audio device for mixer control handling."
+#~ msgstr ""
+#~ "No se pudo abrir el dispositivo de sonido para manejar el control del "
+#~ "mezclador."
+
+#~ msgid ""
+#~ "Could not open audio device for mixer control handling. This version of "
+#~ "the Open Sound System is not supported by this element."
+#~ msgstr ""
+#~ "No se pudo abrir el dispositivo para manejar el control del mezclador. "
+#~ "Este elemento no soporta esta versión del Open Sound System."
+
+#~ msgid "Volume"
+#~ msgstr "Volumen"
+
+#~ msgid "Master"
+#~ msgstr "Maestro"
+
+#~ msgid "Front"
+#~ msgstr "Frontal"
+
+#~ msgid "Rear"
+#~ msgstr "Trasero"
+
+#~ msgid "Headphones"
+#~ msgstr "Auriculares"
+
+#~ msgid "Center"
+#~ msgstr "Centrado"
+
+#~ msgid "LFE"
+#~ msgstr "LFE"
+
+#~ msgid "Surround"
+#~ msgstr "Sonido envolvente"
+
+#~ msgid "Side"
+#~ msgstr "Lateral"
+
+#~ msgid "Built-in Speaker"
+#~ msgstr "Altavoz integrado"
+
+#~ msgid "AUX 1 Out"
+#~ msgstr "Salida auxiliar 1"
+
+#~ msgid "AUX 2 Out"
+#~ msgstr "Salida auxiliar 2"
+
+#~ msgid "AUX Out"
+#~ msgstr "Salida auxiliar"
+
+#~ msgid "Bass"
+#~ msgstr "Bajos"
+
+#~ msgid "Treble"
+#~ msgstr "Agudos"
+
+#~ msgid "3D Depth"
+#~ msgstr "Profundidad 3D"
+
+#~ msgid "3D Center"
+#~ msgstr "Centro 3D"
+
+#~ msgid "3D Enhance"
+#~ msgstr "Mejora 3D"
+
+#~ msgid "Telephone"
+#~ msgstr "Teléfono"
+
+#~ msgid "Microphone"
+#~ msgstr "Micrófono"
+
+#~ msgid "Line Out"
+#~ msgstr "Línea de salida"
+
+#~ msgid "Line In"
+#~ msgstr "Línea de entrada"
+
+#~ msgid "Internal CD"
+#~ msgstr "CD interno"
+
+#~ msgid "Video In"
+#~ msgstr "Entrada de vídeo"
+
+#~ msgid "AUX 1 In"
+#~ msgstr "Entrada auxiliar 1"
+
+#~ msgid "AUX 2 In"
+#~ msgstr "Entrada auxiliar 2"
+
+#~ msgid "AUX In"
+#~ msgstr "Entrada auxiliar"
+
+#~ msgid "PCM"
+#~ msgstr "PCM"
+
+#~ msgid "Record Gain"
+#~ msgstr "Ganancia de grabación"
+
+#~ msgid "Output Gain"
+#~ msgstr "Salida de grabación"
+
+#~ msgid "Microphone Boost"
+#~ msgstr "Aumento del micrófono"
+
+#~ msgid "Loopback"
+#~ msgstr "Bucle local"
+
+#~ msgid "Diagnostic"
+#~ msgstr "Diagnóstico"
+
+#~ msgid "Bass Boost"
+#~ msgstr "Aumento de bajos"
+
+#~ msgid "Playback Ports"
+#~ msgstr "Puertos de reproducción"
+
+#~ msgid "Input"
+#~ msgstr "Entrada"
+
+#~ msgid "Record Source"
+#~ msgstr "Origen de la grabación"
+
+#~ msgid "Monitor Source"
+#~ msgstr "Origen del monitor"
+
+#~ msgid "Keyboard Beep"
+#~ msgstr "Pitido del teclado"
+
+#~ msgid "Monitor"
+#~ msgstr "Monitor"
+
+#~ msgid "Simulate Stereo"
+#~ msgstr "Simular estéreo"
+
+#~ msgid "Stereo"
+#~ msgstr "Estéreo"
+
+#~ msgid "Surround Sound"
+#~ msgstr "Sonido envolvente"
+
+#~ msgid "Microphone Gain"
+#~ msgstr "Ganancia del micrófono"
+
+#~ msgid "Speaker Source"
+#~ msgstr "Origen de los altavoces"
+
+#~ msgid "Microphone Source"
+#~ msgstr "Origen del micrófono"
+
+#~ msgid "Jack"
+#~ msgstr "Jack"
+
+#~ msgid "Center / LFE"
+#~ msgstr "Centrado / LFE"
+
+#~ msgid "Stereo Mix"
+#~ msgstr "Mezclador estéreo"
+
+#~ msgid "Mono Mix"
+#~ msgstr "Mezclador mono"
+
+#~ msgid "Input Mix"
+#~ msgstr "Mezclador de entrada"
+
+#~ msgid "SPDIF In"
+#~ msgstr "Entrada S/PDIF"
+
+#~ msgid "SPDIF Out"
+#~ msgstr "Salida S/PDIF"
+
+#~ msgid "Microphone 1"
+#~ msgstr "Micrófono 1"
+
+#~ msgid "Microphone 2"
+#~ msgstr "Micrófono 2"
+
+#~ msgid "Digital Out"
+#~ msgstr "Salida digital"
+
+#~ msgid "Digital In"
+#~ msgstr "Entrada digital"
+
+#~ msgid "HDMI"
+#~ msgstr "HDMI"
+
+#~ msgid "Modem"
+#~ msgstr "Módem"
+
+#~ msgid "Handset"
+#~ msgstr "Auriculares"
+
+#~ msgid "Other"
+#~ msgstr "Otro"
+
+#~ msgid "None"
+#~ msgstr "Ninguno"
+
+#~ msgid "On"
+#~ msgstr "Encendido"
+
+#~ msgid "Off"
+#~ msgstr "Apagado"
+
+#~ msgid "Mute"
+#~ msgstr "Silenciar"
+
+#~ msgid "Fast"
+#~ msgstr "Rápido"
+
+#~ msgid "Very Low"
+#~ msgstr "Muy bajo"
+
+#~ msgid "Low"
+#~ msgstr "Bajo"
+
+#~ msgid "Medium"
+#~ msgstr "Medio"
+
+#~ msgid "High"
+#~ msgstr "Alto"
+
+#~ msgid "Very High"
+#~ msgstr "Muy alto"
+
+#~ msgid "Production"
+#~ msgstr "Producción"
+
+#~ msgid "Front Panel Microphone"
+#~ msgstr "Micrófono del panel frontal"
+
+#~ msgid "Front Panel Line In"
+#~ msgstr "Línea de entrada del panel frontal"
+
+#~ msgid "Front Panel Headphones"
+#~ msgstr "Auriculares del panel frontal"
+
+#~ msgid "Front Panel Line Out"
+#~ msgstr "Línea de salida del panel frontal"
+
+#~ msgid "Green Connector"
+#~ msgstr "Conector verde"
+
+#~ msgid "Pink Connector"
+#~ msgstr "Conector rosa"
+
+#~ msgid "Blue Connector"
+#~ msgstr "Conector azul"
+
+#~ msgid "White Connector"
+#~ msgstr "Conector blanco"
+
+#~ msgid "Black Connector"
+#~ msgstr "Conector negro"
+
+#~ msgid "Gray Connector"
+#~ msgstr "Conector gris"
+
+#~ msgid "Orange Connector"
+#~ msgstr "Conector naranja"
+
+#~ msgid "Red Connector"
+#~ msgstr "Conector rojo"
+
+#~ msgid "Yellow Connector"
+#~ msgstr "Conector amarillo"
+
+#~ msgid "Green Front Panel Connector"
+#~ msgstr "Conector verde del panel frontal"
+
+#~ msgid "Pink Front Panel Connector"
+#~ msgstr "Conector rosa del panel frontal"
+
+#~ msgid "Blue Front Panel Connector"
+#~ msgstr "Conector azul del panel frontal"
+
+#~ msgid "White Front Panel Connector"
+#~ msgstr "Conector blanco del panel frontal"
+
+#~ msgid "Black Front Panel Connector"
+#~ msgstr "Conector negro del panel frontal"
+
+#~ msgid "Gray Front Panel Connector"
+#~ msgstr "Conector gris del panel frontal"
+
+#~ msgid "Orange Front Panel Connector"
+#~ msgstr "Conector naranja del panel frontal"
+
+#~ msgid "Red Front Panel Connector"
+#~ msgstr "Conector rojo del panel frontal"
+
+#~ msgid "Yellow Front Panel Connector"
+#~ msgstr "Conector amarillo del panel frontal"
+
+#~ msgid "Spread Output"
+#~ msgstr "Expandir salida"
+
+#~ msgid "Downmix"
+#~ msgstr "Reducción de canales"
+
+#~ msgid "Virtual Mixer Input"
+#~ msgstr "Entrada del mezclador virtual"
+
+#~ msgid "Virtual Mixer Output"
+#~ msgstr "Salida del mezclador virtual"
+
+#~ msgid "Virtual Mixer Channels"
+#~ msgstr "Canales del mezclador virtual"
+
+#~ msgid "%s Function"
+#~ msgstr "Función %s"
+
#~ msgid "%s %d"
#~ msgstr "%s %d"
-#~ msgid "Internal clock error."
-#~ msgstr "Error en el reloj interno."
+#~ msgid ""
+#~ "Could not open audio device for playback. Device is being used by another "
+#~ "application."
+#~ msgstr ""
+#~ "No se pudo abrir el dispositivo de sonido para reproducir. Otra "
+#~ "aplicación está usando el dispositivo."
+
+#~ msgid ""
+#~ "Could not open audio device for playback. You don't have permission to "
+#~ "open the device."
+#~ msgstr ""
+#~ "No se pudo abrir el dispositivo de sonido para reproducir. No tiene "
+#~ "permiso para abrir el dispositivo."
+
+#~ msgid "Could not open audio device for playback."
+#~ msgstr "No se pudo abrir el dispositivo de sonido para reproducción."
+
+#~ msgid ""
+#~ "Could not open audio device for playback. This version of the Open Sound "
+#~ "System is not supported by this element."
+#~ msgstr ""
+#~ "No se pudo abrir el dispositivo para reproducir. Este elemento no soporta "
+#~ "esta versión del Open Sound System."
+
+#~ msgid "Playback is not supported by this audio device."
+#~ msgstr "Este dispositivo de sonido no soporta reproducción."
+
+#~ msgid "Audio playback error."
+#~ msgstr "Error en la reproducción del sonido."
+
+#~ msgid "Recording is not supported by this audio device."
+#~ msgstr "Este dispositivo de sonido no soporta grabación."
+
+#~ msgid "Error recording from audio device."
+#~ msgstr "Error al grabar desde el dispositivo de sonido."
#~ msgid "PCM 1"
#~ msgstr "PCM 1"
diff --git a/po/gl.po b/po/gl.po
index 13e59000c..0b5e5bd32 100644
--- a/po/gl.po
+++ b/po/gl.po
@@ -2,20 +2,21 @@
# Copyright (C) 2010 Fran Dieguez
# This file is distributed under the same license as the gst-plugins-bad package.
# Fran Diéguez <frandieguez@ubuntu.com>, 2010.
+# Fran Dieguez <frandieguez@ubuntu.com>, 2011.
#
msgid ""
msgstr ""
-"Project-Id-Version: gst-plugins-bad 0.10.18.2\n"
+"Project-Id-Version: gst-plugins-bad 0.10.21.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2010-10-19 23:33+0100\n"
-"PO-Revision-Date: 2010-05-30 13:28+0200\n"
+"POT-Creation-Date: 2011-12-11 00:54+0000\n"
+"PO-Revision-Date: 2011-09-05 12:50+0200\n"
"Last-Translator: Fran Dieguez <frandieguez@ubuntu.com>\n"
"Language-Team: Galician <proxecto@trasno.net>\n"
"Language: gl\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"Plural-Forms: nplurals=2; plural=(n!=1);\\\n"
+"Plural-Forms: nplurals=2; plural=(n!=1);\n"
"X-Poedit-Language: galego\n"
msgid "Could not read title information for DVD."
@@ -32,10 +33,11 @@ msgid ""
"Could not read DVD. This may be because the DVD is encrypted and a DVD "
"decryption library is not installed."
msgstr ""
+"Non foi posíbel ler o DVD. Isto pode ser cause de que o DVD estea cifrado e "
+"non teña instalada unha biblioteca de descifrado de DVD."
-#, fuzzy
msgid "Could not read DVD."
-msgstr "Non foi posíbel ler a información do título do DVD"
+msgstr "Non foi posíbel ler o DVD."
msgid "No file name specified for writing."
msgstr "Non se especificou ningún nome de ficheiro para a súa escritura."
diff --git a/po/ky.po b/po/ky.po
index 8c59de50e..9c938d1a7 100644
--- a/po/ky.po
+++ b/po/ky.po
@@ -6,7 +6,7 @@ msgid ""
msgstr ""
"Project-Id-Version: gst-plugins-bad 0.10.5\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2010-10-19 23:33+0100\n"
+"POT-Creation-Date: 2011-12-11 00:54+0000\n"
"PO-Revision-Date: 2007-11-13 17:16+0600\n"
"Last-Translator: Ilyas Bakirov <just_ilyas@yahoo.com>\n"
"Language-Team: Kirghiz <i18n-team-ky-kyrgyz@lists.sourceforge.net>\n"
diff --git a/po/lv.po b/po/lv.po
index 6f29f108a..e8aff9c1c 100644
--- a/po/lv.po
+++ b/po/lv.po
@@ -1,13 +1,14 @@
# Latvian translation of gst-plugins-bad
# This file is put in the public domain.
# Arvis Lācis <arvis.lacis@inbox.lv>, 2009.
+# Rihards Priedītis <rprieditis@gmail.com>, 2011.
#
msgid ""
msgstr ""
-"Project-Id-Version: gst-plugins-bad 0.10.13.2\n"
+"Project-Id-Version: gst-plugins-bad 0.10.21.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2010-10-19 23:33+0100\n"
-"PO-Revision-Date: 2009-08-12 20:33+0100\n"
+"POT-Creation-Date: 2011-12-11 00:54+0000\n"
+"PO-Revision-Date: 2011-09-02 12:04-0000\n"
"Last-Translator: Rihards Priedītis <rprieditis@gmail.com>\n"
"Language-Team: Latvian <translation-team-lv@lists.sourceforge.net>\n"
"Language: lv\n"
@@ -33,10 +34,11 @@ msgid ""
"Could not read DVD. This may be because the DVD is encrypted and a DVD "
"decryption library is not installed."
msgstr ""
+"Nevarēja nolasīt DVD. Iespējams, ka DVD ir šifrēts un DVD atšifrēšanas "
+"bibliotēka nav instalēta."
-#, fuzzy
msgid "Could not read DVD."
-msgstr "Nevarēja nolasīt virsrakstu informāciju no DVD."
+msgstr "Nevar nolasīt DVD."
msgid "No file name specified for writing."
msgstr "Ierakstīšanai nav norādīts neviens faila nosaukums."
diff --git a/po/sr.po b/po/sr.po
index 6857998e1..e4d9fe711 100644
--- a/po/sr.po
+++ b/po/sr.po
@@ -1,75 +1,77 @@
# Serbian translation of gst-plugins
# Copyright (C) 2004 Free Software Foundation, Inc.
+# This file is distributed under the same license as the gst-plugins-bad package.
# Danilo Segan <dsegan@gmx.net>, 2004.
-#
+# Мирослав Николић <miroslavnikolic@rocketmail.com>, 2011.
msgid ""
msgstr ""
-"Project-Id-Version: gst-plugins 0.7.6\n"
+"Project-Id-Version: gst-plugins-bad-0.10.21.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2010-10-19 23:33+0100\n"
-"PO-Revision-Date: 2004-03-13 00:18+0100\n"
-"Last-Translator: Danilo Segan <dsegan@gmx.net>\n"
+"POT-Creation-Date: 2011-12-11 00:54+0000\n"
+"PO-Revision-Date: 2011-12-04 09:59+0200\n"
+"Last-Translator: Мирослав Николић <miroslavnikolic@rocketmail.com>\n"
"Language-Team: Serbian <gnu@prevod.org>\n"
"Language: sr\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : (n%10>=2 && n"
+"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
+"X-Generator: Virtaal 0.7.0\n"
+"X-Project-Style: gnome\n"
-#, fuzzy
msgid "Could not read title information for DVD."
-msgstr "Не могу да пишем у датотеку „%s“."
+msgstr "Не могу да прочитам информације о наслову ДВД-а."
-#, fuzzy, c-format
+#, c-format
msgid "Failed to open DVD device '%s'."
-msgstr "Не могу да затворим управљачки уређај „%s“."
+msgstr "Нисам успео да отворим ДВД уређај „%s“."
msgid "Failed to set PGC based seeking."
-msgstr ""
+msgstr "Нисам успео да подесим позиционирање засновано на ланцу програма."
msgid ""
"Could not read DVD. This may be because the DVD is encrypted and a DVD "
"decryption library is not installed."
msgstr ""
+"Не могу да читам ДВД. Можда зато што је ДВД шифрован а библиотека за "
+"дешифровање ДВД-а није инсталирана."
-#, fuzzy
msgid "Could not read DVD."
-msgstr "Не могу да пишем у датотеку „%s“."
+msgstr "Не могу да читам ДВД."
-#, fuzzy
msgid "No file name specified for writing."
-msgstr "Име датотеке није задато."
+msgstr "Није наведен назив датотеке за упис."
#, c-format
msgid "Could not open file \"%s\" for writing."
-msgstr "Не могу да отворим датотеку „%s“ ради уписа."
+msgstr "Не могу да отворим датотеку „%s“ за упис."
msgid "Internal data stream error."
-msgstr ""
+msgstr "Унутрашња грешка тока података."
#, c-format
msgid "Could not write to file \"%s\"."
msgstr "Не могу да пишем у датотеку „%s“."
msgid "Internal data flow error."
-msgstr ""
+msgstr "Унутрашња грешка протока података."
#, c-format
msgid "Device \"%s\" does not exist."
-msgstr "Не постоји уређај „%s“."
+msgstr "Уређај „%s“ не постоји."
-#, fuzzy, c-format
+#, c-format
msgid "Could not open frontend device \"%s\"."
-msgstr "Не могу да затворим управљачки уређај „%s“."
+msgstr "Не могу да отворим управљачки уређај „%s“."
-#, fuzzy, c-format
+#, c-format
msgid "Could not get settings from frontend device \"%s\"."
-msgstr "Не могу да примим довољно бафера са уређаја „%s“."
+msgstr "Не могу да добавим подешавања са управљачког уређаја „%s“."
#, c-format
msgid "Could not open file \"%s\" for reading."
-msgstr "Не могу да отворим датотеку „%s“ ради читања."
+msgstr "Не могу да отворим датотеку „%s“ за читање."
#~ msgid "Could not open device \"%s\" for reading and writing."
#~ msgstr "Не могу да отворим уређај „%s“ ради читања и уписа."
@@ -83,6 +85,9 @@ msgstr "Не могу да отворим датотеку „%s“ ради ч
#~ msgid "Could not get buffers from device \"%s\"."
#~ msgstr "Не могу да примим бафере са уређаја „%s“."
+#~ msgid "Could not open audio device \"%s\" for writing."
+#~ msgstr "Не могу да отворим звучни уређај „%s“ ради уписа."
+
#~ msgid "Could not open control device \"%s\" for writing."
#~ msgstr "Не могу да отворим управљачки уређај „%s“ ради уписа."
@@ -119,9 +124,30 @@ msgstr "Не могу да отворим датотеку „%s“ ради ч
#~ msgid "Could not open device \"%s\" for reading."
#~ msgstr "Не могу да отворим уређај „%s“ ради читања."
+#~ msgid "Volume"
+#~ msgstr "Јачина звука"
+
+#~ msgid "Bass"
+#~ msgstr "Бас"
+
+#~ msgid "Treble"
+#~ msgstr "Шум"
+
#~ msgid "Synth"
#~ msgstr "Синт."
+#~ msgid "PCM"
+#~ msgstr "ПЦМ"
+
+#~ msgid "Speaker"
+#~ msgstr "Звучник"
+
+#~ msgid "Line-in"
+#~ msgstr "Ул.лин."
+
+#~ msgid "Microphone"
+#~ msgstr "Микрофон"
+
#~ msgid "CD"
#~ msgstr "ЦД"
@@ -131,9 +157,15 @@ msgstr "Не могу да отворим датотеку „%s“ ради ч
#~ msgid "PCM-2"
#~ msgstr "ПЦМ-2"
+#~ msgid "Record"
+#~ msgstr "Снимање"
+
#~ msgid "In-gain"
#~ msgstr "Ул. пој."
+#~ msgid "Out-gain"
+#~ msgstr "Из. пој."
+
#~ msgid "Line-1"
#~ msgstr "Лин. 1"
@@ -143,6 +175,9 @@ msgstr "Не могу да отворим датотеку „%s“ ради ч
#~ msgid "Line-3"
#~ msgstr "Лин. 3"
+#~ msgid "Digital-1"
+#~ msgstr "Диг. 1"
+
#~ msgid "Digital-2"
#~ msgstr "Диг. 2"
@@ -155,9 +190,19 @@ msgstr "Не могу да отворим датотеку „%s“ ради ч
#~ msgid "Phone-out"
#~ msgstr "Тел. из."
+#~ msgid "Video"
+#~ msgstr "Видео"
+
#~ msgid "Radio"
#~ msgstr "Радио"
+#~ msgid "Monitor"
+#~ msgstr "Праћење"
+
+#, fuzzy
+#~ msgid "PC Speaker"
+#~ msgstr "Звучник"
+
#~ msgid "Could not open CD device for reading."
#~ msgstr "Не могу да отворим ЦД уређај ради читања."
diff --git a/po/sv.po b/po/sv.po
index 71b26ffca..f67a71cfa 100644
--- a/po/sv.po
+++ b/po/sv.po
@@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: gst-plugins-bad 0.10.13.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2010-10-19 23:33+0100\n"
+"POT-Creation-Date: 2011-12-11 00:54+0000\n"
"PO-Revision-Date: 2009-08-12 20:29+0100\n"
"Last-Translator: Daniel Nylander <po@danielnylander.se>\n"
"Language-Team: Swedish <tp-sv@listor.tp-sv.se>\n"
@@ -70,442 +70,3 @@ msgstr "Kunde inte öppna filen \"%s\" för läsning."
#~ msgid "Internal clock error."
#~ msgstr "Internt klockfel."
-
-#~ msgid "Could not open audio device for mixer control handling."
-#~ msgstr "Kunde inte öppna ljudenheten för mixningshantering."
-
-#~ msgid ""
-#~ "Could not open audio device for mixer control handling. This version of "
-#~ "the Open Sound System is not supported by this element."
-#~ msgstr ""
-#~ "Kunde inte öppna ljudenheten för mixningshantering. Denna version av Open "
-#~ "Sound System stöds inte av detta element."
-
-#~ msgid "Volume"
-#~ msgstr "Volym"
-
-#~ msgid "Master"
-#~ msgstr "Övergripande"
-
-#~ msgid "Front"
-#~ msgstr "Fram"
-
-#~ msgid "Rear"
-#~ msgstr "Bak"
-
-#~ msgid "Headphones"
-#~ msgstr "Hörlurar"
-
-# LFE=lågfrekvenseffekter
-#~ msgid "Center"
-#~ msgstr "Center"
-
-#~ msgid "LFE"
-#~ msgstr "LFE"
-
-#~ msgid "Surround"
-#~ msgstr "Surround"
-
-#~ msgid "Side"
-#~ msgstr "Sida"
-
-#~ msgid "Built-in Speaker"
-#~ msgstr "Inbyggd högtalare"
-
-#~ msgid "AUX 1 Out"
-#~ msgstr "AUX 1 ut"
-
-#~ msgid "AUX 2 Out"
-#~ msgstr "AUX 2 ut"
-
-#~ msgid "AUX Out"
-#~ msgstr "AUX ut"
-
-#~ msgid "Bass"
-#~ msgstr "Bas"
-
-#~ msgid "Treble"
-#~ msgstr "Diskant"
-
-#~ msgid "3D Depth"
-#~ msgstr "3D-djup"
-
-#~ msgid "3D Center"
-#~ msgstr "3D-center"
-
-#~ msgid "3D Enhance"
-#~ msgstr "3D-förbättring"
-
-#~ msgid "Telephone"
-#~ msgstr "Telefon"
-
-#~ msgid "Microphone"
-#~ msgstr "Mikrofon"
-
-#~ msgid "Line Out"
-#~ msgstr "Linje ut"
-
-#~ msgid "Line In"
-#~ msgstr "Linje in"
-
-#~ msgid "Internal CD"
-#~ msgstr "Intern cd"
-
-#~ msgid "Video In"
-#~ msgstr "Video in"
-
-#~ msgid "AUX 1 In"
-#~ msgstr "AUX 1 in"
-
-#~ msgid "AUX 2 In"
-#~ msgstr "AUX 2 in"
-
-#~ msgid "AUX In"
-#~ msgstr "AUX in"
-
-#~ msgid "PCM"
-#~ msgstr "PCM"
-
-#~ msgid "Record Gain"
-#~ msgstr "Inspelningsförstärkning"
-
-#~ msgid "Output Gain"
-#~ msgstr "Utgångsförstärkning"
-
-#~ msgid "Microphone Boost"
-#~ msgstr "Mikrofonförstärkning"
-
-#~ msgid "Loopback"
-#~ msgstr "Vändslinga"
-
-#~ msgid "Diagnostic"
-#~ msgstr "Diagnostik"
-
-#~ msgid "Bass Boost"
-#~ msgstr "Basförstärkning"
-
-#~ msgid "Playback Ports"
-#~ msgstr "Uppspelningsportar"
-
-#~ msgid "Input"
-#~ msgstr "Ingång"
-
-#~ msgid "Record Source"
-#~ msgstr "Inspelningskälla"
-
-#~ msgid "Monitor Source"
-#~ msgstr "Monitorkälla"
-
-#~ msgid "Keyboard Beep"
-#~ msgstr "Tangentbordspip"
-
-#~ msgid "Monitor"
-#~ msgstr "Monitor"
-
-#~ msgid "Simulate Stereo"
-#~ msgstr "Simulera stereo"
-
-#~ msgid "Stereo"
-#~ msgstr "Stereo"
-
-#~ msgid "Surround Sound"
-#~ msgstr "Surroundljud"
-
-#~ msgid "Microphone Gain"
-#~ msgstr "Mikrofonförstärkning"
-
-#~ msgid "Speaker Source"
-#~ msgstr "Högtalarkälla"
-
-#~ msgid "Microphone Source"
-#~ msgstr "Mikrofonkälla"
-
-#~ msgid "Jack"
-#~ msgstr "Jack"
-
-# LFE=lågfrekvenseffekter
-#~ msgid "Center / LFE"
-#~ msgstr "Center / LFE"
-
-#~ msgid "Stereo Mix"
-#~ msgstr "Stereomix"
-
-#~ msgid "Mono Mix"
-#~ msgstr "Monomix"
-
-#~ msgid "Input Mix"
-#~ msgstr "Ingångsmix"
-
-#~ msgid "SPDIF In"
-#~ msgstr "SPDIF in"
-
-#~ msgid "SPDIF Out"
-#~ msgstr "SPDIF ut"
-
-#~ msgid "Microphone 1"
-#~ msgstr "Mikrofon 1"
-
-#~ msgid "Microphone 2"
-#~ msgstr "Mikrofon 2"
-
-#~ msgid "Digital Out"
-#~ msgstr "Digital ut"
-
-#~ msgid "Digital In"
-#~ msgstr "Digital in"
-
-#~ msgid "HDMI"
-#~ msgstr "HDMI"
-
-#~ msgid "Modem"
-#~ msgstr "Modem"
-
-# Denna är svår att tolka
-#~ msgid "Handset"
-#~ msgstr "Telefonlur"
-
-#~ msgid "Other"
-#~ msgstr "Annan"
-
-#~ msgid "None"
-#~ msgstr "Ingen"
-
-#~ msgid "On"
-#~ msgstr "På"
-
-#~ msgid "Off"
-#~ msgstr "Av"
-
-#~ msgid "Mute"
-#~ msgstr "Tyst"
-
-#~ msgid "Fast"
-#~ msgstr "Snabb"
-
-#~ msgid "Very Low"
-#~ msgstr "Mycket låg"
-
-#~ msgid "Low"
-#~ msgstr "Låg"
-
-#~ msgid "Medium"
-#~ msgstr "Medel"
-
-#~ msgid "High"
-#~ msgstr "Hög"
-
-#~ msgid "Very High"
-#~ msgstr "Mycket hög"
-
-#~ msgid "Production"
-#~ msgstr "Produktion"
-
-#~ msgid "Front Panel Microphone"
-#~ msgstr "Frontpanelsmikrofon"
-
-#~ msgid "Front Panel Line In"
-#~ msgstr "Linje-in på frontpanel"
-
-#~ msgid "Front Panel Headphones"
-#~ msgstr "Hörlurar på frontpanel"
-
-#~ msgid "Front Panel Line Out"
-#~ msgstr "Linje-ut på frontpanel"
-
-#~ msgid "Green Connector"
-#~ msgstr "Grön kontakt"
-
-#~ msgid "Pink Connector"
-#~ msgstr "Rosa kontakt"
-
-#~ msgid "Blue Connector"
-#~ msgstr "Blå kontakt"
-
-#~ msgid "White Connector"
-#~ msgstr "Vit kontakt"
-
-#~ msgid "Black Connector"
-#~ msgstr "Svart kontakt"
-
-#~ msgid "Gray Connector"
-#~ msgstr "Grå kontakt"
-
-#~ msgid "Orange Connector"
-#~ msgstr "Orange kontakt"
-
-#~ msgid "Red Connector"
-#~ msgstr "Röd kontakt"
-
-#~ msgid "Yellow Connector"
-#~ msgstr "Gul kontakt"
-
-#~ msgid "Green Front Panel Connector"
-#~ msgstr "Grön frontpanelskontakt"
-
-#~ msgid "Pink Front Panel Connector"
-#~ msgstr "Rosa frontpanelskontakt"
-
-#~ msgid "Blue Front Panel Connector"
-#~ msgstr "Blå frontpanelskontakt"
-
-#~ msgid "White Front Panel Connector"
-#~ msgstr "Vit frontpanelskontakt"
-
-#~ msgid "Black Front Panel Connector"
-#~ msgstr "Svart frontpanelskontakt"
-
-#~ msgid "Gray Front Panel Connector"
-#~ msgstr "Grå frontpanelskontakt"
-
-#~ msgid "Orange Front Panel Connector"
-#~ msgstr "Orange frontpanelskontakt"
-
-#~ msgid "Red Front Panel Connector"
-#~ msgstr "Röd frontpanelskontakt"
-
-#~ msgid "Yellow Front Panel Connector"
-#~ msgstr "Gul frontpanelskontakt"
-
-#~ msgid "Spread Output"
-#~ msgstr "Spridd utgång"
-
-#~ msgid "Downmix"
-#~ msgstr "Nedmixning"
-
-#~ msgid "Virtual Mixer Input"
-#~ msgstr "Virtuell mixeringång"
-
-#~ msgid "Virtual Mixer Output"
-#~ msgstr "Virtuell mixerutgång"
-
-#~ msgid "Virtual Mixer Channels"
-#~ msgstr "Virtuella mixerkanaler"
-
-#~ msgid "%s Function"
-#~ msgstr "%s-funktion"
-
-#~ msgid "%s %d"
-#~ msgstr "%s %d"
-
-#~ msgid ""
-#~ "Could not open audio device for playback. Device is being used by another "
-#~ "application."
-#~ msgstr ""
-#~ "Kunde inte öppna ljudenheten för uppspelning. Enheten används av ett "
-#~ "annat program."
-
-#~ msgid ""
-#~ "Could not open audio device for playback. You don't have permission to "
-#~ "open the device."
-#~ msgstr ""
-#~ "Kunde inte öppna ljudenheten för uppspelning. Du har inte behörighet att "
-#~ "öppna enheten."
-
-#~ msgid "Could not open audio device for playback."
-#~ msgstr "Kunde inte öppna ljudenheten för uppspelning."
-
-#~ msgid ""
-#~ "Could not open audio device for playback. This version of the Open Sound "
-#~ "System is not supported by this element."
-#~ msgstr ""
-#~ "Kunde inte öppna ljudenheten för uppspelning. Denna version av Open Sound "
-#~ "System stöds inte av detta element."
-
-#~ msgid "Playback is not supported by this audio device."
-#~ msgstr "Uppspelning stöds inte av denna ljudenhet."
-
-#~ msgid "Audio playback error."
-#~ msgstr "Fel vid ljuduppspelning."
-
-#~ msgid "Recording is not supported by this audio device."
-#~ msgstr "Inspelning stöds inte av denna ljudenhet."
-
-#~ msgid "Error recording from audio device."
-#~ msgstr "Fel vid inspelning från ljudenhet."
-
-#~ msgid "Failed to configure TwoLAME encoder. Check your encoding parameters."
-#~ msgstr ""
-#~ "Misslyckades med att konfigurera TwoLAME-kodaren. Kontrollera dina "
-#~ "kodningsparametrar."
-
-#~ msgid ""
-#~ "The requested bitrate %d kbit/s for property '%s' is not allowed. The "
-#~ "bitrate was changed to %d kbit/s."
-#~ msgstr ""
-#~ "Den begärda bitfrekvensen %d kbit/s för egenskapen \"%s\" tillåts inte. "
-#~ "Bitfrekvensen ändrades till %d kbit/s."
-
-#~ msgid "PCM 1"
-#~ msgstr "PCM 1"
-
-#~ msgid "PCM 2"
-#~ msgstr "PCM 2"
-
-#~ msgid "PCM 3"
-#~ msgstr "PCM 3"
-
-#~ msgid "PCM 4"
-#~ msgstr "PCM 4"
-
-#~ msgid "Green connector function"
-#~ msgstr "Funktion för grön kontakt"
-
-#~ msgid "Green front panel connector function"
-#~ msgstr "Funktion för grön frontpanelskontakt"
-
-#~ msgid "Pink connector function"
-#~ msgstr "Funktion för rosa kontakt"
-
-#~ msgid "Pink front panel connector function"
-#~ msgstr "Funktion för rosa frontpanelskontakt"
-
-#~ msgid "Blue connector function"
-#~ msgstr "Funktion för blå kontakt"
-
-#~ msgid "Blue front panel connector function"
-#~ msgstr "Funktion för blå frontpanelskontakt"
-
-#~ msgid "Orange connector function"
-#~ msgstr "Funktion för orange kontakt"
-
-#~ msgid "Orange front panel connector function"
-#~ msgstr "Funktion för orange frontpanelskontakt"
-
-#~ msgid "Black connector function"
-#~ msgstr "Funktion för svart kontakt"
-
-#~ msgid "Black front panel connector function"
-#~ msgstr "Funktion för svart frontpanelskontakt"
-
-#~ msgid "Gray connector function"
-#~ msgstr "Funktion för grå kontakt"
-
-#~ msgid "Gray front panel connector function"
-#~ msgstr "Funktion för grå frontpanelskontakt"
-
-#~ msgid "White connector function"
-#~ msgstr "Funktion för vit kontakt"
-
-#~ msgid "White front panel connector function"
-#~ msgstr "Funktion för vit frontpanelskontakt"
-
-#~ msgid "Red connector function"
-#~ msgstr "Funktion för röd kontakt"
-
-#~ msgid "Red front panel connector function"
-#~ msgstr "Funktion för röd frontpanelskontakt"
-
-#~ msgid "Yellow connector function"
-#~ msgstr "Funktion för gul kontakt"
-
-#~ msgid "Yellow front panel connector function"
-#~ msgstr "Funktion för gul frontpanelskontakt"
-
-#~ msgid "Virtual mixer channel configuration"
-#~ msgstr "Kanalkonfiguration för virtuell mixer"
-
-#~ msgid "This file is corrupt and cannot be played."
-#~ msgstr "Den här filen är skadad och kan inte spelas upp."
-
-#~ msgid "This file is encrypted and cannot be played."
-#~ msgstr "Den här filen är krypterad och kan inte spelas upp."
diff --git a/tests/check/Makefile.am b/tests/check/Makefile.am
index 238fdd98c..76f39f59e 100644
--- a/tests/check/Makefile.am
+++ b/tests/check/Makefile.am
@@ -159,7 +159,9 @@ VALGRIND_TESTS_DISABLE = \
# these tests don't even pass
# neon: too flaky (almost always fails 'the first time')
+# colorspace: bad memory accesses in orc code for odd width buffers - https://bugzilla.gnome.org/show_bug.cgi?id=663248
noinst_PROGRAMS = \
+ pipelines/colorspace \
$(check_neon)
check_PROGRAMS = \
@@ -188,13 +190,13 @@ check_PROGRAMS = \
$(check_logoinsert) \
elements/h263parse \
elements/h264parse \
+ elements/mpegtsmux \
elements/mpegvideoparse \
elements/mpeg4videoparse \
elements/mxfdemux \
elements/mxfmux \
elements/id3mux \
pipelines/mxf \
- pipelines/colorspace \
$(check_mimic) \
elements/rtpmux \
libs/mpegvideoparser \
@@ -315,6 +317,10 @@ elements_rtpmux_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstrtp-0.10 $(GST_BASE_LIBS)
elements_assrender_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(AM_CFLAGS)
elements_assrender_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-0.10 -lgstapp-0.10 $(GST_BASE_LIBS) $(LDADD)
+elements_mpegtsmux_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(AM_CFLAGS)
+elements_mpegtsmux_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-0.10 $(GST_BASE_LIBS) $(LDADD)
+
+
EXTRA_DIST = gst-plugins-bad.supp
orc_cog_CFLAGS = $(ORC_CFLAGS)
diff --git a/tests/check/elements/.gitignore b/tests/check/elements/.gitignore
index 6df3baeda..08043fb77 100644
--- a/tests/check/elements/.gitignore
+++ b/tests/check/elements/.gitignore
@@ -25,11 +25,13 @@ logoinsert
mpeg2enc
mpegvideoparse
mpeg4videoparse
+mpegtsmux
mplex
mxfdemux
mxfmux
neonhttpsrc
ofa
+opus
rganalysis
rglimiter
rgvolume
@@ -40,6 +42,7 @@ timidity
y4menc
videorecordingbin
viewfinderbin
+voaacenc
vp8dec
vp8enc
zbar
diff --git a/tests/check/elements/camerabin2.c b/tests/check/elements/camerabin2.c
index 25f6ac8d2..efdd977f0 100644
--- a/tests/check/elements/camerabin2.c
+++ b/tests/check/elements/camerabin2.c
@@ -363,6 +363,7 @@ extract_jpeg_tags (const gchar * filename, gint num)
gst_object_unref (bus);
g_source_remove (source);
gst_object_unref (pipeline);
+ g_main_loop_unref (loop);
}
static void
@@ -579,6 +580,7 @@ check_file_validity (const gchar * filename, gint num, GstTagList * taglist,
g_source_remove (source);
gst_object_unref (bus);
gst_object_unref (playbin);
+ g_main_loop_unref (loop);
return TRUE;
}
@@ -626,6 +628,8 @@ wait_for_element_message (GstElement * camera, const gchar * name,
if (gst_structure_has_name (st, name))
break;
+ else
+ gst_message_unref (msg);
} else {
gst_message_unref (msg);
msg = NULL;
@@ -649,7 +653,8 @@ wait_for_idle_state (void)
if (idle)
break;
- g_usleep (GST_SECOND / 5);
+ GST_LOG ("waiting for idle state..");
+ g_usleep (G_USEC_PER_SEC / 5);
}
fail_unless (idle);
}
@@ -832,6 +837,7 @@ GST_START_TEST (test_multiple_video_recordings)
gst_caps_unref (caps);
+ GST_LOG ("starting #%d with caps %" GST_PTR_FORMAT, i, caps);
g_signal_emit_by_name (camera, "start-capture", NULL);
g_object_get (camera, "idle", &idle, NULL);
@@ -840,15 +846,20 @@ GST_START_TEST (test_multiple_video_recordings)
g_timeout_add_seconds (VIDEO_DURATION, (GSourceFunc) g_main_loop_quit,
main_loop);
g_main_loop_run (main_loop);
+
+ GST_LOG ("stopping run %d", i);
g_signal_emit_by_name (camera, "stop-capture", NULL);
msg = wait_for_element_message (camera, "video-done", GST_CLOCK_TIME_NONE);
fail_unless (msg != NULL);
gst_message_unref (msg);
+ GST_LOG ("video done, checking preview image");
check_preview_image (camera, video_filename, i);
+ GST_LOG ("waiting for idle state");
wait_for_idle_state ();
+ GST_LOG ("finished run %d", i);
}
gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL);
diff --git a/tests/check/elements/mpegtsmux.c b/tests/check/elements/mpegtsmux.c
new file mode 100644
index 000000000..a93ac67b6
--- /dev/null
+++ b/tests/check/elements/mpegtsmux.c
@@ -0,0 +1,323 @@
+/* GStreamer
+ *
+ * Copyright (C) 2011 Alessandro Decina <alessandro.d@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <gst/check/gstcheck.h>
+#include <string.h>
+#include <gst/video/video.h>
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate video_src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h264")
+ );
+
+static GstStaticPadTemplate audio_src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/mpeg")
+ );
+
+typedef struct _TestData
+{
+ GstEvent *sink_event;
+ GstEvent *src_event1;
+ GstEvent *src_event2;
+ gint src_events;
+} TestData;
+
+typedef struct _ThreadData
+{
+ GstPad *pad;
+ GstBuffer *buffer;
+ GstFlowReturn flow_return;
+ GThread *thread;
+} ThreadData;
+
+static gboolean
+src_event (GstPad * pad, GstEvent * event)
+{
+ TestData *data = (TestData *) gst_pad_get_element_private (pad);
+
+ if (event->type == GST_EVENT_CUSTOM_UPSTREAM) {
+ data->src_events += 1;
+ if (data->src_event1 != NULL)
+ data->src_event2 = event;
+ else
+ data->src_event1 = event;
+ }
+
+ return TRUE;
+}
+
+static gboolean
+sink_event (GstPad * pad, GstEvent * event)
+{
+ TestData *data = (TestData *) gst_pad_get_element_private (pad);
+
+ if (event->type == GST_EVENT_CUSTOM_DOWNSTREAM)
+ data->sink_event = event;
+
+ return TRUE;
+}
+
+static void
+link_sinks (GstElement * mpegtsmux,
+ GstPad ** src1, GstPad ** src2, GstPad ** src3, TestData * test_data)
+{
+ GstPad *mux_sink1, *mux_sink2, *mux_sink3;
+ GstCaps *caps;
+
+ /* link 3 sink pads, 2 video 1 audio */
+ *src1 = gst_pad_new_from_static_template (&video_src_template, "src1");
+ gst_pad_set_active (*src1, TRUE);
+ gst_pad_set_element_private (*src1, test_data);
+ gst_pad_set_event_function (*src1, src_event);
+ mux_sink1 = gst_element_get_request_pad (mpegtsmux, "sink_1");
+ fail_unless (gst_pad_link (*src1, mux_sink1) == GST_PAD_LINK_OK);
+
+ *src2 = gst_pad_new_from_static_template (&video_src_template, "src2");
+ gst_pad_set_active (*src2, TRUE);
+ gst_pad_set_element_private (*src2, test_data);
+ gst_pad_set_event_function (*src2, src_event);
+ mux_sink2 = gst_element_get_request_pad (mpegtsmux, "sink_2");
+ fail_unless (gst_pad_link (*src2, mux_sink2) == GST_PAD_LINK_OK);
+
+ *src3 = gst_pad_new_from_static_template (&audio_src_template, "src3");
+ gst_pad_set_active (*src3, TRUE);
+ gst_pad_set_element_private (*src3, test_data);
+ gst_pad_set_event_function (*src3, src_event);
+ mux_sink3 = gst_element_get_request_pad (mpegtsmux, "sink_3");
+ fail_unless (gst_pad_link (*src3, mux_sink3) == GST_PAD_LINK_OK);
+
+ caps = gst_caps_new_simple ("video/x-h264", NULL);
+ gst_pad_set_caps (mux_sink1, caps);
+ gst_pad_set_caps (mux_sink2, caps);
+ gst_caps_unref (caps);
+ caps = gst_caps_new_simple ("audio/mpeg", "mpegversion", G_TYPE_INT, 4, NULL);
+ gst_pad_set_caps (mux_sink3, caps);
+ gst_caps_unref (caps);
+
+ gst_object_unref (mux_sink1);
+ gst_object_unref (mux_sink2);
+ gst_object_unref (mux_sink3);
+}
+
+static void
+link_src (GstElement * mpegtsmux, GstPad ** sink, TestData * test_data)
+{
+ GstPad *mux_src;
+
+ mux_src = gst_element_get_static_pad (mpegtsmux, "src");
+ *sink = gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_active (*sink, TRUE);
+ gst_pad_set_event_function (*sink, sink_event);
+ gst_pad_set_element_private (*sink, test_data);
+ fail_unless (gst_pad_link (mux_src, *sink) == GST_PAD_LINK_OK);
+
+ gst_object_unref (mux_src);
+}
+
+static gpointer
+pad_push_thread (gpointer user_data)
+{
+ ThreadData *data = (ThreadData *) user_data;
+
+ data->flow_return = gst_pad_push (data->pad, data->buffer);
+
+ return NULL;
+}
+
+static ThreadData *
+pad_push (GstPad * pad, GstBuffer * buffer, GstClockTime timestamp)
+{
+ ThreadData *data;
+
+ data = g_new0 (ThreadData, 1);
+ data->pad = pad;
+ data->buffer = buffer;
+ GST_BUFFER_TIMESTAMP (buffer) = timestamp;
+ data->thread = g_thread_create (pad_push_thread, data, TRUE, NULL);
+
+ return data;
+}
+
+GST_START_TEST (test_force_key_unit_event_downstream)
+{
+ GstElement *mpegtsmux;
+ GstPad *sink;
+ GstPad *src1;
+ GstPad *src2;
+ GstPad *src3;
+ GstEvent *sink_event;
+ GstClockTime timestamp, stream_time, running_time;
+ gboolean all_headers = TRUE;
+ gint count = 0;
+ ThreadData *thread_data_1, *thread_data_2, *thread_data_3, *thread_data_4;
+ TestData test_data = { 0, };
+
+ mpegtsmux = gst_check_setup_element ("mpegtsmux");
+ gst_element_set_state (mpegtsmux, GST_STATE_PLAYING);
+
+ link_src (mpegtsmux, &sink, &test_data);
+ link_sinks (mpegtsmux, &src1, &src2, &src3, &test_data);
+
+ /* hack: make sure collectpads builds collect->data */
+ gst_pad_push_event (src1, gst_event_new_flush_start ());
+ gst_pad_push_event (src1, gst_event_new_flush_stop ());
+
+ /* send a force-key-unit event with running_time=2s */
+ timestamp = stream_time = running_time = 2 * GST_SECOND;
+ sink_event = gst_video_event_new_downstream_force_key_unit (timestamp,
+ stream_time, running_time, all_headers, count);
+
+ fail_unless (gst_pad_push_event (src1, sink_event));
+ fail_unless (test_data.sink_event == NULL);
+
+ /* push 4 buffers, make sure mpegtsmux handles the force-key-unit event when
+ * the buffer with the requested running time is collected */
+ thread_data_1 = pad_push (src1, gst_buffer_new (), 1 * GST_SECOND);
+ thread_data_2 = pad_push (src2, gst_buffer_new (), 2 * GST_SECOND);
+ thread_data_3 = pad_push (src3, gst_buffer_new (), 3 * GST_SECOND);
+
+ g_thread_join (thread_data_1->thread);
+ fail_unless (test_data.sink_event == NULL);
+
+ /* push again on src1 so that the buffer on src2 is collected */
+ thread_data_4 = pad_push (src1, gst_buffer_new (), 4 * GST_SECOND);
+
+ g_thread_join (thread_data_2->thread);
+ fail_unless (test_data.sink_event != NULL);
+
+ gst_element_set_state (mpegtsmux, GST_STATE_NULL);
+
+ g_thread_join (thread_data_3->thread);
+ g_thread_join (thread_data_4->thread);
+
+ g_free (thread_data_1);
+ g_free (thread_data_2);
+ g_free (thread_data_3);
+ g_free (thread_data_4);
+ gst_object_unref (src1);
+ gst_object_unref (src2);
+ gst_object_unref (src3);
+ gst_object_unref (sink);
+ gst_object_unref (mpegtsmux);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_force_key_unit_event_upstream)
+{
+ GstElement *mpegtsmux;
+ GstPad *sink;
+ GstPad *src1;
+ GstPad *src2;
+ GstPad *src3;
+ GstEvent *event;
+ GstClockTime timestamp, stream_time, running_time;
+ gboolean all_headers = TRUE;
+ gint count = 0;
+ TestData test_data = { 0, };
+ ThreadData *thread_data_1, *thread_data_2, *thread_data_3, *thread_data_4;
+
+ mpegtsmux = gst_check_setup_element ("mpegtsmux");
+ gst_element_set_state (mpegtsmux, GST_STATE_PLAYING);
+
+ link_src (mpegtsmux, &sink, &test_data);
+ link_sinks (mpegtsmux, &src1, &src2, &src3, &test_data);
+
+ /* hack: make sure collectpads builds collect->data */
+ gst_pad_push_event (src1, gst_event_new_flush_start ());
+ gst_pad_push_event (src1, gst_event_new_flush_stop ());
+
+ /* send an upstream force-key-unit event with running_time=2s */
+ timestamp = stream_time = running_time = 2 * GST_SECOND;
+ event =
+ gst_video_event_new_upstream_force_key_unit (running_time, TRUE, count);
+ fail_unless (gst_pad_push_event (sink, event));
+
+ fail_unless (test_data.sink_event == NULL);
+ fail_unless_equals_int (test_data.src_events, 3);
+
+ /* send downstream events with unrelated seqnums */
+ event = gst_video_event_new_downstream_force_key_unit (timestamp,
+ stream_time, running_time, all_headers, count);
+ fail_unless (gst_pad_push_event (src1, event));
+ event = gst_video_event_new_downstream_force_key_unit (timestamp,
+ stream_time, running_time, all_headers, count);
+ fail_unless (gst_pad_push_event (src2, event));
+
+ /* events should be skipped */
+ fail_unless (test_data.sink_event == NULL);
+
+ /* push 4 buffers, make sure mpegtsmux handles the force-key-unit event when
+ * the buffer with the requested running time is collected */
+ thread_data_1 = pad_push (src1, gst_buffer_new (), 1 * GST_SECOND);
+ thread_data_2 = pad_push (src2, gst_buffer_new (), 2 * GST_SECOND);
+ thread_data_3 = pad_push (src3, gst_buffer_new (), 3 * GST_SECOND);
+
+ g_thread_join (thread_data_1->thread);
+ fail_unless (test_data.sink_event == NULL);
+
+ /* push again on src1 so that the buffer on src2 is collected */
+ thread_data_4 = pad_push (src1, gst_buffer_new (), 4 * GST_SECOND);
+
+ g_thread_join (thread_data_2->thread);
+ fail_unless (test_data.sink_event != NULL);
+
+ gst_element_set_state (mpegtsmux, GST_STATE_NULL);
+
+ g_thread_join (thread_data_3->thread);
+ g_thread_join (thread_data_4->thread);
+
+ g_free (thread_data_1);
+ g_free (thread_data_2);
+ g_free (thread_data_3);
+ g_free (thread_data_4);
+
+ gst_object_unref (src1);
+ gst_object_unref (src2);
+ gst_object_unref (src3);
+ gst_object_unref (sink);
+ gst_object_unref (mpegtsmux);
+}
+
+GST_END_TEST;
+
+static Suite *
+mpegtsmux_suite (void)
+{
+ Suite *s = suite_create ("mpegtsmux");
+ TCase *tc_chain = tcase_create ("general");
+
+ suite_add_tcase (s, tc_chain);
+
+ tcase_add_test (tc_chain, test_force_key_unit_event_downstream);
+ tcase_add_test (tc_chain, test_force_key_unit_event_upstream);
+
+ return s;
+}
+
+GST_CHECK_MAIN (mpegtsmux);
diff --git a/tests/check/libs/.gitignore b/tests/check/libs/.gitignore
new file mode 100644
index 000000000..33bb8b6d6
--- /dev/null
+++ b/tests/check/libs/.gitignore
@@ -0,0 +1,3 @@
+h264parser
+mpegvideoparser
+vc1parser
diff --git a/tests/check/pipelines/colorspace.c b/tests/check/pipelines/colorspace.c
index 3704e86e3..1acb4ab67 100644
--- a/tests/check/pipelines/colorspace.c
+++ b/tests/check/pipelines/colorspace.c
@@ -199,17 +199,16 @@ colorspace_compare (gint width, gint height, gboolean comp)
for (j = 0; j < gst_caps_get_size (caps); j++) {
GstCaps *in_caps, *out_caps;
GstStructure *s;
- guint32 fourcc;
+ const gchar *fourcc;
in_caps = gst_caps_copy_nth (caps, i);
out_caps = gst_caps_copy_nth (caps, j);
/* FIXME remove if videotestsrc and video format handle these properly */
s = gst_caps_get_structure (in_caps, 0);
- if (gst_structure_get_fourcc (s, "format", &fourcc)) {
- if (fourcc == GST_MAKE_FOURCC ('Y', 'U', 'V', '9') ||
- fourcc == GST_MAKE_FOURCC ('Y', 'V', 'U', '9') ||
- fourcc == GST_MAKE_FOURCC ('v', '2', '1', '6')) {
+ if ((fourcc = gst_structure_get_string (s, "format"))) {
+ if (!strcmp (fourcc, "YUV9") ||
+ !strcmp (fourcc, "YVU9") || !strcmp (fourcc, "v216")) {
gst_caps_unref (in_caps);
gst_caps_unref (out_caps);
continue;