author     Edward Hervey <edward.hervey@collabora.co.uk>    2011-12-30 11:41:17 +0100
committer  Edward Hervey <edward.hervey@collabora.co.uk>    2011-12-30 11:41:17 +0100
commit     f70a623418a52eada0278002f8d266d49957b3d5 (patch)
tree       fc54fd9fab400e886b11a9a2b313478b66dd7969 /ext
parent     d5aaefa59f9ef4153644a2aa254c39a3a9d108e3 (diff)
parent     d465188879cd4dd0735e3fbcaeb83d98f217ed88 (diff)
download   gstreamer-plugins-bad-f70a623418a52eada0278002f8d266d49957b3d5.tar.gz
Merge remote-tracking branch 'origin/master' into 0.11-premerge
Conflicts:
    docs/libs/Makefile.am
    ext/kate/gstkatetiger.c
    ext/opus/gstopusdec.c
    ext/xvid/gstxvidenc.c
    gst-libs/gst/basecamerabinsrc/Makefile.am
    gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c
    gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h
    gst-libs/gst/video/gstbasevideocodec.c
    gst-libs/gst/video/gstbasevideocodec.h
    gst-libs/gst/video/gstbasevideodecoder.c
    gst-libs/gst/video/gstbasevideoencoder.c
    gst/asfmux/gstasfmux.c
    gst/audiovisualizers/gstwavescope.c
    gst/camerabin2/gstcamerabin2.c
    gst/debugutils/gstcompare.c
    gst/frei0r/gstfrei0rmixer.c
    gst/mpegpsmux/mpegpsmux.c
    gst/mpegtsmux/mpegtsmux.c
    gst/mxf/mxfmux.c
    gst/videomeasure/gstvideomeasure_ssim.c
    gst/videoparsers/gsth264parse.c
    gst/videoparsers/gstmpeg4videoparse.c
Diffstat (limited to 'ext')
-rw-r--r--  ext/Makefile.am                     4
-rw-r--r--  ext/dts/Makefile.am                 7
-rw-r--r--  ext/dts/gstdtsdec.c                 9
-rw-r--r--  ext/faac/gstfaac.c                 86
-rw-r--r--  ext/faac/gstfaac.h                  4
-rw-r--r--  ext/kate/gstkatetiger.c           486
-rw-r--r--  ext/kate/gstkatetiger.h             4
-rw-r--r--  ext/opencv/gstmotioncells.c        51
-rw-r--r--  ext/opencv/gstmotioncells.h         1
-rw-r--r--  ext/opus/Makefile.am                5
-rw-r--r--  ext/opus/gstopus.c                 11
-rw-r--r--  ext/opus/gstopuscommon.c           18
-rw-r--r--  ext/opus/gstopuscommon.h            3
-rw-r--r--  ext/opus/gstopusdec.c              77
-rw-r--r--  ext/opus/gstopusenc.c             279
-rw-r--r--  ext/opus/gstopusenc.h               4
-rw-r--r--  ext/opus/gstopusheader.c           46
-rw-r--r--  ext/opus/gstopusheader.h            2
-rw-r--r--  ext/opus/gstopusparse.c             2
-rw-r--r--  ext/opus/gstrtpopusdepay.c        120
-rw-r--r--  ext/opus/gstrtpopusdepay.h         57
-rw-r--r--  ext/opus/gstrtpopuspay.c          137
-rw-r--r--  ext/opus/gstrtpopuspay.h           58
-rw-r--r--  ext/resindvd/rsndec.c               3
-rw-r--r--  ext/voaacenc/gstvoaacenc.c         10
-rw-r--r--  ext/voamrwbenc/gstvoamrwbenc.c      5
-rw-r--r--  ext/xvid/gstxvidenc.c             192
-rw-r--r--  ext/xvid/gstxvidenc.h               1
28 files changed, 1084 insertions, 598 deletions
diff --git a/ext/Makefile.am b/ext/Makefile.am
index 95ba3da75..dc62386ca 100644
--- a/ext/Makefile.am
+++ b/ext/Makefile.am
@@ -349,9 +349,9 @@ SWFDEC_DIR=
endif
if USE_TELETEXTDEC
-TELETEXT_DIR=teletextdec
+TELETEXTDEC_DIR=teletextdec
else
-TELETEXT_DIR=
+TELETEXTDEC_DIR=
endif
if USE_VP8
diff --git a/ext/dts/Makefile.am b/ext/dts/Makefile.am
index f58f14972..97a838531 100644
--- a/ext/dts/Makefile.am
+++ b/ext/dts/Makefile.am
@@ -2,9 +2,10 @@ plugin_LTLIBRARIES = libgstdtsdec.la
libgstdtsdec_la_SOURCES = gstdtsdec.c
libgstdtsdec_la_CFLAGS = -DGST_USE_UNSTABLE_API \
- $(GST_CFLAGS) $(ORC_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
-libgstdtsdec_la_LIBADD = $(DTS_LIBS) $(ORC_LIBS) $(GST_PLUGINS_BASE_LIBS) \
- -lgstaudio-@GST_MAJORMINOR@
+ $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(ORC_CFLAGS)
+libgstdtsdec_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) \
+ -lgstaudio-@GST_MAJORMINOR@ \
+ $(DTS_LIBS) $(ORC_LIBS)
libgstdtsdec_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstdtsdec_la_LIBTOOLFLAGS = --tag=disable-static
diff --git a/ext/dts/gstdtsdec.c b/ext/dts/gstdtsdec.c
index 2a762e903..f71219478 100644
--- a/ext/dts/gstdtsdec.c
+++ b/ext/dts/gstdtsdec.c
@@ -494,11 +494,15 @@ gst_dtsdec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buffer)
guint8 *data;
gint size, chans;
gint length = 0, flags, sample_rate, bit_rate, frame_length;
- GstFlowReturn result = GST_FLOW_UNEXPECTED;
+ GstFlowReturn result = GST_FLOW_OK;
GstBuffer *outbuf;
dts = GST_DTSDEC (bdec);
+ /* no fancy draining */
+ if (G_UNLIKELY (!buffer))
+ return GST_FLOW_OK;
+
/* parsed stuff already, so this should work out fine */
data = GST_BUFFER_DATA (buffer);
size = GST_BUFFER_SIZE (buffer);
@@ -599,7 +603,8 @@ gst_dtsdec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buffer)
/* negotiate if required */
if (need_renegotiation) {
- GST_DEBUG ("dtsdec: sample_rate:%d stream_chans:0x%x using_chans:0x%x",
+ GST_DEBUG_OBJECT (dts,
+ "dtsdec: sample_rate:%d stream_chans:0x%x using_chans:0x%x",
dts->sample_rate, dts->stream_channels, dts->using_channels);
if (!gst_dtsdec_renegotiate (dts))
goto failed_negotiation;
diff --git a/ext/faac/gstfaac.c b/ext/faac/gstfaac.c
index ddfae6d52..c4b34f544 100644
--- a/ext/faac/gstfaac.c
+++ b/ext/faac/gstfaac.c
@@ -369,9 +369,8 @@ static gboolean
gst_faac_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
{
GstFaac *faac = GST_FAAC (enc);
- faacEncHandle *handle;
gint channels, samplerate, width;
- gulong samples, bytes, fmt = 0, bps = 0;
+ gulong fmt = 0, bps = 0;
gboolean result = FALSE;
/* base class takes care */
@@ -398,41 +397,24 @@ gst_faac_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
bps = 4;
}
- /* clean up in case of re-configure */
- gst_faac_close_encoder (faac);
-
- if (!(handle = faacEncOpen (samplerate, channels, &samples, &bytes)))
- goto setup_failed;
-
- /* mind channel count */
- samples /= channels;
-
/* ok, record and set up */
faac->format = fmt;
faac->bps = bps;
- faac->handle = handle;
- faac->bytes = bytes;
- faac->samples = samples;
faac->channels = channels;
faac->samplerate = samplerate;
/* finish up */
result = gst_faac_configure_source_pad (faac);
+ if (!result)
+ goto done;
/* report needs to base class */
- gst_audio_encoder_set_frame_samples_min (enc, samples);
- gst_audio_encoder_set_frame_samples_max (enc, samples);
+ gst_audio_encoder_set_frame_samples_min (enc, faac->samples);
+ gst_audio_encoder_set_frame_samples_max (enc, faac->samples);
gst_audio_encoder_set_frame_max (enc, 1);
done:
return result;
-
- /* ERRORS */
-setup_failed:
- {
- GST_ELEMENT_ERROR (faac, LIBRARY, SETTINGS, (NULL), (NULL));
- goto done;
- }
}
/* check downstream caps to configure format */
@@ -494,15 +476,32 @@ gst_faac_negotiate (GstFaac * faac)
}
static gboolean
-gst_faac_configure_source_pad (GstFaac * faac)
+gst_faac_open_encoder (GstFaac * faac)
{
- GstCaps *srccaps;
- gboolean ret = FALSE;
+ faacEncHandle *handle;
faacEncConfiguration *conf;
guint maxbitrate;
+ gulong samples, bytes;
- /* negotiate stream format */
- gst_faac_negotiate (faac);
+ g_return_val_if_fail (faac->samplerate != 0 && faac->channels != 0, FALSE);
+
+ /* clean up in case of re-configure */
+ gst_faac_close_encoder (faac);
+
+ if (!(handle = faacEncOpen (faac->samplerate, faac->channels,
+ &samples, &bytes)))
+ goto setup_failed;
+
+ /* mind channel count */
+ samples /= faac->channels;
+
+ /* record */
+ faac->handle = handle;
+ faac->samples = samples;
+ faac->bytes = bytes;
+
+ GST_DEBUG_OBJECT (faac, "faac needs samples %d, output size %d",
+ faac->samples, faac->bytes);
/* we negotiated caps update current configuration */
conf = faacEncGetCurrentConfiguration (faac->handle);
@@ -539,7 +538,7 @@ gst_faac_configure_source_pad (GstFaac * faac)
conf->bandWidth = 0;
if (!faacEncSetConfiguration (faac->handle, conf))
- goto set_failed;
+ goto setup_failed;
/* let's see what really happened,
* note that this may not really match desired rate */
@@ -548,6 +547,28 @@ gst_faac_configure_source_pad (GstFaac * faac)
GST_DEBUG_OBJECT (faac, "quantization quality: %ld", conf->quantqual);
GST_DEBUG_OBJECT (faac, "bandwidth: %d Hz", conf->bandWidth);
+ return TRUE;
+
+ /* ERRORS */
+setup_failed:
+ {
+ GST_ELEMENT_ERROR (faac, LIBRARY, SETTINGS, (NULL), (NULL));
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_faac_configure_source_pad (GstFaac * faac)
+{
+ GstCaps *srccaps;
+ gboolean ret;
+
+ /* negotiate stream format */
+ gst_faac_negotiate (faac);
+
+ if (!gst_faac_open_encoder (faac))
+ goto set_failed;
+
/* now create a caps for it all */
srccaps = gst_caps_new_simple ("audio/mpeg",
"mpegversion", G_TYPE_INT, faac->mpegversion,
@@ -665,6 +686,13 @@ gst_faac_handle_frame (GstAudioEncoder * enc, GstBuffer * in_buf)
} else {
gst_buffer_unmap (out_buf, out_data, 0);
gst_buffer_unref (out_buf);
+ /* re-create encoder after final flush */
+ if (!in_buf) {
+ GST_DEBUG_OBJECT (faac, "flushed; recreating encoder");
+ gst_faac_close_encoder (faac);
+ if (!gst_faac_open_encoder (faac))
+ ret = GST_FLOW_ERROR;
+ }
}
return ret;
diff --git a/ext/faac/gstfaac.h b/ext/faac/gstfaac.h
index 5bd057493..029e4fd05 100644
--- a/ext/faac/gstfaac.h
+++ b/ext/faac/gstfaac.h
@@ -51,9 +51,9 @@ struct _GstFaac {
bps;
/* input frame size */
- gulong samples;
+ gint samples;
/* required output buffer size */
- gulong bytes;
+ gint bytes;
/* negotiated */
gint mpegversion, outputformat;
diff --git a/ext/kate/gstkatetiger.c b/ext/kate/gstkatetiger.c
index f618b2e10..1504e112b 100644
--- a/ext/kate/gstkatetiger.c
+++ b/ext/kate/gstkatetiger.c
@@ -3,7 +3,6 @@
* Copyright 2005 Thomas Vander Stichele <thomas@apestaart.org>
* Copyright 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
* Copyright 2008 Vincent Penquerc'h <ogg.k.ogg.k@googlemail.com>
- * Copyright (C) <2009> Young-Ho Cha <ganadist@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
@@ -126,39 +125,6 @@ enum
ARG_SILENT
};
-/* RGB -> YUV blitting routines taken from textoverlay,
- original code from Young-Ho Cha <ganadist@gmail.com> */
-
-#define COMP_Y(ret, r, g, b) \
-{ \
- ret = (int) (((19595 * r) >> 16) + ((38470 * g) >> 16) + ((7471 * b) >> 16)); \
- ret = CLAMP (ret, 0, 255); \
-}
-
-#define COMP_U(ret, r, g, b) \
-{ \
- ret = (int) (-((11059 * r) >> 16) - ((21709 * g) >> 16) + ((32768 * b) >> 16) + 128); \
- ret = CLAMP (ret, 0, 255); \
-}
-
-#define COMP_V(ret, r, g, b) \
-{ \
- ret = (int) (((32768 * r) >> 16) - ((27439 * g) >> 16) - ((5329 * b) >> 16) + 128); \
- ret = CLAMP (ret, 0, 255); \
-}
-
-#define BLEND(ret, alpha, v0, v1) \
-{ \
- ret = (v0 * alpha + v1 * (255 - alpha)) / 255; \
-}
-
-#define OVER(ret, alphaA, Ca, alphaB, Cb, alphaNew) \
-{ \
- gint _tmp; \
- _tmp = (Ca * alphaA + Cb * alphaB * (255 - alphaA) / 255) / alphaNew; \
- ret = CLAMP (_tmp, 0, 255); \
-}
-
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
# define TIGER_ARGB_A 3
# define TIGER_ARGB_R 2
@@ -187,11 +153,16 @@ static GstStaticPadTemplate kate_sink_factory =
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define TIGER_VIDEO_CAPS \
GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_BGRx ";" \
- GST_VIDEO_CAPS_YUV ("{AYUV, I420, YV12, UYVY, NV12, NV21}")
+ GST_VIDEO_CAPS_YUV ("{I420, YV12, AYUV, YUY2, UYVY, v308, v210," \
+ " v216, Y41B, Y42B, Y444, Y800, Y16, NV12, NV21, UYVP, A420," \
+ " YUV9, IYU1}")
+
#else
#define TIGER_VIDEO_CAPS \
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xRGB ";" \
- GST_VIDEO_CAPS_YUV ("{AYUV, I420, YV12, UYVY, NV12, NV21}")
+ GST_VIDEO_CAPS_YUV ("{I420, YV12, AYUV, YUY2, UYVY, v308, v210," \
+ " v216, Y41B, Y42B, Y444, Y800, Y16, NV12, NV21, UYVP, A420," \
+ " YUV9, IYU1}")
#endif
static GstStaticPadTemplate video_sink_factory =
@@ -417,6 +388,8 @@ gst_kate_tiger_init (GstKateTiger * tiger, GstKateTigerClass * gclass)
tiger->video_width = 0;
tiger->video_height = 0;
+ tiger->composition = NULL;
+
tiger->seen_header = FALSE;
}
@@ -432,8 +405,10 @@ gst_kate_tiger_dispose (GObject * object)
tiger->default_font_desc = NULL;
}
- g_free (tiger->render_buffer);
- tiger->render_buffer = NULL;
+ if (tiger->render_buffer) {
+ gst_buffer_unref (tiger->render_buffer);
+ tiger->render_buffer = NULL;
+ }
g_cond_free (tiger->cond);
tiger->cond = NULL;
@@ -441,6 +416,11 @@ gst_kate_tiger_dispose (GObject * object)
g_mutex_free (tiger->mutex);
tiger->mutex = NULL;
+ if (tiger->composition) {
+ gst_video_overlay_composition_unref (tiger->composition);
+ tiger->composition = NULL;
+ }
+
GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
}
@@ -789,404 +769,44 @@ gst_kate_tiger_get_time (GstKateTiger * tiger)
}
static inline void
-gst_kate_tiger_blit_1 (GstKateTiger * tiger, guchar * dest, gint xpos,
- gint ypos, const guint8 * image, gint image_width, gint image_height,
- guint dest_stride)
+gst_kate_tiger_set_composition (GstKateTiger * tiger)
{
- gint i, j = 0;
- gint x, y;
- guchar r, g, b, a;
- const guint8 *pimage;
- guchar *py;
- gint width = image_width;
- gint height = image_height;
-
- if (xpos < 0) {
- xpos = 0;
- }
-
- if (xpos + width > tiger->video_width) {
- width = tiger->video_width - xpos;
- }
-
- if (ypos + height > tiger->video_height) {
- height = tiger->video_height - ypos;
- }
-
- dest += (ypos / 1) * dest_stride;
-
- for (i = 0; i < height; i++) {
- pimage = image + 4 * (i * image_width);
- py = dest + i * dest_stride + xpos;
- for (j = 0; j < width; j++) {
- b = pimage[TIGER_ARGB_B];
- g = pimage[TIGER_ARGB_G];
- r = pimage[TIGER_ARGB_R];
- a = pimage[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a, r, g, b);
-
- pimage += 4;
- if (a == 0) {
- py++;
- continue;
- }
- COMP_Y (y, r, g, b);
- x = *py;
- BLEND (*py++, a, y, x);
- }
+ GstVideoOverlayRectangle *rectangle;
+
+ if (tiger->render_buffer) {
+ rectangle = gst_video_overlay_rectangle_new_argb (tiger->render_buffer,
+ tiger->video_width, tiger->video_height, 4 * tiger->video_width,
+ 0, 0, tiger->video_width, tiger->video_height,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
+
+ if (tiger->composition)
+ gst_video_overlay_composition_unref (tiger->composition);
+ tiger->composition = gst_video_overlay_composition_new (rectangle);
+ gst_video_overlay_rectangle_unref (rectangle);
+
+ } else if (tiger->composition) {
+ gst_video_overlay_composition_unref (tiger->composition);
+ tiger->composition = NULL;
}
}
static inline void
-gst_kate_tiger_blit_sub2x2cbcr (GstKateTiger * tiger,
- guchar * destcb, guchar * destcr, gint xpos, gint ypos,
- const guint8 * image, gint image_width, gint image_height,
- guint destcb_stride, guint destcr_stride, guint pix_stride)
+gst_kate_tiger_unpremultiply (GstKateTiger * tiger)
{
- gint i, j;
- gint x, cb, cr;
- gushort r, g, b, a;
- gushort r1, g1, b1, a1;
- const guint8 *pimage1, *pimage2;
- guchar *pcb, *pcr;
- gint width = image_width - 2;
- gint height = image_height - 2;
-
- xpos *= pix_stride;
-
- if (xpos < 0) {
- xpos = 0;
- }
-
- if (xpos + width > tiger->video_width) {
- width = tiger->video_width - xpos;
- }
-
- if (ypos + height > tiger->video_height) {
- height = tiger->video_height - ypos;
- }
-
- destcb += (ypos / 2) * destcb_stride;
- destcr += (ypos / 2) * destcr_stride;
-
- for (i = 0; i < height; i += 2) {
- pimage1 = image + 4 * (i * image_width);
- pimage2 = pimage1 + 4 * image_width;
- pcb = destcb + (i / 2) * destcb_stride + xpos / 2;
- pcr = destcr + (i / 2) * destcr_stride + xpos / 2;
- for (j = 0; j < width; j += 2) {
- b = pimage1[TIGER_ARGB_B];
- g = pimage1[TIGER_ARGB_G];
- r = pimage1[TIGER_ARGB_R];
- a = pimage1[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a, r, g, b);
- pimage1 += 4;
-
- b1 = pimage1[TIGER_ARGB_B];
- g1 = pimage1[TIGER_ARGB_G];
- r1 = pimage1[TIGER_ARGB_R];
- a1 = pimage1[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
- b += b1;
- g += g1;
- r += r1;
- a += a1;
- pimage1 += 4;
-
- b1 = pimage2[TIGER_ARGB_B];
- g1 = pimage2[TIGER_ARGB_G];
- r1 = pimage2[TIGER_ARGB_R];
- a1 = pimage2[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
- b += b1;
- g += g1;
- r += r1;
- a += a1;
- pimage2 += 4;
-
- /* + 2 for rounding */
- b1 = pimage2[TIGER_ARGB_B];
- g1 = pimage2[TIGER_ARGB_G];
- r1 = pimage2[TIGER_ARGB_R];
- a1 = pimage2[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
- b += b1 + 2;
- g += g1 + 2;
- r += r1 + 2;
- a += a1 + 2;
- pimage2 += 4;
-
- b /= 4;
- g /= 4;
- r /= 4;
- a /= 4;
-
- if (a == 0) {
- pcb += pix_stride;
- pcr += pix_stride;
- continue;
- }
- COMP_U (cb, r, g, b);
- COMP_V (cr, r, g, b);
+ guint i, j;
+ guint8 *pimage, *text_image = GST_BUFFER_DATA (tiger->render_buffer);
- x = *pcb;
- BLEND (*pcb, a, cb, x);
- x = *pcr;
- BLEND (*pcr, a, cr, x);
-
- pcb += pix_stride;
- pcr += pix_stride;
- }
- }
-}
-
-/* FIXME:
- * - use proper strides and offset for I420
- */
-
-static inline void
-gst_kate_tiger_blit_NV12_NV21 (GstKateTiger * tiger,
- guint8 * yuv_pixels, gint xpos, gint ypos, const guint8 * image,
- gint image_width, gint image_height)
-{
- int y_stride, uv_stride;
- int u_offset, v_offset;
- int h, w;
-
- /* because U/V is 2x2 subsampled, we need to round, either up or down,
- * to a boundary of integer number of U/V pixels:
- */
- xpos = GST_ROUND_UP_2 (xpos);
- ypos = GST_ROUND_UP_2 (ypos);
-
- w = tiger->video_width;
- h = tiger->video_height;
-
- y_stride = gst_video_format_get_row_stride (tiger->video_format, 0, w);
- uv_stride = gst_video_format_get_row_stride (tiger->video_format, 1, w);
- u_offset =
- gst_video_format_get_component_offset (tiger->video_format, 1, w, h);
- v_offset =
- gst_video_format_get_component_offset (tiger->video_format, 2, w, h);
-
- gst_kate_tiger_blit_1 (tiger, yuv_pixels, xpos, ypos, image, image_width,
- image_height, y_stride);
- gst_kate_tiger_blit_sub2x2cbcr (tiger, yuv_pixels + u_offset,
- yuv_pixels + v_offset, xpos, ypos, image, image_width, image_height,
- uv_stride, uv_stride, 2);
-}
-
-static inline void
-gst_kate_tiger_blit_I420_YV12 (GstKateTiger * tiger,
- guint8 * yuv_pixels, gint xpos, gint ypos, const guint8 * image,
- gint image_width, gint image_height)
-{
- int y_stride, u_stride, v_stride;
- int u_offset, v_offset;
- int h, w;
-
- /* because U/V is 2x2 subsampled, we need to round, either up or down,
- * to a boundary of integer number of U/V pixels:
- */
- xpos = GST_ROUND_UP_2 (xpos);
- ypos = GST_ROUND_UP_2 (ypos);
-
- w = tiger->video_width;
- h = tiger->video_height;
-
- y_stride = gst_video_format_get_row_stride (tiger->video_format, 0, w);
- u_stride = gst_video_format_get_row_stride (tiger->video_format, 1, w);
- v_stride = gst_video_format_get_row_stride (tiger->video_format, 2, w);
- u_offset =
- gst_video_format_get_component_offset (tiger->video_format, 1, w, h);
- v_offset =
- gst_video_format_get_component_offset (tiger->video_format, 2, w, h);
-
- gst_kate_tiger_blit_1 (tiger, yuv_pixels, xpos, ypos, image, image_width,
- image_height, y_stride);
- gst_kate_tiger_blit_sub2x2cbcr (tiger, yuv_pixels + u_offset,
- yuv_pixels + v_offset, xpos, ypos, image, image_width, image_height,
- u_stride, v_stride, 1);
-}
-
-static inline void
-gst_kate_tiger_blit_UYVY (GstKateTiger * tiger,
- guint8 * yuv_pixels, gint xpos, gint ypos, const guint8 * image,
- gint image_width, gint image_height)
-{
- int a0, r0, g0, b0;
- int a1, r1, g1, b1;
- int y0, y1, u, v;
- int i, j;
- int h, w;
- const guint8 *pimage;
- guchar *dest;
-
- /* because U/V is 2x horizontally subsampled, we need to round to a
- * boundary of integer number of U/V pixels in x dimension:
- */
- xpos = GST_ROUND_UP_2 (xpos);
-
- w = image_width - 2;
- h = image_height - 2;
-
- if (xpos < 0) {
- xpos = 0;
- }
-
- if (xpos + w > tiger->video_width) {
- w = tiger->video_width - xpos;
- }
-
- if (ypos + h > tiger->video_height) {
- h = tiger->video_height - ypos;
- }
-
- for (i = 0; i < h; i++) {
- pimage = image + i * image_width * 4;
- dest = yuv_pixels + (i + ypos) * tiger->video_width * 2 + xpos * 2;
- for (j = 0; j < w; j += 2) {
- b0 = pimage[TIGER_ARGB_B];
- g0 = pimage[TIGER_ARGB_G];
- r0 = pimage[TIGER_ARGB_R];
- a0 = pimage[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a0, r0, g0, b0);
- pimage += 4;
-
- b1 = pimage[TIGER_ARGB_B];
- g1 = pimage[TIGER_ARGB_G];
- r1 = pimage[TIGER_ARGB_R];
- a1 = pimage[TIGER_ARGB_A];
- TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
- pimage += 4;
-
- a0 += a1 + 2;
- a0 /= 2;
- if (a0 == 0) {
- dest += 4;
- continue;
- }
-
- COMP_Y (y0, r0, g0, b0);
- COMP_Y (y1, r1, g1, b1);
-
- b0 += b1 + 2;
- g0 += g1 + 2;
- r0 += r1 + 2;
-
- b0 /= 2;
- g0 /= 2;
- r0 /= 2;
-
- COMP_U (u, r0, g0, b0);
- COMP_V (v, r0, g0, b0);
-
- BLEND (*dest, a0, u, *dest);
- dest++;
- BLEND (*dest, a0, y0, *dest);
- dest++;
- BLEND (*dest, a0, v, *dest);
- dest++;
- BLEND (*dest, a0, y1, *dest);
- dest++;
- }
- }
-}
-
-static inline void
-gst_kate_tiger_blit_AYUV (GstKateTiger * tiger,
- guint8 * rgb_pixels, gint xpos, gint ypos, const guint8 * image,
- gint image_width, gint image_height)
-{
- int a, r, g, b, a1;
- int y, u, v;
- int i, j;
- int h, w;
- const guint8 *pimage;
- guchar *dest;
-
- w = image_width;
- h = image_height;
-
- if (xpos < 0) {
- xpos = 0;
- }
-
- if (xpos + w > tiger->video_width) {
- w = tiger->video_width - xpos;
- }
-
- if (ypos + h > tiger->video_height) {
- h = tiger->video_height - ypos;
- }
-
- for (i = 0; i < h; i++) {
- pimage = image + i * image_width * 4;
- dest = rgb_pixels + (i + ypos) * 4 * tiger->video_width + xpos * 4;
- for (j = 0; j < w; j++) {
- a = pimage[TIGER_ARGB_A];
- b = pimage[TIGER_ARGB_B];
- g = pimage[TIGER_ARGB_G];
- r = pimage[TIGER_ARGB_R];
-
- TIGER_UNPREMULTIPLY (a, r, g, b);
-
- // convert background to yuv
- COMP_Y (y, r, g, b);
- COMP_U (u, r, g, b);
- COMP_V (v, r, g, b);
-
- // preform text "OVER" background alpha compositing
- a1 = a + (dest[0] * (255 - a)) / 255 + 1; // add 1 to prevent divide by 0
- OVER (dest[1], a, y, dest[0], dest[1], a1);
- OVER (dest[2], a, u, dest[0], dest[2], a1);
- OVER (dest[3], a, v, dest[0], dest[3], a1);
- dest[0] = a1 - 1; // remove the temporary 1 we added
+ for (i = 0; i < tiger->video_height; i++) {
+ pimage = text_image + 4 * (i * tiger->video_width);
+ for (j = 0; j < tiger->video_width; j++) {
+ TIGER_UNPREMULTIPLY (pimage[TIGER_ARGB_A], pimage[TIGER_ARGB_R],
+ pimage[TIGER_ARGB_G], pimage[TIGER_ARGB_B]);
pimage += 4;
- dest += 4;
}
}
}
-static void
-gst_kate_tiger_blend_yuv (GstKateTiger * tiger, GstBuffer * video_frame,
- const guint8 * image, gint image_width, gint image_height)
-{
- gint xpos = 0, ypos = 0;
- gint width, height;
-
- width = image_width;
- height = image_height;
-
- switch (tiger->video_format) {
- case GST_VIDEO_FORMAT_I420:
- case GST_VIDEO_FORMAT_YV12:
- gst_kate_tiger_blit_I420_YV12 (tiger,
- GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
- image_height);
- break;
- case GST_VIDEO_FORMAT_NV12:
- case GST_VIDEO_FORMAT_NV21:
- gst_kate_tiger_blit_NV12_NV21 (tiger,
- GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
- image_height);
- break;
- case GST_VIDEO_FORMAT_UYVY:
- gst_kate_tiger_blit_UYVY (tiger,
- GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
- image_height);
- break;
- case GST_VIDEO_FORMAT_AYUV:
- gst_kate_tiger_blit_AYUV (tiger,
- GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
- image_height);
- break;
- default:
- g_assert_not_reached ();
- }
-}
-
static GstFlowReturn
gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
{
@@ -1249,14 +869,12 @@ gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
/* and setup that buffer before rendering */
if (gst_video_format_is_yuv (tiger->video_format)) {
- guint8 *tmp = g_realloc (tiger->render_buffer,
- tiger->video_width * tiger->video_height * 4);
- if (!tmp) {
- GST_WARNING_OBJECT (tiger, "Failed to allocate render buffer");
- goto pass;
+ if (!tiger->render_buffer) {
+ tiger->render_buffer =
+ gst_buffer_new_and_alloc (tiger->video_width * tiger->video_height *
+ 4);
}
- tiger->render_buffer = tmp;
- ptr = tiger->render_buffer;
+ ptr = GST_BUFFER_DATA (tiger->render_buffer);
tiger_renderer_set_surface_clear_color (tiger->tr, 1, 0.0, 0.0, 0.0, 0.0);
} else {
ptr = GST_BUFFER_DATA (buf);
@@ -1278,8 +896,12 @@ gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
}
if (gst_video_format_is_yuv (tiger->video_format)) {
- gst_kate_tiger_blend_yuv (tiger, buf, tiger->render_buffer,
- tiger->video_width, tiger->video_height);
+    /* As the GstVideoOverlayComposition supports only unpremultiplied ARGB,
+     * we need to unpremultiply it */
+ gst_kate_tiger_unpremultiply (tiger);
+ gst_kate_tiger_set_composition (tiger);
+ if (tiger->composition)
+ gst_video_overlay_composition_blend (tiger->composition, buf);
}
pass:
diff --git a/ext/kate/gstkatetiger.h b/ext/kate/gstkatetiger.h
index f966cbf15..ad7cb1433 100644
--- a/ext/kate/gstkatetiger.h
+++ b/ext/kate/gstkatetiger.h
@@ -50,6 +50,7 @@
#include <tiger/tiger.h>
#include <gst/gst.h>
#include <gst/video/video.h>
+#include <gst/video/video-overlay-composition.h>
#include "gstkateutil.h"
G_BEGIN_DECLS
@@ -95,7 +96,8 @@ struct _GstKateTiger
gint video_width;
gint video_height;
gboolean swap_rgb;
- guint8 *render_buffer;
+ GstBuffer *render_buffer;
+ GstVideoOverlayComposition *composition;
GMutex *mutex;
GCond *cond;
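
For context, the katetiger changes above drop the hand-rolled RGB-to-YUV blitting in favour of the generic overlay-composition API. A minimal sketch of that pattern follows; it is an editorial illustration, not code from this commit, the function and variable names (blend_overlay, render, frame) are invented, and only API calls already present in the diff are used.

#include <gst/video/video-overlay-composition.h>

/* Sketch only: wrap an unpremultiplied ARGB render buffer in an overlay
 * rectangle, build a one-rectangle composition and blend it onto a video
 * frame; width/height describe both the overlay and the frame here. */
static void
blend_overlay (GstBuffer * render, GstBuffer * frame, gint width, gint height)
{
  GstVideoOverlayRectangle *rect;
  GstVideoOverlayComposition *comp;

  rect = gst_video_overlay_rectangle_new_argb (render, width, height,
      4 * width, 0, 0, width, height, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  comp = gst_video_overlay_composition_new (rect);
  gst_video_overlay_rectangle_unref (rect);

  /* the library performs the per-format blending the removed blit code did by hand */
  gst_video_overlay_composition_blend (comp, frame);
  gst_video_overlay_composition_unref (comp);
}
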
diff --git a/ext/opencv/gstmotioncells.c b/ext/opencv/gstmotioncells.c
index bf2eb7f60..d300f9d31 100644
--- a/ext/opencv/gstmotioncells.c
+++ b/ext/opencv/gstmotioncells.c
@@ -194,8 +194,6 @@ gst_motion_cells_finalize (GObject * obj)
GFREE (filter->basename_datafile);
GFREE (filter->datafile_extension);
- g_mutex_free (filter->propset_mutex);
-
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
@@ -328,7 +326,6 @@ gst_motion_cells_class_init (GstMotioncellsClass * klass)
static void
gst_motion_cells_init (GstMotioncells * filter, GstMotioncellsClass * gclass)
{
- filter->propset_mutex = g_mutex_new ();
filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_setcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_motion_cells_set_caps));
@@ -398,9 +395,7 @@ gst_motion_cells_init (GstMotioncells * filter, GstMotioncellsClass * gclass)
filter->thickness = THICKNESS_DEF;
filter->datafileidx = 0;
- g_mutex_lock (filter->propset_mutex);
filter->id = motion_cells_init ();
- g_mutex_unlock (filter->propset_mutex);
}
@@ -419,28 +414,19 @@ gst_motion_cells_set_property (GObject * object, guint prop_id,
int tmpuy = -1;
int tmplx = -1;
int tmply = -1;
- GstStateChangeReturn ret;
- g_mutex_lock (filter->propset_mutex);
+ GST_OBJECT_LOCK (filter);
switch (prop_id) {
case PROP_GRID_X:
- ret = gst_element_get_state (GST_ELEMENT (filter),
- &filter->state, NULL, 250 * GST_NSECOND);
filter->gridx = g_value_get_int (value);
- if (filter->prevgridx != filter->gridx
- && ret == GST_STATE_CHANGE_SUCCESS
- && filter->state == GST_STATE_PLAYING) {
+ if (filter->prevgridx != filter->gridx && !filter->firstframe) {
filter->changed_gridx = true;
}
filter->prevgridx = filter->gridx;
break;
case PROP_GRID_Y:
- ret = gst_element_get_state (GST_ELEMENT (filter),
- &filter->state, NULL, 250 * GST_NSECOND);
filter->gridy = g_value_get_int (value);
- if (filter->prevgridy != filter->gridy
- && ret == GST_STATE_CHANGE_SUCCESS
- && filter->state == GST_STATE_PLAYING) {
+ if (filter->prevgridy != filter->gridy && !filter->firstframe) {
filter->changed_gridy = true;
}
filter->prevgridy = filter->gridy;
@@ -473,9 +459,7 @@ gst_motion_cells_set_property (GObject * object, guint prop_id,
filter->calculate_motion = g_value_get_boolean (value);
break;
case PROP_DATE:
- ret = gst_element_get_state (GST_ELEMENT (filter),
- &filter->state, NULL, 250 * GST_NSECOND);
- if (ret == GST_STATE_CHANGE_SUCCESS && filter->state == GST_STATE_PLAYING) {
+ if (!filter->firstframe) {
filter->changed_startime = true;
}
filter->starttime = g_value_get_long (value);
@@ -629,7 +613,7 @@ gst_motion_cells_set_property (GObject * object, guint prop_id,
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
- g_mutex_unlock (filter->propset_mutex);
+ GST_OBJECT_UNLOCK (filter);
}
static void
@@ -640,6 +624,7 @@ gst_motion_cells_get_property (GObject * object, guint prop_id,
GString *str;
int i;
+ GST_OBJECT_LOCK (filter);
switch (prop_id) {
case PROP_GRID_X:
g_value_set_int (value, filter->gridx);
@@ -751,6 +736,7 @@ gst_motion_cells_get_property (GObject * object, guint prop_id,
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
+ GST_OBJECT_UNLOCK (filter);
}
static void
@@ -861,17 +847,17 @@ gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps)
static GstFlowReturn
gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
{
-
GstMotioncells *filter;
-
filter = gst_motion_cells (GST_OBJECT_PARENT (pad));
+ GST_OBJECT_LOCK (filter);
if (filter->calculate_motion) {
double sensitivity;
int framerate, gridx, gridy, motionmaskcells_count, motionmaskcoord_count,
motioncells_count, i;
int thickness, success, motioncellsidxcnt, numberOfCells,
motioncellsnumber, cellsOfInterestNumber;
- int mincellsOfInterestNumber, motiondetect;
+ int mincellsOfInterestNumber, motiondetect, minimum_motion_frames,
+ postnomotion;
char *datafile;
bool display, changed_datafile, useAlpha;
gint64 starttime;
@@ -879,14 +865,14 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
motioncellidx *motionmaskcellsidx;
cellscolor motioncellscolor;
motioncellidx *motioncellsidx;
- g_mutex_lock (filter->propset_mutex);
buf = gst_buffer_make_writable (buf);
filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
if (filter->firstframe) {
setPrevFrame (filter->cvImage, filter->id);
filter->firstframe = FALSE;
}
-
+ minimum_motion_frames = filter->minimum_motion_frames;
+ postnomotion = filter->postnomotion;
sensitivity = filter->sensitivity;
framerate = filter->framerate;
gridx = filter->gridx;
@@ -963,6 +949,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
motionmaskcoords, motionmaskcells_count, motionmaskcellsidx,
motioncellscolor, motioncells_count, motioncellsidx, starttime,
datafile, changed_datafile, thickness, filter->id);
+
if ((success == 1) && (filter->sent_init_error_msg == false)) {
char *initfailedreason;
int initerrorcode;
@@ -996,7 +983,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
GFREE (motionmaskcoords);
GFREE (motionmaskcellsidx);
GFREE (motioncellsidx);
- g_mutex_unlock (filter->propset_mutex);
+ GST_OBJECT_UNLOCK (filter);
return gst_pad_push (filter->srcpad, buf);
}
filter->changed_datafile = getChangedDataFile (filter->id);
@@ -1007,6 +994,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
(filter->motioncells_count) : (numberOfCells);
mincellsOfInterestNumber =
floor ((double) cellsOfInterestNumber * filter->threshold);
+ GST_OBJECT_UNLOCK (filter);
motiondetect = (motioncellsnumber >= mincellsOfInterestNumber) ? 1 : 0;
if ((motioncellsidxcnt > 0) && (motiondetect == 1)) {
char *detectedmotioncells;
@@ -1015,7 +1003,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
if (detectedmotioncells) {
filter->consecutive_motion++;
if ((filter->previous_motion == false)
- && (filter->consecutive_motion >= filter->minimum_motion_frames)) {
+ && (filter->consecutive_motion >= minimum_motion_frames)) {
GstStructure *s;
GstMessage *m;
filter->previous_motion = true;
@@ -1061,7 +1049,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
}
}
}
- if (filter->postnomotion > 0) {
+ if (postnomotion > 0) {
guint64 last_buf_timestamp = GST_BUFFER_TIMESTAMP (buf) / 1000000000l;
if ((last_buf_timestamp -
(filter->last_motion_timestamp / 1000000000l)) >=
@@ -1086,10 +1074,9 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
GFREE (motionmaskcoords);
GFREE (motionmaskcellsidx);
GFREE (motioncellsidx);
-
- g_mutex_unlock (filter->propset_mutex);
+ } else {
+ GST_OBJECT_UNLOCK (filter);
}
-
return gst_pad_push (filter->srcpad, buf);
}
diff --git a/ext/opencv/gstmotioncells.h b/ext/opencv/gstmotioncells.h
index da4e822ef..8a2930eee 100644
--- a/ext/opencv/gstmotioncells.h
+++ b/ext/opencv/gstmotioncells.h
@@ -107,7 +107,6 @@ struct _GstMotioncells
gint width, height;
//time stuff
struct timeval tv;
- GMutex *propset_mutex;
double framerate;
};
diff --git a/ext/opus/Makefile.am b/ext/opus/Makefile.am
index cb0a9b338..cdf3c30ac 100644
--- a/ext/opus/Makefile.am
+++ b/ext/opus/Makefile.am
@@ -1,6 +1,6 @@
plugin_LTLIBRARIES = libgstopus.la
-libgstopus_la_SOURCES = gstopus.c gstopusdec.c gstopusenc.c gstopusparse.c gstopusheader.c gstopuscommon.c
+libgstopus_la_SOURCES = gstopus.c gstopusdec.c gstopusenc.c gstopusparse.c gstopusheader.c gstopuscommon.c gstrtpopuspay.c gstrtpopusdepay.c
libgstopus_la_CFLAGS = \
-DGST_USE_UNSTABLE_API \
$(GST_PLUGINS_BASE_CFLAGS) \
@@ -9,10 +9,11 @@ libgstopus_la_CFLAGS = \
libgstopus_la_LIBADD = \
-lgstaudio-$(GST_MAJORMINOR) \
$(GST_PLUGINS_BASE_LIBS) -lgsttag-$(GST_MAJORMINOR) \
+ -lgstrtp-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) \
$(GST_LIBS) \
$(OPUS_LIBS)
libgstopus_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) $(LIBM)
libgstopus_la_LIBTOOLFLAGS = --tag=disable-static
-noinst_HEADERS = gstopusenc.h gstopusdec.h gstopusparse.h gstopusheader.h gstopuscommon.h
+noinst_HEADERS = gstopusenc.h gstopusdec.h gstopusparse.h gstopusheader.h gstopuscommon.h gstrtpopuspay.h gstrtpopusdepay.h
diff --git a/ext/opus/gstopus.c b/ext/opus/gstopus.c
index c5f68a131..8db6e197f 100644
--- a/ext/opus/gstopus.c
+++ b/ext/opus/gstopus.c
@@ -25,6 +25,9 @@
#include "gstopusenc.h"
#include "gstopusparse.h"
+#include "gstrtpopuspay.h"
+#include "gstrtpopusdepay.h"
+
#include <gst/tag/tag.h>
static gboolean
@@ -43,6 +46,14 @@ plugin_init (GstPlugin * plugin)
GST_TYPE_OPUS_PARSE))
return FALSE;
+ if (!gst_element_register (plugin, "rtpopusdepay", GST_RANK_NONE,
+ GST_TYPE_RTP_OPUS_DEPAY))
+ return FALSE;
+
+ if (!gst_element_register (plugin, "rtpopuspay", GST_RANK_NONE,
+ GST_TYPE_RTP_OPUS_PAY))
+ return FALSE;
+
gst_tag_register_musicbrainz_tags ();
return TRUE;
diff --git a/ext/opus/gstopuscommon.c b/ext/opus/gstopuscommon.c
index 426c5b897..dbf585a82 100644
--- a/ext/opus/gstopuscommon.c
+++ b/ext/opus/gstopuscommon.c
@@ -17,6 +17,8 @@
* Boston, MA 02111-1307, USA.
*/
+#include <stdio.h>
+#include <string.h>
#include "gstopuscommon.h"
/* http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-800004.3.9 */
@@ -86,3 +88,19 @@ const char *gst_opus_channel_names[] = {
"side right",
"none"
};
+
+void
+gst_opus_common_log_channel_mapping_table (GstElement * element,
+ GstDebugCategory * category, const char *msg, int n_channels,
+ const guint8 * table)
+{
+ char s[8 + 256 * 4] = "[ "; /* enough for 256 times "255 " at most */
+ int n;
+
+ for (n = 0; n < n_channels; ++n) {
+ size_t len = strlen (s);
+ snprintf (s + len, sizeof (s) - len, "%d ", table[n]);
+ }
+ strcat (s, "]");
+ GST_CAT_LEVEL_LOG (category, GST_LEVEL_INFO, element, "%s: %s", msg, s);
+}
diff --git a/ext/opus/gstopuscommon.h b/ext/opus/gstopuscommon.h
index 65b944e9e..1fba5650d 100644
--- a/ext/opus/gstopuscommon.h
+++ b/ext/opus/gstopuscommon.h
@@ -28,6 +28,9 @@ G_BEGIN_DECLS
extern const GstAudioChannelPosition gst_opus_channel_positions[][8];
extern const char *gst_opus_channel_names[];
+extern void gst_opus_common_log_channel_mapping_table (GstElement *element,
+ GstDebugCategory * category, const char *msg,
+ int n_channels, const guint8 *table);
G_END_DECLS
diff --git a/ext/opus/gstopusdec.c b/ext/opus/gstopusdec.c
index b060b22ae..67662456e 100644
--- a/ext/opus/gstopusdec.c
+++ b/ext/opus/gstopusdec.c
@@ -38,12 +38,11 @@
*/
#ifdef HAVE_CONFIG_H
-# include "config.h"
+#include "config.h"
#endif
#include <math.h>
#include <string.h>
-#include <gst/tag/tag.h>
#include "gstopusheader.h"
#include "gstopuscommon.h"
#include "gstopusdec.h"
@@ -57,7 +56,7 @@ GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("audio/x-raw, "
"format = (string) { " GST_AUDIO_NE (S16) " }, "
- "rate = (int) { 8000, 12000, 16000, 24000, 48000 }, "
+ "rate = (int) { 48000, 24000, 16000, 12000, 8000 }, "
"channels = (int) [ 1, 8 ] ")
);
@@ -207,12 +206,32 @@ gst_opus_dec_get_r128_volume (gint16 r128_gain)
return DB_TO_LINEAR (gst_opus_dec_get_r128_gain (r128_gain));
}
+static GstCaps *
+gst_opus_dec_negotiate (GstOpusDec * dec)
+{
+ GstCaps *caps = gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
+ GstStructure *s;
+
+ caps = gst_caps_make_writable (caps);
+ gst_caps_truncate (caps);
+
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_fixate_field_nearest_int (s, "rate", 48000);
+ gst_structure_get_int (s, "rate", &dec->sample_rate);
+ gst_structure_fixate_field_nearest_int (s, "channels", dec->n_channels);
+ gst_structure_get_int (s, "channels", &dec->n_channels);
+
+ GST_INFO_OBJECT (dec, "Negotiated %d channels, %d Hz", dec->n_channels,
+ dec->sample_rate);
+
+ return caps;
+}
+
static GstFlowReturn
gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
const guint8 *data;
GstCaps *caps;
- GstStructure *s;
const GstAudioChannelPosition *pos = NULL;
g_return_val_if_fail (gst_opus_header_is_id_header (buf), GST_FLOW_ERROR);
@@ -277,16 +296,7 @@ gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
}
}
- /* negotiate width with downstream */
- caps = gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
- s = gst_caps_get_structure (caps, 0);
- gst_structure_fixate_field_nearest_int (s, "rate", 48000);
- gst_structure_get_int (s, "rate", &dec->sample_rate);
- gst_structure_fixate_field_nearest_int (s, "channels", dec->n_channels);
- gst_structure_get_int (s, "channels", &dec->n_channels);
-
- GST_INFO_OBJECT (dec, "Negotiated %d channels, %d Hz", dec->n_channels,
- dec->sample_rate);
+ caps = gst_opus_dec_negotiate (dec);
if (pos) {
GST_DEBUG_OBJECT (dec, "Setting channel positions on caps");
@@ -327,11 +337,36 @@ opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
GstBuffer *buf;
if (dec->state == NULL) {
+ /* If we did not get any headers, default to 2 channels */
+ if (dec->n_channels == 0) {
+ GstCaps *caps;
+ GST_INFO_OBJECT (dec, "No header, assuming single stream");
+ dec->n_channels = 2;
+ dec->sample_rate = 48000;
+ caps = gst_opus_dec_negotiate (dec);
+ GST_INFO_OBJECT (dec, "Setting src caps to %" GST_PTR_FORMAT, caps);
+ gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps);
+ gst_caps_unref (caps);
+ /* default stereo mapping */
+ dec->channel_mapping_family = 0;
+ dec->channel_mapping[0] = 0;
+ dec->channel_mapping[1] = 1;
+ dec->n_streams = 1;
+ dec->n_stereo_streams = 1;
+ }
+
GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",
dec->n_channels, dec->sample_rate);
- dec->state = opus_multistream_decoder_create (dec->sample_rate,
- dec->n_channels, dec->n_streams, dec->n_stereo_streams,
- dec->channel_mapping, &err);
+#ifndef GST_DISABLE_DEBUG
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,
+ "Mapping table", dec->n_channels, dec->channel_mapping);
+#endif
+
+ GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,
+ dec->n_stereo_streams);
+ dec->state =
+ opus_multistream_decoder_create (dec->sample_rate, dec->n_channels,
+ dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err);
if (!dec->state || err != OPUS_OK)
goto creation_failed;
}
@@ -411,11 +446,11 @@ opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
GST_INFO_OBJECT (dec,
"Skipping %u samples (%u at 48000 Hz, %u left to skip)", skip,
scaled_skip, dec->pre_skip);
+ }
- if (gst_buffer_get_size (outbuf) == 0) {
- gst_buffer_unref (outbuf);
- outbuf = NULL;
- }
+ if (gst_buffer_get_size (outbuf) == 0) {
+ gst_buffer_unref (outbuf);
+ outbuf = NULL;
}
/* Apply gain */
diff --git a/ext/opus/gstopusenc.c b/ext/opus/gstopusenc.c
index ff9243ad4..999c0f39f 100644
--- a/ext/opus/gstopusenc.c
+++ b/ext/opus/gstopusenc.c
@@ -161,6 +161,8 @@ static void gst_opus_enc_finalize (GObject * object);
static gboolean gst_opus_enc_sink_event (GstAudioEncoder * benc,
GstEvent * event);
+static GstCaps *gst_opus_enc_sink_getcaps (GstAudioEncoder * benc,
+ GstCaps * filter);
static gboolean gst_opus_enc_setup (GstOpusEnc * enc);
static void gst_opus_enc_get_property (GObject * object, guint prop_id,
@@ -211,6 +213,7 @@ gst_opus_enc_class_init (GstOpusEncClass * klass)
base_class->set_format = GST_DEBUG_FUNCPTR (gst_opus_enc_set_format);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_opus_enc_handle_frame);
base_class->event = GST_DEBUG_FUNCPTR (gst_opus_enc_sink_event);
+ base_class->getcaps = GST_DEBUG_FUNCPTR (gst_opus_enc_sink_getcaps);
g_object_class_install_property (gobject_class, PROP_AUDIO,
g_param_spec_boolean ("audio", "Audio or voice",
@@ -401,7 +404,50 @@ gst_opus_enc_get_frame_samples (GstOpusEnc * enc)
}
static void
-gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
+gst_opus_enc_setup_trivial_mapping (GstOpusEnc * enc, guint8 mapping[256])
+{
+ int n;
+
+ for (n = 0; n < 255; ++n)
+ mapping[n] = n;
+}
+
+static int
+gst_opus_enc_find_channel_position (GstOpusEnc * enc, const GstAudioInfo * info,
+ GstAudioChannelPosition position)
+{
+ int n;
+ for (n = 0; n < enc->n_channels; ++n) {
+ if (GST_AUDIO_INFO_POSITION (info, n) == position) {
+ return n;
+ }
+ }
+ return -1;
+}
+
+static int
+gst_opus_enc_find_channel_position_in_vorbis_order (GstOpusEnc * enc,
+ GstAudioChannelPosition position)
+{
+ int c;
+
+ for (c = 0; c < enc->n_channels; ++c) {
+ if (gst_opus_channel_positions[enc->n_channels - 1][c] == position) {
+ GST_INFO_OBJECT (enc,
+ "Channel position %s maps to index %d in Vorbis order",
+ gst_opus_channel_names[position], c);
+ return c;
+ }
+ }
+ GST_WARNING_OBJECT (enc,
+ "Channel position %s is not representable in Vorbis order",
+ gst_opus_channel_names[position]);
+ return -1;
+}
+
+static void
+gst_opus_enc_setup_channel_mappings (GstOpusEnc * enc,
+ const GstAudioInfo * info)
{
#define MAPS(idx,pos) (GST_AUDIO_INFO_POSITION (info, (idx)) == GST_AUDIO_CHANNEL_POSITION_##pos)
@@ -411,14 +457,15 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
enc->n_channels);
/* Start by setting up a default trivial mapping */
- for (n = 0; n < 255; ++n)
- enc->channel_mapping[n] = n;
+ enc->n_stereo_streams = 0;
+ gst_opus_enc_setup_trivial_mapping (enc, enc->encoding_channel_mapping);
+ gst_opus_enc_setup_trivial_mapping (enc, enc->decoding_channel_mapping);
/* For one channel, use the basic RTP mapping */
if (enc->n_channels == 1) {
GST_INFO_OBJECT (enc, "Mono, trivial RTP mapping");
enc->channel_mapping_family = 0;
- enc->channel_mapping[0] = 0;
+ /* implicit mapping for family 0 */
return;
}
@@ -428,9 +475,11 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
if (MAPS (0, FRONT_LEFT) && MAPS (1, FRONT_RIGHT)) {
GST_INFO_OBJECT (enc, "Stereo, canonical mapping");
enc->channel_mapping_family = 0;
+ enc->n_stereo_streams = 1;
/* The channel mapping is implicit for family 0, that's why we do not
attempt to create one for right/left - this will be mapped to the
Vorbis mapping below. */
+ return;
} else {
GST_DEBUG_OBJECT (enc, "Stereo, but not canonical mapping, continuing");
}
@@ -438,42 +487,115 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
/* For channels between 1 and 8, we use the Vorbis mapping if we can
find a permutation that matches it. Mono will have been taken care
- of earlier, but this code also handles it. */
+ of earlier, but this code also handles it. Same for left/right stereo.
+ There are two mappings. One maps the input channels to an ordering
+ which has the natural pairs first so they can benefit from the Opus
+ stereo channel coupling, and the other maps this ordering to the
+ Vorbis ordering. */
if (enc->n_channels >= 1 && enc->n_channels <= 8) {
+ int c0, c1, c0v, c1v;
+ int mapped;
+ gboolean positions_done[256];
+ static const GstAudioChannelPosition pairs[][2] = {
+ {GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT},
+ {GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
+ GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT},
+ {GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
+ {GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
+ {GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+ GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT},
+ };
+ size_t pair;
+
GST_DEBUG_OBJECT (enc,
- "In range for the Vorbis mapping, checking channel positions");
- for (n = 0; n < enc->n_channels; ++n) {
- GstAudioChannelPosition pos = GST_AUDIO_INFO_POSITION (info, n);
- int c;
-
- GST_DEBUG_OBJECT (enc, "Channel %d has position %d (%s)", n, pos,
- gst_opus_channel_names[pos]);
- for (c = 0; c < enc->n_channels; ++c) {
- if (gst_opus_channel_positions[enc->n_channels - 1][c] == pos) {
- GST_DEBUG_OBJECT (enc, "Found in Vorbis mapping as channel %d", c);
- break;
+ "In range for the Vorbis mapping, building channel mapping tables");
+
+ enc->n_stereo_streams = 0;
+ mapped = 0;
+ for (n = 0; n < 256; ++n)
+ positions_done[n] = FALSE;
+
+ /* First, find any natural pairs, and move them to the front */
+ for (pair = 0; pair < G_N_ELEMENTS (pairs); ++pair) {
+ GstAudioChannelPosition p0 = pairs[pair][0];
+ GstAudioChannelPosition p1 = pairs[pair][1];
+ c0 = gst_opus_enc_find_channel_position (enc, info, p0);
+ c1 = gst_opus_enc_find_channel_position (enc, info, p1);
+ if (c0 >= 0 && c1 >= 0) {
+ /* We found a natural pair */
+ GST_DEBUG_OBJECT (enc, "Natural pair '%s/%s' found at %d %d",
+ gst_opus_channel_names[p0], gst_opus_channel_names[p1], c0, c1);
+ /* Find where they map in Vorbis order */
+ c0v = gst_opus_enc_find_channel_position_in_vorbis_order (enc, p0);
+ c1v = gst_opus_enc_find_channel_position_in_vorbis_order (enc, p1);
+ if (c0v < 0 || c1v < 0) {
+ GST_WARNING_OBJECT (enc,
+ "Cannot map channel positions to Vorbis order, using unknown mapping");
+ enc->channel_mapping_family = 255;
+ enc->n_stereo_streams = 0;
+ return;
}
+
+ enc->encoding_channel_mapping[mapped] = c0;
+ enc->encoding_channel_mapping[mapped + 1] = c1;
+ enc->decoding_channel_mapping[c0v] = mapped;
+ enc->decoding_channel_mapping[c1v] = mapped + 1;
+ enc->n_stereo_streams++;
+ mapped += 2;
+ positions_done[p0] = positions_done[p1] = TRUE;
}
- if (c == enc->n_channels) {
- /* We did not find that position, so use undefined */
- GST_WARNING_OBJECT (enc,
- "Position %d (%s) not found in Vorbis mapping, using unknown mapping",
- pos, gst_opus_channel_positions[pos]);
- enc->channel_mapping_family = 255;
- return;
+ }
+
+ /* Now add all other input channels as mono streams */
+ for (n = 0; n < enc->n_channels; ++n) {
+ GstAudioChannelPosition position = GST_AUDIO_INFO_POSITION (info, n);
+
+ /* if we already mapped it while searching for pairs, nothing else
+ needs to be done */
+ if (!positions_done[position]) {
+ int cv;
+ GST_DEBUG_OBJECT (enc, "Channel position %s is not mapped yet, adding",
+ gst_opus_channel_names[position]);
+ cv = gst_opus_enc_find_channel_position_in_vorbis_order (enc, position);
+ if (cv < 0) {
+ GST_WARNING_OBJECT (enc,
+ "Cannot map channel positions to Vorbis order, using unknown mapping");
+ enc->channel_mapping_family = 255;
+ enc->n_stereo_streams = 0;
+ return;
+ }
+ enc->encoding_channel_mapping[mapped] = n;
+ enc->decoding_channel_mapping[cv] = mapped;
+ mapped++;
}
- GST_DEBUG_OBJECT (enc, "Mapping output channel %d to %d (%s)", c, n,
- gst_opus_channel_names[pos]);
- enc->channel_mapping[c] = n;
}
- GST_INFO_OBJECT (enc, "Permutation found, using Vorbis mapping");
+
+#ifndef GST_DISABLE_DEBUG
+ GST_INFO_OBJECT (enc,
+ "Mapping tables built: %d channels, %d stereo streams", enc->n_channels,
+ enc->n_stereo_streams);
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
+ "Encoding mapping table", enc->n_channels,
+ enc->encoding_channel_mapping);
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
+ "Decoding mapping table", enc->n_channels,
+ enc->decoding_channel_mapping);
+#endif
+
enc->channel_mapping_family = 1;
return;
}
- /* For other cases, we use undefined, with the default trivial mapping */
+ /* More than 8 channels, if future mappings are added for those */
+
+ /* For other cases, we use undefined, with the default trivial mapping
+ and all mono streams */
GST_WARNING_OBJECT (enc, "Unknown mapping");
enc->channel_mapping_family = 255;
+ enc->n_stereo_streams = 0;
#undef MAPS
}
@@ -489,7 +611,7 @@ gst_opus_enc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
enc->n_channels = GST_AUDIO_INFO_CHANNELS (info);
enc->sample_rate = GST_AUDIO_INFO_RATE (info);
- gst_opus_enc_setup_channel_mapping (enc, info);
+ gst_opus_enc_setup_channel_mappings (enc, info);
GST_DEBUG_OBJECT (benc, "Setup with %d channels, %d Hz", enc->n_channels,
enc->sample_rate);
@@ -514,17 +636,24 @@ gst_opus_enc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
static gboolean
gst_opus_enc_setup (GstOpusEnc * enc)
{
- int error = OPUS_OK, n;
- guint8 trivial_mapping[256];
-
- GST_DEBUG_OBJECT (enc, "setup");
-
- for (n = 0; n < 256; ++n)
- trivial_mapping[n] = n;
+ int error = OPUS_OK;
+
+#ifndef GST_DISABLE_DEBUG
+ GST_DEBUG_OBJECT (enc,
+ "setup: %d Hz, %d channels, %d stereo streams, family %d",
+ enc->sample_rate, enc->n_channels, enc->n_stereo_streams,
+ enc->channel_mapping_family);
+ GST_INFO_OBJECT (enc, "Mapping tables built: %d channels, %d stereo streams",
+ enc->n_channels, enc->n_stereo_streams);
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
+ "Encoding mapping table", enc->n_channels, enc->encoding_channel_mapping);
+ gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
+ "Decoding mapping table", enc->n_channels, enc->decoding_channel_mapping);
+#endif
- enc->state =
- opus_multistream_encoder_create (enc->sample_rate, enc->n_channels,
- enc->n_channels, 0, trivial_mapping,
+ enc->state = opus_multistream_encoder_create (enc->sample_rate,
+ enc->n_channels, enc->n_channels - enc->n_stereo_streams,
+ enc->n_stereo_streams, enc->encoding_channel_mapping,
enc->audio_or_voip ? OPUS_APPLICATION_AUDIO : OPUS_APPLICATION_VOIP,
&error);
if (!enc->state || error != OPUS_OK)
@@ -580,6 +709,75 @@ gst_opus_enc_sink_event (GstAudioEncoder * benc, GstEvent * event)
return FALSE;
}
+static GstCaps *
+gst_opus_enc_sink_getcaps (GstAudioEncoder * benc, GstCaps * filter)
+{
+ GstOpusEnc *enc;
+ GstCaps *caps;
+ GstCaps *peercaps = NULL;
+ GstCaps *intersect = NULL;
+ guint i;
+ gboolean allow_multistream;
+
+ enc = GST_OPUS_ENC (benc);
+
+ GST_DEBUG_OBJECT (enc, "sink getcaps");
+
+ peercaps = gst_pad_peer_query_caps (GST_AUDIO_ENCODER_SRC_PAD (benc), filter);
+ if (!peercaps) {
+ GST_DEBUG_OBJECT (benc, "No peercaps, returning template sink caps");
+ return
+ gst_caps_copy (gst_pad_get_pad_template_caps
+ (GST_AUDIO_ENCODER_SINK_PAD (benc)));
+ }
+
+ intersect = gst_caps_intersect (peercaps,
+ gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (benc)));
+ gst_caps_unref (peercaps);
+
+ if (gst_caps_is_empty (intersect))
+ return intersect;
+
+ allow_multistream = FALSE;
+ for (i = 0; i < gst_caps_get_size (intersect); i++) {
+ GstStructure *s = gst_caps_get_structure (intersect, i);
+ gboolean multistream;
+ if (gst_structure_get_boolean (s, "multistream", &multistream)) {
+ if (multistream) {
+ allow_multistream = TRUE;
+ }
+ } else {
+ allow_multistream = TRUE;
+ }
+ }
+
+ gst_caps_unref (intersect);
+
+ caps =
+ gst_caps_copy (gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SINK_PAD
+ (benc)));
+ if (!allow_multistream) {
+ GValue range = { 0 };
+ g_value_init (&range, GST_TYPE_INT_RANGE);
+ gst_value_set_int_range (&range, 1, 2);
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+ gst_structure_set_value (s, "channels", &range);
+ }
+ g_value_unset (&range);
+ }
+
+ if (filter) {
+ GstCaps *tmp = gst_caps_intersect_full (caps, filter,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = tmp;
+ }
+
+ GST_DEBUG_OBJECT (enc, "Returning caps: %" GST_PTR_FORMAT, caps);
+ return caps;
+}
+
static GstFlowReturn
gst_opus_enc_encode (GstOpusEnc * enc, GstBuffer * buf)
{
@@ -684,7 +882,8 @@ gst_opus_enc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
enc->headers = NULL;
gst_opus_header_create_caps (&caps, &enc->headers, enc->n_channels,
- enc->sample_rate, enc->channel_mapping_family, enc->channel_mapping,
+ enc->n_stereo_streams, enc->sample_rate, enc->channel_mapping_family,
+ enc->decoding_channel_mapping,
gst_tag_setter_get_tag_list (GST_TAG_SETTER (enc)));
diff --git a/ext/opus/gstopusenc.h b/ext/opus/gstopusenc.h
index 8c2c3c6e8..1e39ad03d 100644
--- a/ext/opus/gstopusenc.h
+++ b/ext/opus/gstopusenc.h
@@ -79,7 +79,9 @@ struct _GstOpusEnc {
GstTagList *tags;
guint8 channel_mapping_family;
- guint8 channel_mapping[256];
+ guint8 encoding_channel_mapping[256];
+ guint8 decoding_channel_mapping[256];
+ guint8 n_stereo_streams;
};
struct _GstOpusEncClass {
diff --git a/ext/opus/gstopusheader.c b/ext/opus/gstopusheader.c
index a46c5d6f6..68826a56b 100644
--- a/ext/opus/gstopusheader.c
+++ b/ext/opus/gstopusheader.c
@@ -27,12 +27,17 @@
#include "gstopusheader.h"
static GstBuffer *
-gst_opus_enc_create_id_buffer (gint nchannels, gint sample_rate,
- guint8 channel_mapping_family, const guint8 * channel_mapping)
+gst_opus_enc_create_id_buffer (gint nchannels, gint n_stereo_streams,
+ gint sample_rate, guint8 channel_mapping_family,
+ const guint8 * channel_mapping)
{
GstBuffer *buffer;
GstByteWriter bw;
+ g_return_val_if_fail (nchannels > 0 && nchannels < 256, NULL);
+ g_return_val_if_fail (n_stereo_streams >= 0, NULL);
+ g_return_val_if_fail (n_stereo_streams <= nchannels - n_stereo_streams, NULL);
+
gst_byte_writer_init (&bw);
/* See http://wiki.xiph.org/OggOpus */
@@ -44,8 +49,8 @@ gst_opus_enc_create_id_buffer (gint nchannels, gint sample_rate,
gst_byte_writer_put_uint16_le (&bw, 0); /* output gain */
gst_byte_writer_put_uint8 (&bw, channel_mapping_family);
if (channel_mapping_family > 0) {
- gst_byte_writer_put_uint8 (&bw, nchannels);
- gst_byte_writer_put_uint8 (&bw, 0);
+ gst_byte_writer_put_uint8 (&bw, nchannels - n_stereo_streams);
+ gst_byte_writer_put_uint8 (&bw, n_stereo_streams);
gst_byte_writer_put_data (&bw, channel_mapping, nchannels);
}
@@ -145,11 +150,38 @@ void
gst_opus_header_create_caps_from_headers (GstCaps ** caps, GSList ** headers,
GstBuffer * buf1, GstBuffer * buf2)
{
+ int n_streams, family;
+ gboolean multistream;
+ guint8 *data;
+ gsize size;
+
g_return_if_fail (caps);
g_return_if_fail (headers && !*headers);
+ g_return_if_fail (gst_buffer_get_size (buf1) >= 19);
+
+ data = gst_buffer_map (buf1, &size, NULL, GST_MAP_READ);
+
+ /* work out the number of streams */
+ family = data[18];
+ if (family == 0) {
+ n_streams = 1;
+ } else {
+ /* only included in the header for family > 0 */
+ if (size >= 20)
+ n_streams = data[19];
+ else {
+ g_warning ("family > 0 but header buffer size < 20");
+ gst_buffer_unmap (buf1, data, size);
+ return;
+ }
+ }
+
+ gst_buffer_unmap (buf1, data, size);
/* mark and put on caps */
- *caps = gst_caps_from_string ("audio/x-opus");
+ multistream = n_streams > 1;
+ *caps = gst_caps_new_simple ("audio/x-opus",
+ "multistream", G_TYPE_BOOLEAN, multistream, NULL);
*caps = _gst_caps_set_buffer_array (*caps, "streamheader", buf1, buf2, NULL);
*headers = g_slist_prepend (*headers, buf2);
@@ -158,7 +190,7 @@ gst_opus_header_create_caps_from_headers (GstCaps ** caps, GSList ** headers,
void
gst_opus_header_create_caps (GstCaps ** caps, GSList ** headers, gint nchannels,
- gint sample_rate, guint8 channel_mapping_family,
+ gint n_stereo_streams, gint sample_rate, guint8 channel_mapping_family,
const guint8 * channel_mapping, const GstTagList * tags)
{
GstBuffer *buf1, *buf2;
@@ -175,7 +207,7 @@ gst_opus_header_create_caps (GstCaps ** caps, GSList ** headers, gint nchannels,
/* create header buffers */
buf1 =
- gst_opus_enc_create_id_buffer (nchannels, sample_rate,
+ gst_opus_enc_create_id_buffer (nchannels, n_stereo_streams, sample_rate,
channel_mapping_family, channel_mapping);
buf2 = gst_opus_enc_create_metadata_buffer (tags);
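
The header-writing and header-parsing hunks above both rely on the fixed OggOpus ID header layout referenced in the code (http://wiki.xiph.org/OggOpus): the channel count sits at byte 9, the mapping family at byte 18, and, for family > 0, the stream count at byte 19 followed by the coupled (stereo) stream count at byte 20. A minimal sketch of reading those counts back out of a mapped ID header, under those layout assumptions; the function name read_opus_stream_counts is invented for the example and is not part of this commit.

#include <glib.h>

/* Sketch: extract stream counts from an OpusHead ID header that has been
 * mapped into 'data' of 'size' bytes. */
static gboolean
read_opus_stream_counts (const guint8 * data, gsize size,
    gint * n_streams, gint * n_coupled)
{
  if (size < 19)
    return FALSE;

  if (data[18] == 0) {
    /* family 0: implicit mapping, a single (possibly stereo) stream */
    *n_streams = 1;
    *n_coupled = (data[9] == 2) ? 1 : 0;
  } else {
    if (size < 21)
      return FALSE;
    *n_streams = data[19];
    *n_coupled = data[20];
  }
  return TRUE;
}
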
diff --git a/ext/opus/gstopusheader.h b/ext/opus/gstopusheader.h
index 3b2cfc265..c6278eff3 100644
--- a/ext/opus/gstopusheader.h
+++ b/ext/opus/gstopusheader.h
@@ -28,7 +28,7 @@ G_BEGIN_DECLS
extern void gst_opus_header_create_caps_from_headers (GstCaps **caps, GSList **headers,
GstBuffer *id_header, GstBuffer *comment_header);
extern void gst_opus_header_create_caps (GstCaps **caps, GSList **headers,
- gint nchannels, gint sample_rate,
+ gint nchannels, gint n_stereo_streams, gint sample_rate,
guint8 channel_mapping_family, const guint8 *channel_mapping,
const GstTagList *tags);
extern gboolean gst_opus_header_is_header (GstBuffer * buf,
diff --git a/ext/opus/gstopusparse.c b/ext/opus/gstopusparse.c
index fd7024c62..05963ca79 100644
--- a/ext/opus/gstopusparse.c
+++ b/ext/opus/gstopusparse.c
@@ -314,7 +314,7 @@ gst_opus_parse_parse_frame (GstBaseParse * base, GstBaseParseFrame * frame)
channel_mapping_family = 0;
channel_mapping[0] = 0;
channel_mapping[1] = 1;
- gst_opus_header_create_caps (&caps, &parse->headers, channels, 0,
+ gst_opus_header_create_caps (&caps, &parse->headers, channels, 1, 0,
channel_mapping_family, channel_mapping, NULL);
}
diff --git a/ext/opus/gstrtpopusdepay.c b/ext/opus/gstrtpopusdepay.c
new file mode 100644
index 000000000..f8effbb6c
--- /dev/null
+++ b/ext/opus/gstrtpopusdepay.c
@@ -0,0 +1,120 @@
+/*
+ * Opus Depayloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include "gstrtpopusdepay.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpopusdepay_debug);
+#define GST_CAT_DEFAULT (rtpopusdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_opus_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ","
+ "clock-rate = (int) 48000, "
+ "encoding-name = (string) \"X-GST-OPUS-DRAFT-SPITTKA-00\"")
+ );
+
+static GstStaticPadTemplate gst_rtp_opus_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus")
+ );
+
+static GstBuffer *gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload,
+ GstBuffer * buf);
+static gboolean gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+G_DEFINE_TYPE (GstRTPOpusDepay, gst_rtp_opus_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+
+static void
+gst_rtp_opus_depay_class_init (GstRTPOpusDepayClass * klass)
+{
+ GstRTPBaseDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *element_class;
+
+ element_class = GST_ELEMENT_CLASS (klass);
+ gstbasertpdepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_depay_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_depay_sink_template));
+ gst_element_class_set_details_simple (element_class,
+ "RTP Opus packet depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Opus audio from RTP packets",
+ "Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
+
+ gstbasertpdepayload_class->process = gst_rtp_opus_depay_process;
+ gstbasertpdepayload_class->set_caps = gst_rtp_opus_depay_setcaps;
+
+ GST_DEBUG_CATEGORY_INIT (rtpopusdepay_debug, "rtpopusdepay", 0,
+ "Opus RTP Depayloader");
+}
+
+static void
+gst_rtp_opus_depay_init (GstRTPOpusDepay * rtpopusdepay)
+{
+
+}
+
+static gboolean
+gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstCaps *srccaps;
+ gboolean ret;
+
+ srccaps = gst_caps_new_empty_simple ("audio/x-opus");
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+
+ GST_DEBUG_OBJECT (depayload,
+ "set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
+ gst_caps_unref (srccaps);
+
+ depayload->clock_rate = 48000;
+
+ return ret;
+}
+
+static GstBuffer *
+gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
+{
+ GstBuffer *outbuf;
+ GstRTPBuffer rtpbuf = { NULL, };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtpbuf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtpbuf);
+ gst_rtp_buffer_unmap (&rtpbuf);
+
+ return outbuf;
+}
diff --git a/ext/opus/gstrtpopusdepay.h b/ext/opus/gstrtpopusdepay.h
new file mode 100644
index 000000000..968ae52ae
--- /dev/null
+++ b/ext/opus/gstrtpopusdepay.h
@@ -0,0 +1,57 @@
+/*
+ * Opus Depayloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_RTP_OPUS_DEPAY_H__
+#define __GST_RTP_OPUS_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTPOpusDepay GstRTPOpusDepay;
+typedef struct _GstRTPOpusDepayClass GstRTPOpusDepayClass;
+
+#define GST_TYPE_RTP_OPUS_DEPAY \
+ (gst_rtp_opus_depay_get_type())
+#define GST_RTP_OPUS_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepay))
+#define GST_RTP_OPUS_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepayClass))
+#define GST_IS_RTP_OPUS_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_DEPAY))
+#define GST_IS_RTP_OPUS_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_DEPAY))
+
+
+struct _GstRTPOpusDepay
+{
+ GstRTPBaseDepayload depayload;
+
+};
+
+struct _GstRTPOpusDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_opus_depay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_OPUS_DEPAY_H__ */
diff --git a/ext/opus/gstrtpopuspay.c b/ext/opus/gstrtpopuspay.c
new file mode 100644
index 000000000..5003c739f
--- /dev/null
+++ b/ext/opus/gstrtpopuspay.c
@@ -0,0 +1,137 @@
+/*
+ * Opus Payloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpopuspay.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpopuspay_debug);
+#define GST_CAT_DEFAULT (rtpopuspay_debug)
+
+
+static GstStaticPadTemplate gst_rtp_opus_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus, multistream = (boolean) FALSE")
+ );
+
+static GstStaticPadTemplate gst_rtp_opus_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 48000, "
+ "encoding-name = (string) \"X-GST-OPUS-DRAFT-SPITTKA-00\"")
+ );
+
+static gboolean gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
+G_DEFINE_TYPE (GstRtpOPUSPay, gst_rtp_opus_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+
+static void
+gst_rtp_opus_pay_class_init (GstRtpOPUSPayClass * klass)
+{
+ GstRTPBasePayloadClass *gstbasertppayload_class;
+ GstElementClass *element_class;
+
+ gstbasertppayload_class = (GstRTPBasePayloadClass *) klass;
+ element_class = GST_ELEMENT_CLASS (klass);
+
+ gstbasertppayload_class->set_caps = gst_rtp_opus_pay_setcaps;
+ gstbasertppayload_class->handle_buffer = gst_rtp_opus_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_pay_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_pay_sink_template));
+
+ gst_element_class_set_details_simple (element_class,
+ "RTP Opus payloader",
+ "Codec/Payloader/Network/RTP",
+ "Puts Opus audio in RTP packets",
+ "Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpopuspay_debug, "rtpopuspay", 0,
+ "Opus RTP Payloader");
+}
+
+static void
+gst_rtp_opus_pay_init (GstRtpOPUSPay * rtpopuspay)
+{
+}
+
+static gboolean
+gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ gboolean res;
+ gchar *capsstr;
+
+ capsstr = gst_caps_to_string (caps);
+
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE,
+ "X-GST-OPUS-DRAFT-SPITTKA-00", 48000);
+ res =
+ gst_rtp_base_payload_set_outcaps (payload, "caps", G_TYPE_STRING, capsstr,
+ NULL);
+ g_free (capsstr);
+
+ return res;
+}
+
+static GstFlowReturn
+gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRTPBuffer rtpbuf = { NULL, };
+ GstBuffer *outbuf;
+ gsize size;
+  gpointer data;
+
+  /* Copy the data and timestamp into a new output buffer.
+   * FIXME: is there a convenience function for this? */
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+ outbuf = gst_rtp_buffer_new_copy_data (data, size);
+ GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
+
+ /* Unmap and free input buffer */
+ gst_buffer_unmap (buffer, data, size);
+ gst_buffer_unref (buffer);
+
+ /* Remove marker from RTP buffer */
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtpbuf);
+ gst_rtp_buffer_set_marker (&rtpbuf, FALSE);
+ gst_rtp_buffer_unmap (&rtpbuf);
+
+ /* Push out */
+ return gst_rtp_base_payload_push (basepayload, outbuf);
+}
diff --git a/ext/opus/gstrtpopuspay.h b/ext/opus/gstrtpopuspay.h
new file mode 100644
index 000000000..81160fe2a
--- /dev/null
+++ b/ext/opus/gstrtpopuspay.h
@@ -0,0 +1,58 @@
+/*
+ * Opus Payloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_RTP_OPUS_PAY_H__
+#define __GST_RTP_OPUS_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_OPUS_PAY \
+ (gst_rtp_opus_pay_get_type())
+#define GST_RTP_OPUS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPay))
+#define GST_RTP_OPUS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPayClass))
+#define GST_IS_RTP_OPUS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_PAY))
+#define GST_IS_RTP_OPUS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_PAY))
+
+typedef struct _GstRtpOPUSPay GstRtpOPUSPay;
+typedef struct _GstRtpOPUSPayClass GstRtpOPUSPayClass;
+
+struct _GstRtpOPUSPay
+{
+ GstRTPBasePayload payload;
+};
+
+struct _GstRtpOPUSPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_opus_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_OPUS_PAY_H__ */
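
The two new RTP elements also have to be registered with the opus plugin; that change lives in gstopus.c (listed in the diffstat, not shown in this hunk). A hedged sketch of what such a registration typically looks like — the element names and ranks here are assumptions, not taken from this commit:

#include <gst/gst.h>
#include "gstrtpopuspay.h"
#include "gstrtpopusdepay.h"

/* Sketch only: how a plugin_init function would typically register
 * the new payloader and depayloader. */
static gboolean
register_rtp_opus_elements (GstPlugin * plugin)
{
  return gst_element_register (plugin, "rtpopuspay", GST_RANK_NONE,
      GST_TYPE_RTP_OPUS_PAY)
      && gst_element_register (plugin, "rtpopusdepay", GST_RANK_NONE,
      GST_TYPE_RTP_OPUS_DEPAY);
}
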
diff --git a/ext/resindvd/rsndec.c b/ext/resindvd/rsndec.c
index 7db1e46eb..3abc0065c 100644
--- a/ext/resindvd/rsndec.c
+++ b/ext/resindvd/rsndec.c
@@ -277,8 +277,7 @@ rsn_dec_change_state (GstElement * element, GstStateChange transition)
new_child = gst_element_factory_make ("autoconvert", NULL);
decoder_factories = klass->get_decoder_factories (klass);
- g_object_set (G_OBJECT (new_child), "initial-identity", TRUE,
- "factories", decoder_factories, NULL);
+ g_object_set (G_OBJECT (new_child), "factories", decoder_factories, NULL);
if (new_child == NULL || !rsn_dec_set_child (self, new_child))
ret = GST_STATE_CHANGE_FAILURE;
break;
diff --git a/ext/voaacenc/gstvoaacenc.c b/ext/voaacenc/gstvoaacenc.c
index c9fa92180..a0bd9dae5 100644
--- a/ext/voaacenc/gstvoaacenc.c
+++ b/ext/voaacenc/gstvoaacenc.c
@@ -454,9 +454,6 @@ gst_voaacenc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
g_return_val_if_fail (voaacenc->handle, GST_FLOW_NOT_NEGOTIATED);
- if (voaacenc->rate == 0 || voaacenc->channels == 0)
- goto not_negotiated;
-
/* we don't deal with squeezing remnants, so simply discard those */
if (G_UNLIKELY (buf == NULL)) {
GST_DEBUG_OBJECT (benc, "no data");
@@ -508,13 +505,6 @@ exit:
return ret;
/* ERRORS */
-not_negotiated:
- {
- GST_ELEMENT_ERROR (voaacenc, STREAM, TYPE_NOT_FOUND,
- (NULL), ("unknown type"));
- ret = GST_FLOW_NOT_NEGOTIATED;
- goto exit;
- }
encode_failed:
{
GST_ELEMENT_ERROR (voaacenc, STREAM, ENCODE, (NULL), ("encode failed"));
diff --git a/ext/voamrwbenc/gstvoamrwbenc.c b/ext/voamrwbenc/gstvoamrwbenc.c
index 4ecc5768c..4647b84ab 100644
--- a/ext/voamrwbenc/gstvoamrwbenc.c
+++ b/ext/voamrwbenc/gstvoamrwbenc.c
@@ -281,11 +281,6 @@ gst_voamrwbenc_handle_frame (GstAudioEncoder * benc, GstBuffer * buffer)
g_return_val_if_fail (amrwbenc->handle, GST_FLOW_NOT_NEGOTIATED);
- if (amrwbenc->rate == 0 || amrwbenc->channels == 0) {
- ret = GST_FLOW_NOT_NEGOTIATED;
- goto done;
- }
-
/* we don't deal with squeezing remnants, so simply discard those */
if (G_UNLIKELY (buffer == NULL)) {
GST_DEBUG_OBJECT (amrwbenc, "no data");
diff --git a/ext/xvid/gstxvidenc.c b/ext/xvid/gstxvidenc.c
index dc60d0d8c..57e9a3317 100644
--- a/ext/xvid/gstxvidenc.c
+++ b/ext/xvid/gstxvidenc.c
@@ -51,14 +51,37 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-xvid, "
+ GST_STATIC_CAPS ("video/mpeg, "
+ "mpegversion = (int) 4, "
+ "systemstream = (boolean) FALSE, "
+ "width = (int) [ 0, MAX ], "
+ "height = (int) [ 0, MAX ], "
+ "framerate = (fraction) [ 0/1, MAX ], "
+ "profile = (string) simple, "
+ "level = (string) { 0, 1, 2, 3, 4a, 5, 6 };"
+ "video/mpeg, "
+ "mpegversion = (int) 4, "
+ "systemstream = (boolean) FALSE, "
+ "width = (int) [ 0, MAX ], "
+ "height = (int) [ 0, MAX ], "
+ "framerate = (fraction) [ 0/1, MAX ], "
+ "profile = (string) advanced-real-time-simple, "
+ "level = (string) { 1, 2, 3, 4 };"
+ "video/mpeg, "
+ "mpegversion = (int) 4, "
+ "systemstream = (boolean) FALSE, "
"width = (int) [ 0, MAX ], "
- "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ]; "
+ "height = (int) [ 0, MAX ], "
+ "framerate = (fraction) [ 0/1, MAX ], "
+ "profile = (string) advanced-simple, "
+ "level = (string) { 0, 1, 2, 3, 4 };"
"video/mpeg, "
"mpegversion = (int) 4, "
"systemstream = (boolean) FALSE, "
+ "width = (int) [ 0, MAX ], " "height = (int) [ 0, MAX ]; "
+ "video/x-xvid, "
"width = (int) [ 0, MAX ], "
- "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ]")
+ "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ];")
);
@@ -106,6 +129,9 @@ gst_xvidenc_profile_get_type (void)
{XVID_PROFILE_S_L1, "S_L1", "Simple profile, L1"},
{XVID_PROFILE_S_L2, "S_L2", "Simple profile, L2"},
{XVID_PROFILE_S_L3, "S_L3", "Simple profile, L3"},
+ {XVID_PROFILE_S_L4a, "S_L4a", "Simple profile, L4a"},
+ {XVID_PROFILE_S_L5, "S_L5", "Simple profile, L5"},
+ {XVID_PROFILE_S_L6, "S_L6", "Simple profile, L6"},
{XVID_PROFILE_ARTS_L1, "ARTS_L1",
"Advanced real-time simple profile, L1"},
{XVID_PROFILE_ARTS_L2, "ARTS_L2",
@@ -578,11 +604,97 @@ gst_xvidenc_setup (GstXvidEnc * xvidenc)
xvid_enc_create_t xenc;
xvid_enc_plugin_t xplugin[2];
gint ret;
+ GstCaps *allowed_caps;
+ gint profile = -1;
+
+ /* Negotiate profile/level with downstream */
+ allowed_caps = gst_pad_get_allowed_caps (xvidenc->srcpad);
+ if (allowed_caps && !gst_caps_is_empty (allowed_caps)) {
+ const gchar *profile_str, *level_str;
+
+ allowed_caps = gst_caps_make_writable (allowed_caps);
+ gst_caps_truncate (allowed_caps);
+
+ profile_str =
+ gst_structure_get_string (gst_caps_get_structure (allowed_caps, 0),
+ "profile");
+ level_str =
+ gst_structure_get_string (gst_caps_get_structure (allowed_caps, 0),
+ "level");
+ if (profile_str) {
+ if (g_str_equal (profile_str, "simple")) {
+ if (!level_str) {
+ profile = XVID_PROFILE_S_L0;
+ } else if (g_str_equal (level_str, "0")) {
+ profile = XVID_PROFILE_S_L0;
+ } else if (g_str_equal (level_str, "1")) {
+ profile = XVID_PROFILE_S_L1;
+ } else if (g_str_equal (level_str, "2")) {
+ profile = XVID_PROFILE_S_L2;
+ } else if (g_str_equal (level_str, "3")) {
+ profile = XVID_PROFILE_S_L3;
+ } else if (g_str_equal (level_str, "4a")) {
+ profile = XVID_PROFILE_S_L4a;
+ } else if (g_str_equal (level_str, "5")) {
+ profile = XVID_PROFILE_S_L5;
+ } else if (g_str_equal (level_str, "6")) {
+ profile = XVID_PROFILE_S_L6;
+ } else {
+ GST_ERROR_OBJECT (xvidenc,
+ "Invalid profile/level combination (%s %s)", profile_str,
+ level_str);
+ }
+ } else if (g_str_equal (profile_str, "advanced-real-time-simple")) {
+ if (!level_str) {
+ profile = XVID_PROFILE_ARTS_L1;
+ } else if (g_str_equal (level_str, "1")) {
+ profile = XVID_PROFILE_ARTS_L1;
+ } else if (g_str_equal (level_str, "2")) {
+ profile = XVID_PROFILE_ARTS_L2;
+ } else if (g_str_equal (level_str, "3")) {
+ profile = XVID_PROFILE_ARTS_L3;
+ } else if (g_str_equal (level_str, "4")) {
+ profile = XVID_PROFILE_ARTS_L4;
+ } else {
+ GST_ERROR_OBJECT (xvidenc,
+ "Invalid profile/level combination (%s %s)", profile_str,
+ level_str);
+ }
+ } else if (g_str_equal (profile_str, "advanced-simple")) {
+ if (!level_str) {
+ profile = XVID_PROFILE_AS_L0;
+ } else if (g_str_equal (level_str, "0")) {
+ profile = XVID_PROFILE_AS_L0;
+ } else if (g_str_equal (level_str, "1")) {
+ profile = XVID_PROFILE_AS_L1;
+ } else if (g_str_equal (level_str, "2")) {
+ profile = XVID_PROFILE_AS_L2;
+ } else if (g_str_equal (level_str, "3")) {
+ profile = XVID_PROFILE_AS_L3;
+ } else if (g_str_equal (level_str, "4")) {
+ profile = XVID_PROFILE_AS_L4;
+ } else {
+ GST_ERROR_OBJECT (xvidenc,
+ "Invalid profile/level combination (%s %s)", profile_str,
+ level_str);
+ }
+ } else {
+ GST_ERROR_OBJECT (xvidenc, "Invalid profile (%s)", profile_str);
+ }
+ }
+ }
+ if (allowed_caps)
+ gst_caps_unref (allowed_caps);
+
+ if (profile != -1) {
+ xvidenc->profile = profile;
+ g_object_notify (G_OBJECT (xvidenc), "profile");
+ }
/* see xvid.h for the meaning of all this. */
gst_xvid_init_struct (xenc);
- xenc.profile = xvidenc->profile;
+ xenc.profile = xvidenc->used_profile = xvidenc->profile;
xenc.width = xvidenc->width;
xenc.height = xvidenc->height;
xenc.max_bframes = xvidenc->max_bframes;
@@ -783,6 +895,78 @@ gst_xvidenc_setcaps (GstPad * pad, GstCaps * vscaps)
/* just to be sure */
gst_pad_fixate_caps (xvidenc->srcpad, new_caps);
+ if (xvidenc->used_profile != 0) {
+ switch (xvidenc->used_profile) {
+ case XVID_PROFILE_S_L0:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "0", NULL);
+ break;
+ case XVID_PROFILE_S_L1:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "1", NULL);
+ break;
+ case XVID_PROFILE_S_L2:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "2", NULL);
+ break;
+ case XVID_PROFILE_S_L3:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "3", NULL);
+ break;
+ case XVID_PROFILE_S_L4a:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "4a", NULL);
+ break;
+ case XVID_PROFILE_S_L5:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "5", NULL);
+ break;
+ case XVID_PROFILE_S_L6:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
+ "level", G_TYPE_STRING, "6", NULL);
+ break;
+ case XVID_PROFILE_ARTS_L1:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-real-time-simple", "level", G_TYPE_STRING, "1", NULL);
+ break;
+ case XVID_PROFILE_ARTS_L2:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-real-time-simple", "level", G_TYPE_STRING, "2", NULL);
+ break;
+ case XVID_PROFILE_ARTS_L3:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-real-time-simple", "level", G_TYPE_STRING, "3", NULL);
+ break;
+ case XVID_PROFILE_ARTS_L4:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-real-time-simple", "level", G_TYPE_STRING, "4", NULL);
+ break;
+ case XVID_PROFILE_AS_L0:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "0", NULL);
+ break;
+ case XVID_PROFILE_AS_L1:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "1", NULL);
+ break;
+ case XVID_PROFILE_AS_L2:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "2", NULL);
+ break;
+ case XVID_PROFILE_AS_L3:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "3", NULL);
+ break;
+ case XVID_PROFILE_AS_L4:
+ gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
+ "advanced-simple", "level", G_TYPE_STRING, "4", NULL);
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ }
+
/* src pad should accept anyway */
ret = gst_pad_set_caps (xvidenc->srcpad, new_caps);
gst_caps_unref (new_caps);
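
The setup code above only selects a profile when downstream caps actually constrain it. A hedged illustration of caps a downstream element (for example a capsfilter) could offer so that this negotiation picks advanced-simple level 3 — the specific values are just an example, not part of this commit:

#include <gst/gst.h>

/* Example caps that would make gst_xvidenc_setup () choose
 * XVID_PROFILE_AS_L3 via the string matching above. */
static GstCaps *
make_as_l3_caps (void)
{
  return gst_caps_new_simple ("video/mpeg",
      "mpegversion", G_TYPE_INT, 4,
      "systemstream", G_TYPE_BOOLEAN, FALSE,
      "profile", G_TYPE_STRING, "advanced-simple",
      "level", G_TYPE_STRING, "3", NULL);
}
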
diff --git a/ext/xvid/gstxvidenc.h b/ext/xvid/gstxvidenc.h
index a2f32d0a8..121c99d41 100644
--- a/ext/xvid/gstxvidenc.h
+++ b/ext/xvid/gstxvidenc.h
@@ -64,6 +64,7 @@ struct _GstXvidEnc {
/* encoding profile */
gint profile;
+ gint used_profile;
/* quantizer type; h263, MPEG */
gint quant_type;