author     Nicolas Dufresne <nicolas.dufresne@collabora.com>  2021-08-19 11:40:22 -0400
committer  GStreamer Marge Bot <gitlab-merge-bot@gstreamer-foundation.org>  2021-08-20 19:29:53 +0000
commit     ad5dcfb0912a17d4f8a77c59750bcdfee399b175 (patch)
tree       4abde7800c2a4971891a7f277f6531f778fad903 /sys
parent     0b05b9b3e68334d94cc47129aa0648e58482733e (diff)
download   gstreamer-plugins-bad-ad5dcfb0912a17d4f8a77c59750bcdfee399b175.tar.gz
v4l2codec: h264: Implement support for split fields
When a frame is composed of two fields, the base class now splits the
picture in two. To support this, we need to ensure that the picture
buffer is held in the VB2 queue so that the second field gets decoded
into it. This also implements the new_field_picture() virtual and sets
the previous request on the new picture.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/2474>
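For context, the driver-facing behaviour this relies on is the V4L2 stateless
decoder "hold capture buffer" mechanism: when the OUTPUT (bitstream) buffer for
the first field is queued with V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF, the driver
keeps the CAPTURE buffer attached so the second field is decoded into the same
frame. A minimal, hypothetical sketch of that queueing step at the raw V4L2
level follows (the function name, file descriptors and buffer index are
illustrative and not taken from this code):

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Queue the OUTPUT (bitstream) buffer carrying the first field of a pair.
 * Holding the capture buffer keeps it in the VB2 queue so the second
 * field can be decoded into the same frame. */
static int
queue_first_field (int video_fd, int request_fd, __u32 index)
{
  struct v4l2_buffer buf;
  struct v4l2_plane plane;

  memset (&buf, 0, sizeof (buf));
  memset (&plane, 0, sizeof (plane));

  buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  buf.memory = V4L2_MEMORY_MMAP;
  buf.index = index;
  buf.m.planes = &plane;
  buf.length = 1;

  /* Hold the capture buffer and attach the media request for this field */
  buf.flags = V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF | V4L2_BUF_FLAG_REQUEST_FD;
  buf.request_fd = request_fd;

  return ioctl (video_fd, VIDIOC_QBUF, &buf);
}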
Diffstat (limited to 'sys')
-rw-r--r--  sys/v4l2codecs/gstv4l2codech264dec.c  34
1 file changed, 33 insertions(+), 1 deletion(-)
diff --git a/sys/v4l2codecs/gstv4l2codech264dec.c b/sys/v4l2codecs/gstv4l2codech264dec.c
index 5c912edca..bd482b585 100644
--- a/sys/v4l2codecs/gstv4l2codech264dec.c
+++ b/sys/v4l2codecs/gstv4l2codech264dec.c
@@ -1216,7 +1216,37 @@ gst_v4l2_codec_h264_dec_end_picture (GstH264Decoder * decoder,
GstH264Picture * picture)
{
GstV4l2CodecH264Dec *self = GST_V4L2_CODEC_H264_DEC (decoder);
- return gst_v4l2_codec_h264_dec_submit_bitstream (self, picture, 0);
+ guint flags = 0;
+
+ /* Hold on the output frame if this is first field of a pair */
+ if (picture->field != GST_H264_PICTURE_FIELD_FRAME && !picture->second_field)
+ flags = V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF;
+
+ return gst_v4l2_codec_h264_dec_submit_bitstream (self, picture, flags);
+}
+
+static gboolean
+gst_v4l2_codec_h264_dec_new_field_picture (GstH264Decoder * decoder,
+ const GstH264Picture * first_field, GstH264Picture * second_field)
+{
+ GstV4l2CodecH264Dec *self = GST_V4L2_CODEC_H264_DEC (decoder);
+ GstV4l2Request *request =
+ gst_h264_picture_get_user_data ((GstH264Picture *) first_field);
+
+ if (!request) {
+ GST_WARNING_OBJECT (self,
+ "First picture does not have an associated request");
+ return TRUE;
+ }
+
+ GST_DEBUG_OBJECT (self, "Assigned request %p to second field.", request);
+
+ /* Associate the previous request with the new picture so that
+ * submit_bitstream can create sub-request */
+ gst_h264_picture_set_user_data (second_field, gst_v4l2_request_ref (request),
+ (GDestroyNotify) gst_v4l2_request_unref);
+
+ return TRUE;
}
static guint
@@ -1393,6 +1423,8 @@ gst_v4l2_codec_h264_dec_subclass_init (GstV4l2CodecH264DecClass * klass,
GST_DEBUG_FUNCPTR (gst_v4l2_codec_h264_dec_decode_slice);
h264decoder_class->end_picture =
GST_DEBUG_FUNCPTR (gst_v4l2_codec_h264_dec_end_picture);
+ h264decoder_class->new_field_picture =
+ GST_DEBUG_FUNCPTR (gst_v4l2_codec_h264_dec_new_field_picture);
h264decoder_class->get_preferred_output_delay =
GST_DEBUG_FUNCPTR (gst_v4l2_codec_h264_dec_get_preferred_output_delay);