summaryrefslogtreecommitdiff
path: root/sys/d3d11/gstd3d11h264dec.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'sys/d3d11/gstd3d11h264dec.cpp')
-rw-r--r--sys/d3d11/gstd3d11h264dec.cpp993
1 files changed, 371 insertions, 622 deletions
diff --git a/sys/d3d11/gstd3d11h264dec.cpp b/sys/d3d11/gstd3d11h264dec.cpp
index 5723fa2ac..39dca6bab 100644
--- a/sys/d3d11/gstd3d11h264dec.cpp
+++ b/sys/d3d11/gstd3d11h264dec.cpp
@@ -70,6 +70,7 @@
#include <gst/codecs/gsth264decoder.h>
#include <string.h>
+#include <vector>
/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
@@ -82,36 +83,34 @@
GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_h264_dec_debug);
#define GST_CAT_DEFAULT gst_d3d11_h264_dec_debug
+/* *INDENT-OFF* */
+typedef struct _GstD3D11H264DecInner
+{
+ GstD3D11Device *device = nullptr;
+ GstD3D11Decoder *d3d11_decoder = nullptr;
+
+ DXVA_PicParams_H264 pic_params;
+ DXVA_Qmatrix_H264 iq_matrix;
+
+ std::vector<DXVA_Slice_H264_Short> slice_list;
+ std::vector<guint8> bitstream_buffer;
+
+ gint width = 0;
+ gint height = 0;
+ gint coded_width = 0;
+ gint coded_height = 0;
+ gint bitdepth = 0;
+ guint8 chroma_format_idc = 0;
+ GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
+ gboolean interlaced = FALSE;
+ gint max_dpb_size = 0;
+} GstD3D11H264DecInner;
+
+/* *INDENT-ON* */
typedef struct _GstD3D11H264Dec
{
GstH264Decoder parent;
-
- GstD3D11Device *device;
-
- gint width, height;
- gint coded_width, coded_height;
- gint bitdepth;
- guint8 chroma_format_idc;
- GstVideoFormat out_format;
- gboolean interlaced;
- gint max_dpb_size;
-
- /* Array of DXVA_Slice_H264_Short */
- GArray *slice_list;
-
- GstD3D11Decoder *d3d11_decoder;
-
- /* Pointing current bitstream buffer */
- gboolean bad_aligned_bitstream_buffer;
- guint written_buffer_size;
- guint remaining_buffer_size;
- guint8 *bitstream_buffer_data;
-
- DXVA_PicEntry_H264 ref_frame_list[16];
- INT field_order_cnt_list[16][2];
- USHORT frame_num_list[16];
- UINT used_for_reference_flags;
- USHORT non_existing_frame_flags;
+ GstD3D11H264DecInner *inner;
} GstD3D11H264Dec;
typedef struct _GstD3D11H264DecClass
@@ -128,7 +127,7 @@ static GstElementClass *parent_class = NULL;
static void gst_d3d11_h264_dec_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-static void gst_d3d11_h264_dec_dispose (GObject * object);
+static void gst_d3d11_h264_dec_finalize (GObject * object);
static void gst_d3d11_h264_dec_set_context (GstElement * element,
GstContext * context);
@@ -149,8 +148,6 @@ static gboolean gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GstVideoCodecFrame * frame, GstH264Picture * picture);
static gboolean gst_d3d11_h264_dec_new_field_picture (GstH264Decoder *
decoder, const GstH264Picture * first_field, GstH264Picture * second_field);
-static GstFlowReturn gst_d3d11_h264_dec_output_picture (GstH264Decoder *
- decoder, GstVideoCodecFrame * frame, GstH264Picture * picture);
static gboolean gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb);
static gboolean gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
@@ -158,8 +155,8 @@ static gboolean gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
GArray * ref_pic_list1);
static gboolean gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
GstH264Picture * picture);
-static gboolean gst_d3d11_h264_dec_fill_picture_params (GstD3D11H264Dec * self,
- const GstH264SliceHdr * slice_header, DXVA_PicParams_H264 * params);
+static GstFlowReturn gst_d3d11_h264_dec_output_picture (GstH264Decoder *
+ decoder, GstVideoCodecFrame * frame, GstH264Picture * picture);
static void
gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
@@ -171,7 +168,7 @@ gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;
gobject_class->get_property = gst_d3d11_h264_dec_get_property;
- gobject_class->dispose = gst_d3d11_h264_dec_dispose;
+ gobject_class->finalize = gst_d3d11_h264_dec_finalize;
element_class->set_context =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_set_context);
@@ -208,7 +205,7 @@ gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
static void
gst_d3d11_h264_dec_init (GstD3D11H264Dec * self)
{
- self->slice_list = g_array_new (FALSE, TRUE, sizeof (DXVA_Slice_H264_Short));
+ self->inner = new GstD3D11H264DecInner ();
}
static void
@@ -222,27 +219,25 @@ gst_d3d11_h264_dec_get_property (GObject * object, guint prop_id,
}
static void
-gst_d3d11_h264_dec_dispose (GObject * object)
+gst_d3d11_h264_dec_finalize (GObject * object)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (object);
- if (self->slice_list) {
- g_array_unref (self->slice_list);
- self->slice_list = NULL;
- }
+ delete self->inner;
- G_OBJECT_CLASS (parent_class)->dispose (object);
+ G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_d3d11_h264_dec_set_context (GstElement * element, GstContext * context)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (element);
+ GstD3D11H264DecInner *inner = self->inner;
GstD3D11H264DecClass *klass = GST_D3D11_H264_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
gst_d3d11_handle_set_context (element, context, cdata->adapter,
- &self->device);
+ &inner->device);
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@@ -251,35 +246,39 @@ gst_d3d11_h264_dec_set_context (GstElement * element, GstContext * context)
static void
gst_d3d11_h264_dec_reset (GstD3D11H264Dec * self)
{
- self->width = 0;
- self->height = 0;
- self->coded_width = 0;
- self->coded_height = 0;
- self->bitdepth = 0;
- self->chroma_format_idc = 0;
- self->out_format = GST_VIDEO_FORMAT_UNKNOWN;
- self->interlaced = FALSE;
- self->max_dpb_size = 0;
+ GstD3D11H264DecInner *inner = self->inner;
+
+ inner->width = 0;
+ inner->height = 0;
+ inner->coded_width = 0;
+ inner->coded_height = 0;
+ inner->bitdepth = 0;
+ inner->chroma_format_idc = 0;
+ inner->out_format = GST_VIDEO_FORMAT_UNKNOWN;
+ inner->interlaced = FALSE;
+ inner->max_dpb_size = 0;
}
static gboolean
gst_d3d11_h264_dec_open (GstVideoDecoder * decoder)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
GstD3D11H264DecClass *klass = GST_D3D11_H264_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), cdata->adapter,
- &self->device)) {
+ &inner->device)) {
GST_ERROR_OBJECT (self, "Cannot create d3d11device");
return FALSE;
}
- self->d3d11_decoder = gst_d3d11_decoder_new (self->device);
+ inner->d3d11_decoder = gst_d3d11_decoder_new (inner->device,
+ GST_DXVA_CODEC_H264);
- if (!self->d3d11_decoder) {
+ if (!inner->d3d11_decoder) {
GST_ERROR_OBJECT (self, "Cannot create d3d11 decoder");
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->device);
return FALSE;
}
@@ -292,9 +291,10 @@ static gboolean
gst_d3d11_h264_dec_close (GstVideoDecoder * decoder)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
- gst_clear_object (&self->d3d11_decoder);
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->d3d11_decoder);
+ gst_clear_object (&inner->device);
return TRUE;
}
@@ -303,8 +303,9 @@ static gboolean
gst_d3d11_h264_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_negotiate (self->d3d11_decoder, decoder))
+ if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
@@ -315,8 +316,9 @@ gst_d3d11_h264_dec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_decide_allocation (self->d3d11_decoder,
+ if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
decoder, query)) {
return FALSE;
}
@@ -329,11 +331,12 @@ static gboolean
gst_d3d11_h264_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
- query, self->device)) {
+ query, inner->device)) {
return TRUE;
}
break;
@@ -348,15 +351,16 @@ static gboolean
gst_d3d11_h264_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, TRUE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
break;
case GST_EVENT_FLUSH_STOP:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, FALSE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
default:
break;
}
@@ -369,6 +373,7 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
const GstH264SPS * sps, gint max_dpb_size)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
gint crop_width, crop_height;
gboolean interlaced;
gboolean modified = FALSE;
@@ -383,63 +388,63 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
crop_height = sps->height;
}
- if (self->width != crop_width || self->height != crop_height ||
- self->coded_width != sps->width || self->coded_height != sps->height) {
+ if (inner->width != crop_width || inner->height != crop_height ||
+ inner->coded_width != sps->width || inner->coded_height != sps->height) {
GST_INFO_OBJECT (self, "resolution changed %dx%d (%dx%d)",
crop_width, crop_height, sps->width, sps->height);
- self->width = crop_width;
- self->height = crop_height;
- self->coded_width = sps->width;
- self->coded_height = sps->height;
+ inner->width = crop_width;
+ inner->height = crop_height;
+ inner->coded_width = sps->width;
+ inner->coded_height = sps->height;
modified = TRUE;
}
- if (self->bitdepth != sps->bit_depth_luma_minus8 + 8) {
+ if (inner->bitdepth != sps->bit_depth_luma_minus8 + 8) {
GST_INFO_OBJECT (self, "bitdepth changed");
- self->bitdepth = (guint) sps->bit_depth_luma_minus8 + 8;
+ inner->bitdepth = (guint) sps->bit_depth_luma_minus8 + 8;
modified = TRUE;
}
- if (self->chroma_format_idc != sps->chroma_format_idc) {
+ if (inner->chroma_format_idc != sps->chroma_format_idc) {
GST_INFO_OBJECT (self, "chroma format changed");
- self->chroma_format_idc = sps->chroma_format_idc;
+ inner->chroma_format_idc = sps->chroma_format_idc;
modified = TRUE;
}
interlaced = !sps->frame_mbs_only_flag;
- if (self->interlaced != interlaced) {
+ if (inner->interlaced != interlaced) {
GST_INFO_OBJECT (self, "interlaced sequence changed");
- self->interlaced = interlaced;
+ inner->interlaced = interlaced;
modified = TRUE;
}
- if (self->max_dpb_size < max_dpb_size) {
+ if (inner->max_dpb_size < max_dpb_size) {
GST_INFO_OBJECT (self, "Requires larger DPB size (%d -> %d)",
- self->max_dpb_size, max_dpb_size);
+ inner->max_dpb_size, max_dpb_size);
modified = TRUE;
}
- if (modified || !gst_d3d11_decoder_is_configured (self->d3d11_decoder)) {
+ if (modified || !gst_d3d11_decoder_is_configured (inner->d3d11_decoder)) {
GstVideoInfo info;
- self->out_format = GST_VIDEO_FORMAT_UNKNOWN;
+ inner->out_format = GST_VIDEO_FORMAT_UNKNOWN;
- if (self->bitdepth == 8) {
- if (self->chroma_format_idc == 1)
- self->out_format = GST_VIDEO_FORMAT_NV12;
+ if (inner->bitdepth == 8) {
+ if (inner->chroma_format_idc == 1)
+ inner->out_format = GST_VIDEO_FORMAT_NV12;
else {
GST_FIXME_OBJECT (self, "Could not support 8bits non-4:2:0 format");
}
}
- if (self->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
+ if (inner->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
GST_ERROR_OBJECT (self, "Could not support bitdepth/chroma format");
return FALSE;
}
gst_video_info_set_format (&info,
- self->out_format, self->width, self->height);
- if (self->interlaced)
+ inner->out_format, inner->width, inner->height);
+ if (inner->interlaced)
GST_VIDEO_INFO_INTERLACE_MODE (&info) = GST_VIDEO_INTERLACE_MODE_MIXED;
/* Store configured DPB size here. Then, it will be referenced later
@@ -447,9 +452,10 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
* For instance, if every configuration is same apart from DPB size and
* new DPB size is decreased, we can reuse existing decoder object.
*/
- self->max_dpb_size = max_dpb_size;
- if (!gst_d3d11_decoder_configure (self->d3d11_decoder, GST_D3D11_CODEC_H264,
- decoder->input_state, &info, self->coded_width, self->coded_height,
+ inner->max_dpb_size = max_dpb_size;
+ if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
+ decoder->input_state, &info,
+ inner->coded_width, inner->coded_height,
/* Additional 4 views margin for zero-copy rendering */
max_dpb_size + 4)) {
GST_ERROR_OBJECT (self, "Failed to create decoder");
@@ -466,262 +472,14 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
}
static gboolean
-gst_d3d11_h264_dec_get_bitstream_buffer (GstD3D11H264Dec * self)
-{
- GST_TRACE_OBJECT (self, "Getting bitstream buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &self->remaining_buffer_size,
- (gpointer *) & self->bitstream_buffer_data)) {
- GST_ERROR_OBJECT (self, "Faild to get bitstream buffer");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Got bitstream buffer %p with size %d",
- self->bitstream_buffer_data, self->remaining_buffer_size);
- self->written_buffer_size = 0;
- if ((self->remaining_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "The size of bitstream buffer is not 128 bytes aligned");
- self->bad_aligned_bitstream_buffer = TRUE;
- } else {
- self->bad_aligned_bitstream_buffer = FALSE;
- }
-
- return TRUE;
-}
-
-static ID3D11VideoDecoderOutputView *
-gst_d3d11_h264_dec_get_output_view_from_picture (GstD3D11H264Dec * self,
- GstH264Picture * picture, guint8 * view_id)
-{
- GstBuffer *view_buffer;
- ID3D11VideoDecoderOutputView *view;
-
- view_buffer = (GstBuffer *) gst_h264_picture_get_user_data (picture);
- if (!view_buffer) {
- GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
- return NULL;
- }
-
- view = gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
- view_buffer, view_id);
- if (!view) {
- GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
- return NULL;
- }
-
- return view;
-}
-
-static gboolean
-gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
- GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb)
-{
- GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
- ID3D11VideoDecoderOutputView *view;
- guint8 view_id = 0xff;
- GArray *dpb_array;
- GstH264SPS *sps;
- GstH264PPS *pps;
- DXVA_PicParams_H264 pic_params = { 0, };
- DXVA_Qmatrix_H264 iq_matrix = { 0, };
- guint d3d11_buffer_size = 0;
- gpointer d3d11_buffer = NULL;
- gint i, j;
-
- pps = slice->header.pps;
- g_assert (pps != NULL);
-
- sps = pps->sequence;
- g_assert (sps != NULL);
-
- view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
- &view_id);
- if (!view) {
- GST_ERROR_OBJECT (self, "current picture does not have output view handle");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Begin frame");
- if (!gst_d3d11_decoder_begin_frame (self->d3d11_decoder, view, 0, NULL)) {
- GST_ERROR_OBJECT (self, "Failed to begin frame");
- return FALSE;
- }
-
- for (i = 0; i < 16; i++) {
- self->ref_frame_list[i].bPicEntry = 0xFF;
- self->field_order_cnt_list[i][0] = 0;
- self->field_order_cnt_list[i][1] = 0;
- self->frame_num_list[i] = 0;
- }
- self->used_for_reference_flags = 0;
- self->non_existing_frame_flags = 0;
-
- dpb_array = gst_h264_dpb_get_pictures_all (dpb);
-
- for (i = dpb_array->len - 1, j = 0; i >= 0 && j < 16; i--) {
- GstH264Picture *other = g_array_index (dpb_array, GstH264Picture *, i);
- guint8 id = 0xff;
-
- if (!GST_H264_PICTURE_IS_REF (other))
- continue;
-
- /* The second field picture will be handled differently */
- if (other->second_field)
- continue;
-
- gst_d3d11_h264_dec_get_output_view_from_picture (self, other, &id);
- self->ref_frame_list[j].Index7Bits = id;
-
- if (GST_H264_PICTURE_IS_LONG_TERM_REF (other)) {
- self->ref_frame_list[j].AssociatedFlag = 1;
- self->frame_num_list[j] = other->long_term_frame_idx;
- } else {
- self->ref_frame_list[j].AssociatedFlag = 0;
- self->frame_num_list[j] = other->frame_num;
- }
-
- switch (other->field) {
- case GST_H264_PICTURE_FIELD_TOP_FIELD:
- self->field_order_cnt_list[j][0] = other->top_field_order_cnt;
- self->used_for_reference_flags |= 0x1 << (2 * j);
- break;
- case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
- self->field_order_cnt_list[j][1] = other->bottom_field_order_cnt;
- self->used_for_reference_flags |= 0x1 << (2 * j + 1);
- break;
- default:
- self->field_order_cnt_list[j][0] = other->top_field_order_cnt;
- self->field_order_cnt_list[j][1] = other->bottom_field_order_cnt;
- self->used_for_reference_flags |= 0x3 << (2 * j);
- break;
- }
-
- if (other->other_field) {
- GstH264Picture *other_field = other->other_field;
-
- switch (other_field->field) {
- case GST_H264_PICTURE_FIELD_TOP_FIELD:
- self->field_order_cnt_list[j][0] = other_field->top_field_order_cnt;
- self->used_for_reference_flags |= 0x1 << (2 * j);
- break;
- case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
- self->field_order_cnt_list[j][1] =
- other_field->bottom_field_order_cnt;
- self->used_for_reference_flags |= 0x1 << (2 * j + 1);
- break;
- default:
- break;
- }
- }
-
- self->non_existing_frame_flags |= (other->nonexisting) << j;
- j++;
- }
-
- gst_d3d11_h264_dec_fill_picture_params (self, &slice->header, &pic_params);
-
- pic_params.CurrPic.Index7Bits = view_id;
- pic_params.RefPicFlag = GST_H264_PICTURE_IS_REF (picture);
- pic_params.frame_num = picture->frame_num;
-
- if (picture->field == GST_H264_PICTURE_FIELD_TOP_FIELD) {
- pic_params.CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
- pic_params.CurrFieldOrderCnt[1] = 0;
- } else if (picture->field == GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
- pic_params.CurrFieldOrderCnt[0] = 0;
- pic_params.CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
- } else {
- pic_params.CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
- pic_params.CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
- }
-
- memcpy (pic_params.RefFrameList, self->ref_frame_list,
- sizeof (pic_params.RefFrameList));
- memcpy (pic_params.FieldOrderCntList, self->field_order_cnt_list,
- sizeof (pic_params.FieldOrderCntList));
- memcpy (pic_params.FrameNumList, self->frame_num_list,
- sizeof (pic_params.FrameNumList));
-
- pic_params.UsedForReferenceFlags = self->used_for_reference_flags;
- pic_params.NonExistingFrameFlags = self->non_existing_frame_flags;
-
- GST_TRACE_OBJECT (self, "Getting picture param decoder buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for picture parameters");
- return FALSE;
- }
-
- memcpy (d3d11_buffer, &pic_params, sizeof (DXVA_PicParams_H264));
-
- GST_TRACE_OBJECT (self, "Release picture param decoder buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
-
- if (pps->pic_scaling_matrix_present_flag) {
- for (i = 0; i < 6; i++) {
- for (j = 0; j < 16; j++) {
- iq_matrix.bScalingLists4x4[i][j] = pps->scaling_lists_4x4[i][j];
- }
- }
-
- for (i = 0; i < 2; i++) {
- for (j = 0; j < 64; j++) {
- iq_matrix.bScalingLists8x8[i][j] = pps->scaling_lists_8x8[i][j];
- }
- }
- } else {
- for (i = 0; i < 6; i++) {
- for (j = 0; j < 16; j++) {
- iq_matrix.bScalingLists4x4[i][j] = sps->scaling_lists_4x4[i][j];
- }
- }
-
- for (i = 0; i < 2; i++) {
- for (j = 0; j < 64; j++) {
- iq_matrix.bScalingLists8x8[i][j] = sps->scaling_lists_8x8[i][j];
- }
- }
- }
-
- GST_TRACE_OBJECT (self, "Getting inverse quantization matrix buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX,
- &d3d11_buffer_size, &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for inv. quantization matrix");
- return FALSE;
- }
-
- memcpy (d3d11_buffer, &iq_matrix, sizeof (DXVA_Qmatrix_H264));
-
- GST_TRACE_OBJECT (self, "Release inverse quantization matrix buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
-
- g_array_unref (dpb_array);
- g_array_set_size (self->slice_list, 0);
-
- return gst_d3d11_h264_dec_get_bitstream_buffer (self);
-}
-
-static gboolean
gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GstVideoCodecFrame * frame, GstH264Picture * picture)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
GstBuffer *view_buffer;
- view_buffer = gst_d3d11_decoder_get_output_view_buffer (self->d3d11_decoder,
+ view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
@@ -762,178 +520,28 @@ gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * decoder,
return TRUE;
}
-static GstFlowReturn
-gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
- GstVideoCodecFrame * frame, GstH264Picture * picture)
+static ID3D11VideoDecoderOutputView *
+gst_d3d11_h264_dec_get_output_view_from_picture (GstD3D11H264Dec * self,
+ GstH264Picture * picture, guint8 * view_id)
{
- GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
- GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
GstBuffer *view_buffer;
-
- GST_LOG_OBJECT (self,
- "Outputting picture %p (poc %d)", picture, picture->pic_order_cnt);
+ ID3D11VideoDecoderOutputView *view;
view_buffer = (GstBuffer *) gst_h264_picture_get_user_data (picture);
-
if (!view_buffer) {
- GST_ERROR_OBJECT (self, "Could not get output view");
- goto error;
- }
-
- if (!gst_d3d11_decoder_process_output (self->d3d11_decoder, vdec,
- self->width, self->height, view_buffer, &frame->output_buffer)) {
- GST_ERROR_OBJECT (self, "Failed to copy buffer");
- goto error;
- }
-
- if (picture->buffer_flags != 0) {
- gboolean interlaced =
- (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_INTERLACED) != 0;
- gboolean tff = (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_TFF) != 0;
-
- GST_TRACE_OBJECT (self,
- "apply buffer flags 0x%x (interlaced %d, top-field-first %d)",
- picture->buffer_flags, interlaced, tff);
- GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
- }
-
- gst_h264_picture_unref (picture);
-
- return gst_video_decoder_finish_frame (vdec, frame);
-
-error:
- gst_video_decoder_drop_frame (vdec, frame);
- gst_h264_picture_unref (picture);
-
- return GST_FLOW_ERROR;
-}
-
-static gboolean
-gst_d3d11_h264_dec_submit_slice_data (GstD3D11H264Dec * self)
-{
- guint buffer_size;
- gpointer buffer;
- guint8 *data;
- gsize offset = 0;
- guint i;
- D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[4];
- gboolean ret;
- DXVA_Slice_H264_Short *slice_data;
-
- if (self->slice_list->len < 1) {
- GST_WARNING_OBJECT (self, "Nothing to submit");
- return FALSE;
- }
-
- memset (buffer_desc, 0, sizeof (buffer_desc));
-
- slice_data = &g_array_index (self->slice_list, DXVA_Slice_H264_Short,
- self->slice_list->len - 1);
-
- /* DXVA2 spec is saying that written bitstream data must be 128 bytes
- * aligned if the bitstream buffer contains end of slice
- * (i.e., wBadSliceChopping == 0 or 2) */
- if (slice_data->wBadSliceChopping == 0 || slice_data->wBadSliceChopping == 2) {
- guint padding =
- MIN (GST_ROUND_UP_128 (self->written_buffer_size) -
- self->written_buffer_size, self->remaining_buffer_size);
-
- if (padding) {
- GST_TRACE_OBJECT (self,
- "Written bitstream buffer size %u is not 128 bytes aligned, "
- "add padding %u bytes", self->written_buffer_size, padding);
- memset (self->bitstream_buffer_data, 0, padding);
- self->written_buffer_size += padding;
- slice_data->SliceBytesInBuffer += padding;
- }
- }
-
- GST_TRACE_OBJECT (self, "Getting slice control buffer");
-
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, &buffer_size, &buffer)) {
- GST_ERROR_OBJECT (self, "Couldn't get slice control buffer");
- return FALSE;
- }
-
- data = (guint8 *) buffer;
- for (i = 0; i < self->slice_list->len; i++) {
- DXVA_Slice_H264_Short *slice_data =
- &g_array_index (self->slice_list, DXVA_Slice_H264_Short, i);
-
- memcpy (data + offset, slice_data, sizeof (DXVA_Slice_H264_Short));
- offset += sizeof (DXVA_Slice_H264_Short);
- }
-
- GST_TRACE_OBJECT (self, "Release slice control buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) {
- GST_ERROR_OBJECT (self, "Failed to release slice control buffer");
- return FALSE;
- }
-
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) {
- GST_ERROR_OBJECT (self, "Failed to release bitstream buffer");
- return FALSE;
- }
-
- buffer_desc[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
- buffer_desc[0].DataOffset = 0;
- buffer_desc[0].DataSize = sizeof (DXVA_PicParams_H264);
-
- buffer_desc[1].BufferType =
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX;
- buffer_desc[1].DataOffset = 0;
- buffer_desc[1].DataSize = sizeof (DXVA_Qmatrix_H264);
-
- buffer_desc[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
- buffer_desc[2].DataOffset = 0;
- buffer_desc[2].DataSize =
- sizeof (DXVA_Slice_H264_Short) * self->slice_list->len;
-
- if (!self->bad_aligned_bitstream_buffer
- && (self->written_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "Written bitstream buffer size %u is not 128 bytes aligned",
- self->written_buffer_size);
- }
-
- buffer_desc[3].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
- buffer_desc[3].DataOffset = 0;
- buffer_desc[3].DataSize = self->written_buffer_size;
-
- ret = gst_d3d11_decoder_submit_decoder_buffers (self->d3d11_decoder,
- 4, buffer_desc);
-
- self->written_buffer_size = 0;
- self->bitstream_buffer_data = NULL;
- self->remaining_buffer_size = 0;
- g_array_set_size (self->slice_list, 0);
-
- return ret;
-}
-
-static gboolean
-gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
- GstH264Picture * picture)
-{
- GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
-
- GST_LOG_OBJECT (self, "end picture %p, (poc %d)",
- picture, picture->pic_order_cnt);
-
- if (!gst_d3d11_h264_dec_submit_slice_data (self)) {
- GST_ERROR_OBJECT (self, "Failed to submit slice data");
- return FALSE;
+ GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
+ return NULL;
}
- if (!gst_d3d11_decoder_end_frame (self->d3d11_decoder)) {
- GST_ERROR_OBJECT (self, "Failed to EndFrame");
- return FALSE;
+ view = gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
+ view_buffer, view_id);
+ if (!view) {
+ GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
+ return NULL;
}
- return TRUE;
+ return view;
}
static void
@@ -1020,8 +628,6 @@ gst_d3d11_h264_dec_fill_picture_params (GstD3D11H264Dec * self,
pps = slice_header->pps;
sps = pps->sequence;
- memset (params, 0, sizeof (DXVA_PicParams_H264));
-
params->MbsConsecutiveFlag = 1;
params->Reserved16Bits = 3;
params->ContinuationFlag = 1;
@@ -1038,127 +644,275 @@ gst_d3d11_h264_dec_fill_picture_params (GstD3D11H264Dec * self,
return TRUE;
}
+static inline void
+init_pic_params (DXVA_PicParams_H264 * params)
+{
+ memset (params, 0, sizeof (DXVA_PicParams_H264));
+ for (guint i = 0; i < G_N_ELEMENTS (params->RefFrameList); i++)
+ params->RefFrameList[i].bPicEntry = 0xff;
+}
+
+static gboolean
+gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
+ GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb)
+{
+ GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
+ DXVA_PicParams_H264 *pic_params = &inner->pic_params;
+ DXVA_Qmatrix_H264 *iq_matrix = &inner->iq_matrix;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
+ GArray *dpb_array;
+ GstH264PPS *pps;
+ guint i, j;
+
+ pps = slice->header.pps;
+
+ view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
+ &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
+ return FALSE;
+ }
+
+ init_pic_params (pic_params);
+ gst_d3d11_h264_dec_fill_picture_params (self, &slice->header, pic_params);
+
+ pic_params->CurrPic.Index7Bits = view_id;
+ pic_params->RefPicFlag = GST_H264_PICTURE_IS_REF (picture);
+ pic_params->frame_num = picture->frame_num;
+
+ if (picture->field == GST_H264_PICTURE_FIELD_TOP_FIELD) {
+ pic_params->CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
+ pic_params->CurrFieldOrderCnt[1] = 0;
+ } else if (picture->field == GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
+ pic_params->CurrFieldOrderCnt[0] = 0;
+ pic_params->CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
+ } else {
+ pic_params->CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
+ pic_params->CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
+ }
+
+ dpb_array = gst_h264_dpb_get_pictures_all (dpb);
+ for (i = 0, j = 0; i < dpb_array->len && j < 16; i++) {
+ GstH264Picture *other = g_array_index (dpb_array, GstH264Picture *, i);
+ guint8 id = 0xff;
+
+ if (!GST_H264_PICTURE_IS_REF (other))
+ continue;
+
+ /* The second field picture will be handled differently */
+ if (other->second_field)
+ continue;
+
+ gst_d3d11_h264_dec_get_output_view_from_picture (self, other, &id);
+ pic_params->RefFrameList[j].Index7Bits = id;
+
+ if (GST_H264_PICTURE_IS_LONG_TERM_REF (other)) {
+ pic_params->RefFrameList[j].AssociatedFlag = 1;
+ pic_params->FrameNumList[j] = other->long_term_frame_idx;
+ } else {
+ pic_params->RefFrameList[j].AssociatedFlag = 0;
+ pic_params->FrameNumList[j] = other->frame_num;
+ }
+
+ switch (other->field) {
+ case GST_H264_PICTURE_FIELD_TOP_FIELD:
+ pic_params->FieldOrderCntList[j][0] = other->top_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x1 << (2 * j);
+ break;
+ case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
+ pic_params->FieldOrderCntList[j][1] = other->bottom_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x1 << (2 * j + 1);
+ break;
+ default:
+ pic_params->FieldOrderCntList[j][0] = other->top_field_order_cnt;
+ pic_params->FieldOrderCntList[j][1] = other->bottom_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x3 << (2 * j);
+ break;
+ }
+
+ if (other->other_field) {
+ GstH264Picture *other_field = other->other_field;
+
+ switch (other_field->field) {
+ case GST_H264_PICTURE_FIELD_TOP_FIELD:
+ pic_params->FieldOrderCntList[j][0] =
+ other_field->top_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x1 << (2 * j);
+ break;
+ case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
+ pic_params->FieldOrderCntList[j][1] =
+ other_field->bottom_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x1 << (2 * j + 1);
+ break;
+ default:
+ break;
+ }
+ }
+
+ pic_params->NonExistingFrameFlags |= (other->nonexisting) << j;
+ j++;
+ }
+ g_array_unref (dpb_array);
+
+ G_STATIC_ASSERT (sizeof (iq_matrix->bScalingLists4x4) ==
+ sizeof (pps->scaling_lists_4x4));
+ memcpy (iq_matrix->bScalingLists4x4, pps->scaling_lists_4x4,
+ sizeof (pps->scaling_lists_4x4));
+
+ G_STATIC_ASSERT (sizeof (iq_matrix->bScalingLists8x8[0]) ==
+ sizeof (pps->scaling_lists_8x8[0]));
+ memcpy (iq_matrix->bScalingLists8x8[0], pps->scaling_lists_8x8[0],
+ sizeof (pps->scaling_lists_8x8[0]));
+ memcpy (iq_matrix->bScalingLists8x8[1], pps->scaling_lists_8x8[1],
+ sizeof (pps->scaling_lists_8x8[1]));
+
+ inner->slice_list.resize (0);
+ inner->bitstream_buffer.resize (0);
+
+ return TRUE;
+}
+
static gboolean
gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
GArray * ref_pic_list1)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
+ DXVA_Slice_H264_Short dxva_slice;
+ /* Annex-B start code prefix written in front of each copied NAL unit */
+ static const guint8 start_code[] = { 0, 0, 1 };
+ const size_t start_code_size = sizeof (start_code);
- {
- guint to_write = slice->nalu.size + 3;
- gboolean is_first = TRUE;
-
- while (to_write > 0) {
- guint bytes_to_copy;
- gboolean is_last = TRUE;
- DXVA_Slice_H264_Short slice_short = { 0, };
-
- if (self->remaining_buffer_size < to_write && self->slice_list->len > 0) {
- if (!gst_d3d11_h264_dec_submit_slice_data (self)) {
- GST_ERROR_OBJECT (self, "Failed to submit bitstream buffers");
- return FALSE;
- }
-
- if (!gst_d3d11_h264_dec_get_bitstream_buffer (self)) {
- GST_ERROR_OBJECT (self, "Failed to get bitstream buffer");
- return FALSE;
- }
- }
+ /* This slice starts at the current end of the growable bitstream buffer.
+ * The whole slice always fits there, so wBadSliceChopping stays 0
+ * ("all bits for the slice are located within the buffer"); the old
+ * fixed-size-buffer chopping logic below is no longer needed */
+ dxva_slice.BSNALunitDataLocation = inner->bitstream_buffer.size ();
+ /* Includes 3 bytes start code prefix */
+ dxva_slice.SliceBytesInBuffer = slice->nalu.size + start_code_size;
+ dxva_slice.wBadSliceChopping = 0;
- /* remaining_buffer_size: the size of remaining d3d11 decoder
- * bitstream memory allowed to write more
- * written_buffer_size: the size of written bytes to this d3d11 decoder
- * bitstream memory
- * bytes_to_copy: the size of which we would write to d3d11 decoder
- * bitstream memory in this loop
- */
-
- bytes_to_copy = to_write;
-
- if (bytes_to_copy > self->remaining_buffer_size) {
- /* if the size of this slice is larger than the size of remaining d3d11
- * decoder bitstream memory, write the data up to the remaining d3d11
- * decoder bitstream memory size and the rest would be written to the
- * next d3d11 bitstream memory */
- bytes_to_copy = self->remaining_buffer_size;
- is_last = FALSE;
- }
+ /* Record the slice control entry; the actual bytes are appended below */
+ inner->slice_list.push_back (dxva_slice);
- if (bytes_to_copy >= 3 && is_first) {
- /* normal case */
- self->bitstream_buffer_data[0] = 0;
- self->bitstream_buffer_data[1] = 0;
- self->bitstream_buffer_data[2] = 1;
- memcpy (self->bitstream_buffer_data + 3,
- slice->nalu.data + slice->nalu.offset, bytes_to_copy - 3);
- } else {
- /* when this nal unit date is splitted into two buffer */
- memcpy (self->bitstream_buffer_data,
- slice->nalu.data + slice->nalu.offset, bytes_to_copy);
- }
+ /* Grow the accumulated buffer by start code + NAL payload size */
+ size_t pos = inner->bitstream_buffer.size ();
+ inner->bitstream_buffer.resize (pos + start_code_size + slice->nalu.size);
- /* For wBadSliceChopping value 0 or 1, BSNALunitDataLocation means
- * the offset of the first start code of this slice in this d3d11
- * memory buffer.
- * 1) If this is the first slice of picture, it should be zero
- * since we write start code at offset 0 (written size before this
- * slice also must be zero).
- * 2) If this is not the first slice of picture but this is the first
- * d3d11 bitstream buffer (meaning that one bitstream buffer contains
- * multiple slices), then this is the written size of buffer
- * excluding this loop.
- * And for wBadSliceChopping value 2 or 3, this should be zero by spec */
- if (is_first)
- slice_short.BSNALunitDataLocation = self->written_buffer_size;
- else
- slice_short.BSNALunitDataLocation = 0;
- slice_short.SliceBytesInBuffer = bytes_to_copy;
-
- /* wBadSliceChopping: (dxva h264 spec.)
- * 0: All bits for the slice are located within the corresponding
- * bitstream data buffer
- * 1: The bitstream data buffer contains the start of the slice,
- * but not the entire slice, because the buffer is full
- * 2: The bitstream data buffer contains the end of the slice.
- * It does not contain the start of the slice, because the start of
- * the slice was located in the previous bitstream data buffer.
- * 3: The bitstream data buffer does not contain the start of the slice
- * (because the start of the slice was located in the previous
- * bitstream data buffer), and it does not contain the end of the slice
- * (because the current bitstream data buffer is also full).
- */
- if (is_last && is_first) {
- slice_short.wBadSliceChopping = 0;
- } else if (!is_last && is_first) {
- slice_short.wBadSliceChopping = 1;
- } else if (is_last && !is_first) {
- slice_short.wBadSliceChopping = 2;
- } else {
- slice_short.wBadSliceChopping = 3;
- }
+ /* Fill start code prefix */
+ memcpy (&inner->bitstream_buffer[0] + pos, start_code, start_code_size);
- g_array_append_val (self->slice_list, slice_short);
- self->remaining_buffer_size -= bytes_to_copy;
- self->written_buffer_size += bytes_to_copy;
- self->bitstream_buffer_data += bytes_to_copy;
- is_first = FALSE;
- to_write -= bytes_to_copy;
- }
- }
+ /* Copy bitstream */
+ memcpy (&inner->bitstream_buffer[0] + pos + start_code_size,
+ slice->nalu.data + slice->nalu.offset, slice->nalu.size);
return TRUE;
}
-typedef struct
+/* Submits everything accumulated for the current picture -- DXVA picture
+ * parameters, inverse quantization matrix, slice control list and the
+ * bitstream buffer -- to the d3d11 decoder in a single call */
+static gboolean
+gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
+ GstH264Picture * picture)
{
- guint width;
- guint height;
-} GstD3D11H264DecResolution;
+ GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
+ size_t bitstream_buffer_size;
+ size_t bitstream_pos;
+ GstD3D11DecodeInputStreamArgs input_args;
+
+ GST_LOG_OBJECT (self, "end picture %p, (poc %d)",
+ picture, picture->pic_order_cnt);
+
+ /* decode_slice() must have queued at least one slice by now */
+ if (inner->bitstream_buffer.empty () || inner->slice_list.empty ()) {
+ GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
+ return FALSE;
+ }
+
+ view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
+ &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
+ return FALSE;
+ }
+
+ memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
+
+ bitstream_pos = inner->bitstream_buffer.size ();
+ bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);
+
+ if (bitstream_buffer_size > bitstream_pos) {
+ size_t padding = bitstream_buffer_size - bitstream_pos;
+
+ /* As per DXVA spec, total amount of bitstream buffer size should be
+ * 128 bytes aligned. If actual data is not multiple of 128 bytes,
+ * the last slice data needs to be zero-padded */
+ inner->bitstream_buffer.resize (bitstream_buffer_size, 0);
+
+ /* Account the zero padding to the last slice so that the slice sizes
+ * still sum up to the submitted bitstream size */
+ DXVA_Slice_H264_Short & slice = inner->slice_list.back ();
+ slice.SliceBytesInBuffer += padding;
+ }
+
+ input_args.picture_params = &inner->pic_params;
+ input_args.picture_params_size = sizeof (DXVA_PicParams_H264);
+ input_args.slice_control = &inner->slice_list[0];
+ input_args.slice_control_size =
+ sizeof (DXVA_Slice_H264_Short) * inner->slice_list.size ();
+ input_args.bitstream = &inner->bitstream_buffer[0];
+ input_args.bitstream_size = inner->bitstream_buffer.size ();
+ input_args.inverse_quantization_matrix = &inner->iq_matrix;
+ input_args.inverse_quantization_matrix_size = sizeof (DXVA_Qmatrix_H264);
+
+ return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
+ view, &input_args);
+}
+
+/* Copies the decoded picture's output view into the codec frame's output
+ * buffer, applies interlace buffer flags, and finishes the frame downstream.
+ * On any failure the picture is unreffed, the frame released, and
+ * GST_FLOW_ERROR returned */
+static GstFlowReturn
+gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
+ GstVideoCodecFrame * frame, GstH264Picture * picture)
+{
+ GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
+ GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
+ GstBuffer *view_buffer;
+
+ GST_LOG_OBJECT (self,
+ "Outputting picture %p (poc %d)", picture, picture->pic_order_cnt);
+
+ /* NOTE(review): the view buffer is carried as picture user data;
+ * presumably attached when the picture was allocated -- not visible here */
+ view_buffer = (GstBuffer *) gst_h264_picture_get_user_data (picture);
+
+ if (!view_buffer) {
+ GST_ERROR_OBJECT (self, "Could not get output view");
+ goto error;
+ }
+
+ if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
+ inner->width, inner->height, view_buffer, &frame->output_buffer)) {
+ GST_ERROR_OBJECT (self, "Failed to copy buffer");
+ goto error;
+ }
+
+ /* Propagate interlace-related flags onto the output buffer */
+ if (picture->buffer_flags != 0) {
+ gboolean interlaced =
+ (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_INTERLACED) != 0;
+ gboolean tff = (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_TFF) != 0;
+
+ GST_TRACE_OBJECT (self,
+ "apply buffer flags 0x%x (interlaced %d, top-field-first %d)",
+ picture->buffer_flags, interlaced, tff);
+ GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
+ }
+
+ gst_h264_picture_unref (picture);
+
+ return gst_video_decoder_finish_frame (vdec, frame);
+
+error:
+ gst_h264_picture_unref (picture);
+ gst_video_decoder_release_frame (vdec, frame);
+
+ return GST_FLOW_ERROR;
+}
void
gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
- GstD3D11Decoder * decoder, guint rank, gboolean legacy)
+ guint rank, gboolean legacy)
{
GType type;
gchar *type_name;
@@ -1178,19 +932,14 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
(GInstanceInitFunc) gst_d3d11_h264_dec_init,
};
const GUID *supported_profile = NULL;
- /* values were taken from chromium. See supported_profile_helper.cc */
- GstD3D11H264DecResolution resolutions_to_check[] = {
- {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160},
- {4096, 2304}
- };
GstCaps *sink_caps = NULL;
GstCaps *src_caps = NULL;
guint max_width = 0;
guint max_height = 0;
guint resolution;
- ret = gst_d3d11_decoder_get_supported_decoder_profile (decoder,
- GST_D3D11_CODEC_H264, GST_VIDEO_FORMAT_NV12, &supported_profile);
+ ret = gst_d3d11_decoder_get_supported_decoder_profile (device,
+ GST_DXVA_CODEC_H264, GST_VIDEO_FORMAT_NV12, &supported_profile);
if (!ret) {
GST_WARNING_OBJECT (device, "decoder profile unavailable");
@@ -1198,7 +947,7 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
}
ret =
- gst_d3d11_decoder_supports_format (decoder, supported_profile,
+ gst_d3d11_decoder_supports_format (device, supported_profile,
DXGI_FORMAT_NV12);
if (!ret) {
GST_FIXME_OBJECT (device, "device does not support NV12 format");
@@ -1208,15 +957,15 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
/* we will not check the maximum resolution for legacy devices.
* it might cause crash */
if (legacy) {
- max_width = resolutions_to_check[0].width;
- max_height = resolutions_to_check[0].height;
+ max_width = gst_dxva_resolutions[0].width;
+ max_height = gst_dxva_resolutions[0].height;
} else {
- for (i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
- if (gst_d3d11_decoder_supports_resolution (decoder, supported_profile,
- DXGI_FORMAT_NV12, resolutions_to_check[i].width,
- resolutions_to_check[i].height)) {
- max_width = resolutions_to_check[i].width;
- max_height = resolutions_to_check[i].height;
+ for (i = 0; i < G_N_ELEMENTS (gst_dxva_resolutions); i++) {
+ if (gst_d3d11_decoder_supports_resolution (device, supported_profile,
+ DXGI_FORMAT_NV12, gst_dxva_resolutions[i].width,
+ gst_dxva_resolutions[i].height)) {
+ max_width = gst_dxva_resolutions[i].width;
+ max_height = gst_dxva_resolutions[i].height;
GST_DEBUG_OBJECT (device,
"device support resolution %dx%d", max_width, max_height);
@@ -1249,7 +998,7 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
"height", GST_TYPE_INT_RANGE, 1, resolution, NULL);
type_info.class_data =
- gst_d3d11_decoder_class_data_new (device, GST_D3D11_CODEC_H264,
+ gst_d3d11_decoder_class_data_new (device, GST_DXVA_CODEC_H264,
sink_caps, src_caps);
type_name = g_strdup ("GstD3D11H264Dec");