summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--sys/d3d11/gstd3d11av1dec.cpp359
-rw-r--r--sys/d3d11/gstd3d11av1dec.h1
-rw-r--r--sys/d3d11/gstd3d11decoder.cpp348
-rw-r--r--sys/d3d11/gstd3d11decoder.h78
-rw-r--r--sys/d3d11/gstd3d11h264dec.cpp993
-rw-r--r--sys/d3d11/gstd3d11h264dec.h1
-rw-r--r--sys/d3d11/gstd3d11h265dec.cpp1185
-rw-r--r--sys/d3d11/gstd3d11h265dec.h1
-rw-r--r--sys/d3d11/gstd3d11mpeg2dec.cpp539
-rw-r--r--sys/d3d11/gstd3d11mpeg2dec.h1
-rw-r--r--sys/d3d11/gstd3d11vp8dec.cpp463
-rw-r--r--sys/d3d11/gstd3d11vp8dec.h1
-rw-r--r--sys/d3d11/gstd3d11vp9dec.cpp487
-rw-r--r--sys/d3d11/gstd3d11vp9dec.h1
-rw-r--r--sys/d3d11/plugin.cpp41
15 files changed, 1801 insertions, 2698 deletions
diff --git a/sys/d3d11/gstd3d11av1dec.cpp b/sys/d3d11/gstd3d11av1dec.cpp
index e148dbb6c..aa1392d6a 100644
--- a/sys/d3d11/gstd3d11av1dec.cpp
+++ b/sys/d3d11/gstd3d11av1dec.cpp
@@ -40,6 +40,7 @@
#include <gst/codecs/gstav1decoder.h>
#include <string.h>
+#include <vector>
/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
@@ -353,26 +354,28 @@ typedef struct _GST_DXVA_Tile_AV1
/* reference list 8 + 4 margin */
#define NUM_OUTPUT_VIEW 12
-typedef struct _GstD3D11AV1Dec
+/* *INDENT-OFF* */
+typedef struct _GstD3D11AV1DecInner
{
- GstAV1Decoder parent;
-
- GstD3D11Device *device;
- GstD3D11Decoder *d3d11_decoder;
+ GstD3D11Device *device = nullptr;
+ GstD3D11Decoder *d3d11_decoder = nullptr;
GstAV1SequenceHeaderOBU seq_hdr;
-
GST_DXVA_PicParams_AV1 pic_params;
- /* Array of GST_DXVA_Tile_AV1 */
- GArray *tile_list;
- guint written_buffer_size;
- guint remaining_buffer_size;
- guint8 *bitstream_buffer_data;
+ std::vector<GST_DXVA_Tile_AV1> tile_list;
+ std::vector<guint8> bitstream_buffer;
+
+ guint max_width = 0;
+ guint max_height = 0;
+ guint bitdepth = 0;
+} GstD3D11AV1DecInner;
+/* *INDENT-ON* */
- guint max_width;
- guint max_height;
- guint bitdepth;
+typedef struct _GstD3D11AV1Dec
+{
+ GstAV1Decoder parent;
+ GstD3D11AV1DecInner *inner;
} GstD3D11AV1Dec;
typedef struct _GstD3D11AV1DecClass
@@ -389,7 +392,7 @@ static GstElementClass *parent_class = NULL;
static void gst_d3d11_av1_dec_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-static void gst_d3d11_av1_dec_dispose (GObject * object);
+static void gst_d3d11_av1_dec_finalize (GObject * object);
static void gst_d3d11_av1_dec_set_context (GstElement * element,
GstContext * context);
@@ -430,7 +433,7 @@ gst_d3d11_av1_dec_class_init (GstD3D11AV1DecClass * klass, gpointer data)
GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;
gobject_class->get_property = gst_d3d11_av1_dec_get_property;
- gobject_class->dispose = gst_d3d11_av1_dec_dispose;
+ gobject_class->finalize = gst_d3d11_av1_dec_finalize;
element_class->set_context =
GST_DEBUG_FUNCPTR (gst_d3d11_av1_dec_set_context);
@@ -467,7 +470,7 @@ gst_d3d11_av1_dec_class_init (GstD3D11AV1DecClass * klass, gpointer data)
static void
gst_d3d11_av1_dec_init (GstD3D11AV1Dec * self)
{
- self->tile_list = g_array_new (FALSE, TRUE, sizeof (GST_DXVA_Tile_AV1));
+ self->inner = new GstD3D11AV1DecInner ();
}
static void
@@ -481,24 +484,25 @@ gst_d3d11_av1_dec_get_property (GObject * object, guint prop_id,
}
static void
-gst_d3d11_av1_dec_dispose (GObject * object)
+gst_d3d11_av1_dec_finalize (GObject * object)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (object);
- g_clear_pointer (&self->tile_list, g_array_unref);
+ delete self->inner;
- G_OBJECT_CLASS (parent_class)->dispose (object);
+ G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_d3d11_av1_dec_set_context (GstElement * element, GstContext * context)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (element);
+ GstD3D11AV1DecInner *inner = self->inner;
GstD3D11AV1DecClass *klass = GST_D3D11_AV1_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
gst_d3d11_handle_set_context (element, context, cdata->adapter,
- &self->device);
+ &inner->device);
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@@ -507,20 +511,22 @@ static gboolean
gst_d3d11_av1_dec_open (GstVideoDecoder * decoder)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
GstD3D11AV1DecClass *klass = GST_D3D11_AV1_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), cdata->adapter,
- &self->device)) {
+ &inner->device)) {
GST_ERROR_OBJECT (self, "Cannot create d3d11device");
return FALSE;
}
- self->d3d11_decoder = gst_d3d11_decoder_new (self->device);
+ inner->d3d11_decoder = gst_d3d11_decoder_new (inner->device,
+ GST_DXVA_CODEC_AV1);
- if (!self->d3d11_decoder) {
+ if (!inner->d3d11_decoder) {
GST_ERROR_OBJECT (self, "Cannot create d3d11 decoder");
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->device);
return FALSE;
}
@@ -531,9 +537,10 @@ static gboolean
gst_d3d11_av1_dec_close (GstVideoDecoder * decoder)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
- gst_clear_object (&self->d3d11_decoder);
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->d3d11_decoder);
+ gst_clear_object (&inner->device);
return TRUE;
}
@@ -542,8 +549,9 @@ static gboolean
gst_d3d11_av1_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_negotiate (self->d3d11_decoder, decoder))
+ if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
@@ -554,8 +562,9 @@ gst_d3d11_av1_dec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_decide_allocation (self->d3d11_decoder,
+ if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
decoder, query)) {
return FALSE;
}
@@ -568,11 +577,12 @@ static gboolean
gst_d3d11_av1_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
- query, self->device)) {
+ query, inner->device)) {
return TRUE;
}
break;
@@ -587,15 +597,16 @@ static gboolean
gst_d3d11_av1_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, TRUE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
break;
case GST_EVENT_FLUSH_STOP:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, FALSE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
break;
default:
break;
@@ -609,6 +620,7 @@ gst_d3d11_av1_dec_new_sequence (GstAV1Decoder * decoder,
const GstAV1SequenceHeaderOBU * seq_hdr)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
gboolean modified = FALSE;
guint max_width, max_height;
@@ -624,33 +636,33 @@ gst_d3d11_av1_dec_new_sequence (GstAV1Decoder * decoder,
return FALSE;
}
- self->seq_hdr = *seq_hdr;
+ inner->seq_hdr = *seq_hdr;
- if (self->bitdepth != seq_hdr->bit_depth) {
- GST_INFO_OBJECT (self, "Bitdepth changed %d -> %d", self->bitdepth,
+ if (inner->bitdepth != seq_hdr->bit_depth) {
+ GST_INFO_OBJECT (self, "Bitdepth changed %d -> %d", inner->bitdepth,
seq_hdr->bit_depth);
- self->bitdepth = seq_hdr->bit_depth;
+ inner->bitdepth = seq_hdr->bit_depth;
modified = TRUE;
}
max_width = seq_hdr->max_frame_width_minus_1 + 1;
max_height = seq_hdr->max_frame_height_minus_1 + 1;
- if (self->max_width != max_width || self->max_height != max_height) {
+ if (inner->max_width != max_width || inner->max_height != max_height) {
GST_INFO_OBJECT (self, "Resolution changed %dx%d -> %dx%d",
- self->max_width, self->max_height, max_width, max_height);
- self->max_width = max_width;
- self->max_height = max_height;
+ inner->max_width, inner->max_height, max_width, max_height);
+ inner->max_width = max_width;
+ inner->max_height = max_height;
modified = TRUE;
}
- if (modified || !gst_d3d11_decoder_is_configured (self->d3d11_decoder)) {
+ if (modified || !gst_d3d11_decoder_is_configured (inner->d3d11_decoder)) {
GstVideoInfo info;
GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
- if (self->bitdepth == 8) {
+ if (inner->bitdepth == 8) {
out_format = GST_VIDEO_FORMAT_NV12;
- } else if (self->bitdepth == 10) {
+ } else if (inner->bitdepth == 10) {
out_format = GST_VIDEO_FORMAT_P010_10LE;
} else {
GST_WARNING_OBJECT (self, "Invalid bit-depth %d", seq_hdr->bit_depth);
@@ -658,11 +670,11 @@ gst_d3d11_av1_dec_new_sequence (GstAV1Decoder * decoder,
}
gst_video_info_set_format (&info,
- out_format, self->max_width, self->max_height);
+ out_format, inner->max_width, inner->max_height);
- if (!gst_d3d11_decoder_configure (self->d3d11_decoder, GST_D3D11_CODEC_AV1,
- decoder->input_state, &info, (gint) self->max_width,
- (gint) self->max_height, NUM_OUTPUT_VIEW)) {
+ if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
+ decoder->input_state, &info, (gint) inner->max_width,
+ (gint) inner->max_height, NUM_OUTPUT_VIEW)) {
GST_ERROR_OBJECT (self, "Failed to create decoder");
return FALSE;
}
@@ -681,9 +693,10 @@ gst_d3d11_av1_dec_new_picture (GstAV1Decoder * decoder,
GstVideoCodecFrame * frame, GstAV1Picture * picture)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
GstBuffer *view_buffer;
- view_buffer = gst_d3d11_decoder_get_output_view_buffer (self->d3d11_decoder,
+ view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
@@ -726,28 +739,11 @@ gst_d3d11_av1_dec_duplicate_picture (GstAV1Decoder * decoder,
return new_picture;
}
-static gboolean
-gst_d3d11_av1_dec_get_bitstream_buffer (GstD3D11AV1Dec * self)
-{
- GST_TRACE_OBJECT (self, "Getting bitstream buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &self->remaining_buffer_size,
- (gpointer *) & self->bitstream_buffer_data)) {
- GST_ERROR_OBJECT (self, "Faild to get bitstream buffer");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Got bitstream buffer %p with size %d",
- self->bitstream_buffer_data, self->remaining_buffer_size);
- self->written_buffer_size = 0;
-
- return TRUE;
-}
-
static ID3D11VideoDecoderOutputView *
gst_d3d11_av1_dec_get_output_view_from_picture (GstD3D11AV1Dec * self,
GstAV1Picture * picture, guint8 * view_id)
{
+ GstD3D11AV1DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
@@ -758,7 +754,7 @@ gst_d3d11_av1_dec_get_output_view_from_picture (GstD3D11AV1Dec * self,
}
view =
- gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
+ gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
@@ -773,10 +769,11 @@ gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
GstAV1Picture * picture, GstAV1Dpb * dpb)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
- const GstAV1SequenceHeaderOBU *seq_hdr = &self->seq_hdr;
+ GstD3D11AV1DecInner *inner = self->inner;
+ const GstAV1SequenceHeaderOBU *seq_hdr = &inner->seq_hdr;
const GstAV1FrameHeaderOBU *frame_hdr = &picture->frame_hdr;
ID3D11VideoDecoderOutputView *view;
- GST_DXVA_PicParams_AV1 *pic_params = &self->pic_params;
+ GST_DXVA_PicParams_AV1 *pic_params = &inner->pic_params;
guint8 view_id = 0xff;
guint i, j;
@@ -787,13 +784,6 @@ gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
return FALSE;
}
- GST_TRACE_OBJECT (self, "Begin frame");
-
- if (!gst_d3d11_decoder_begin_frame (self->d3d11_decoder, view, 0, NULL)) {
- GST_ERROR_OBJECT (self, "Failed to begin frame");
- return FALSE;
- }
-
memset (pic_params, 0, sizeof (GST_DXVA_PicParams_AV1));
pic_params->width = frame_hdr->frame_width;
@@ -1097,7 +1087,10 @@ gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
pic_params->film_grain.cr_offset = frame_hdr->film_grain_params.cr_offset;
}
- return gst_d3d11_av1_dec_get_bitstream_buffer (self);
+ inner->bitstream_buffer.resize (0);
+ inner->tile_list.resize (0);
+
+ return TRUE;
}
static gboolean
@@ -1105,43 +1098,27 @@ gst_d3d11_av1_dec_decode_tile (GstAV1Decoder * decoder,
GstAV1Picture * picture, GstAV1Tile * tile)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
GstAV1TileGroupOBU *tile_group = &tile->tile_group;
- ID3D11VideoDecoderOutputView *view;
- guint8 view_id = 0xff;
- guint i;
- view = gst_d3d11_av1_dec_get_output_view_from_picture (self, picture,
- &view_id);
- if (!view) {
- GST_ERROR_OBJECT (self, "current picture does not have output view handle");
- return FALSE;
- }
+ if (tile_group->num_tiles > inner->tile_list.size ())
+ inner->tile_list.resize (tile_group->num_tiles);
- if (tile_group->num_tiles > self->tile_list->len)
- g_array_set_size (self->tile_list, tile_group->num_tiles);
-
- g_assert (tile_group->tg_end < self->tile_list->len);
-
- if (tile->obu.obu_size > self->remaining_buffer_size) {
- GST_ERROR_OBJECT (self, "Too large OBU size");
- return FALSE;
- }
+ g_assert (tile_group->tg_end < inner->tile_list.size ());
GST_LOG_OBJECT (self, "Decode tile, tile count %d (start: %d - end: %d)",
tile_group->num_tiles, tile_group->tg_start, tile_group->tg_end);
- for (i = tile_group->tg_start; i <= tile_group->tg_end; i++) {
- GST_DXVA_Tile_AV1 *dxva_tile =
- &g_array_index (self->tile_list, GST_DXVA_Tile_AV1, i);
+ for (guint i = tile_group->tg_start; i <= tile_group->tg_end; i++) {
+ GST_DXVA_Tile_AV1 *dxva_tile = &inner->tile_list[i];
GST_TRACE_OBJECT (self,
- "Written size %d, Tile offset %d, size %d, row %d, col %d",
- self->written_buffer_size,
+ "Tile offset %d, size %d, row %d, col %d",
tile_group->entry[i].tile_offset, tile_group->entry[i].tile_size,
tile_group->entry[i].tile_row, tile_group->entry[i].tile_col);
- dxva_tile->DataOffset =
- self->written_buffer_size + tile_group->entry[i].tile_offset;
+ dxva_tile->DataOffset = inner->bitstream_buffer.size () +
+ tile_group->entry[i].tile_offset;
dxva_tile->DataSize = tile_group->entry[i].tile_size;
dxva_tile->row = tile_group->entry[i].tile_row;
dxva_tile->column = tile_group->entry[i].tile_col;
@@ -1151,10 +1128,11 @@ gst_d3d11_av1_dec_decode_tile (GstAV1Decoder * decoder,
GST_TRACE_OBJECT (self, "OBU size %d", tile->obu.obu_size);
- memcpy (self->bitstream_buffer_data, tile->obu.data, tile->obu.obu_size);
- self->remaining_buffer_size -= tile->obu.obu_size;
- self->bitstream_buffer_data += tile->obu.obu_size;
- self->written_buffer_size += tile->obu.obu_size;
+ size_t pos = inner->bitstream_buffer.size ();
+ inner->bitstream_buffer.resize (pos + tile->obu.obu_size);
+
+ memcpy (&inner->bitstream_buffer[0] + pos,
+ tile->obu.data, tile->obu.obu_size);
return TRUE;
}
@@ -1163,112 +1141,52 @@ static gboolean
gst_d3d11_av1_dec_end_picture (GstAV1Decoder * decoder, GstAV1Picture * picture)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
- guint d3d11_buffer_size;
- gpointer d3d11_buffer;
- guint padding;
- D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[3];
- guint i;
- guint8 *data;
- gsize offset = 0;
-
- GST_TRACE_OBJECT (self, "Getting picture params buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for picture parameters");
- return FALSE;
- }
-
- if (d3d11_buffer_size < sizeof (GST_DXVA_PicParams_AV1)) {
- GST_ERROR_OBJECT (self,
- "Too small picture param buffer %d", d3d11_buffer_size);
-
- gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS);
- return FALSE;
- }
-
- memcpy (d3d11_buffer, &self->pic_params, sizeof (GST_DXVA_PicParams_AV1));
-
- GST_TRACE_OBJECT (self, "Release picture param decoder buffer");
-
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Getting slice control buffer");
-
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self, "Couldn't get slice control buffer");
- return FALSE;
- }
+ GstD3D11AV1DecInner *inner = self->inner;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
+ size_t bitstream_buffer_size;
+ size_t bitstream_pos;
+ GstD3D11DecodeInputStreamArgs input_args;
- if (d3d11_buffer_size < sizeof (GST_DXVA_Tile_AV1) * self->tile_list->len) {
- GST_ERROR_OBJECT (self, "Too small slice control buffer %d",
- d3d11_buffer_size);
- gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL);
+ if (inner->bitstream_buffer.empty () || inner->tile_list.empty ()) {
+ GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
return FALSE;
}
- data = (guint8 *) d3d11_buffer;
- for (i = 0; i < self->tile_list->len; i++) {
- GST_DXVA_Tile_AV1 *dxva_tile =
- &g_array_index (self->tile_list, GST_DXVA_Tile_AV1, i);
-
- memcpy (data + offset, dxva_tile, sizeof (GST_DXVA_Tile_AV1));
- offset += sizeof (GST_DXVA_Tile_AV1);
- }
-
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) {
- GST_ERROR_OBJECT (self, "Failed to release slice control buffer");
+ view = gst_d3d11_av1_dec_get_output_view_from_picture (self, picture,
+ &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
}
- padding = MIN (GST_ROUND_UP_128 (self->written_buffer_size) -
- self->written_buffer_size, self->remaining_buffer_size);
- if (padding) {
- memset (self->bitstream_buffer_data, 0, padding);
- self->written_buffer_size += padding;
- }
+ memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) {
- GST_ERROR_OBJECT (self, "Failed to release bitstream buffer");
+ bitstream_pos = inner->bitstream_buffer.size ();
+ bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);
- return FALSE;
- }
+ if (bitstream_buffer_size > bitstream_pos) {
+ size_t padding = bitstream_buffer_size - bitstream_pos;
- buffer_desc[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
- buffer_desc[0].DataOffset = 0;
- buffer_desc[0].DataSize = sizeof (GST_DXVA_PicParams_AV1);
+ /* As per DXVA spec, total amount of bitstream buffer size should be
+ * 128 bytes aligned. If actual data is not multiple of 128 bytes,
+ * the last slice data needs to be zero-padded */
+ inner->bitstream_buffer.resize (bitstream_buffer_size, 0);
- buffer_desc[1].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
- buffer_desc[1].DataOffset = 0;
- buffer_desc[1].DataSize = sizeof (GST_DXVA_Tile_AV1) * self->tile_list->len;
-
- buffer_desc[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
- buffer_desc[2].DataOffset = 0;
- buffer_desc[2].DataSize = self->written_buffer_size;
-
- if (!gst_d3d11_decoder_submit_decoder_buffers (self->d3d11_decoder,
- 3, buffer_desc)) {
- GST_ERROR_OBJECT (self, "Couldn't submit decoder buffers");
- return FALSE;
+ GST_DXVA_Tile_AV1 & tile = inner->tile_list.back ();
+ tile.DataSize += padding;
}
- if (!gst_d3d11_decoder_end_frame (self->d3d11_decoder)) {
- GST_ERROR_OBJECT (self, "Failed to EndFrame");
- return FALSE;
- }
+ input_args.picture_params = &inner->pic_params;
+ input_args.picture_params_size = sizeof (GST_DXVA_PicParams_AV1);
+ input_args.slice_control = &inner->tile_list[0];
+ input_args.slice_control_size =
+ sizeof (GST_DXVA_Tile_AV1) * inner->tile_list.size ();
+ input_args.bitstream = &inner->bitstream_buffer[0];
+ input_args.bitstream_size = inner->bitstream_buffer.size ();
- return TRUE;
+ return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
+ view, &input_args);
}
static GstFlowReturn
@@ -1276,6 +1194,7 @@ gst_d3d11_av1_dec_output_picture (GstAV1Decoder * decoder,
GstVideoCodecFrame * frame, GstAV1Picture * picture)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
+ GstD3D11AV1DecInner *inner = self->inner;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstBuffer *view_buffer;
@@ -1289,7 +1208,7 @@ gst_d3d11_av1_dec_output_picture (GstAV1Decoder * decoder,
goto error;
}
- if (!gst_d3d11_decoder_process_output (self->d3d11_decoder, vdec,
+ if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
picture->frame_hdr.render_width, picture->frame_hdr.render_height,
view_buffer, &frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
@@ -1302,20 +1221,14 @@ gst_d3d11_av1_dec_output_picture (GstAV1Decoder * decoder,
error:
gst_av1_picture_unref (picture);
- gst_video_decoder_drop_frame (vdec, frame);
+ gst_video_decoder_release_frame (vdec, frame);
return GST_FLOW_ERROR;
}
-typedef struct
-{
- guint width;
- guint height;
-} GstD3D11AV1DecResolution;
-
void
gst_d3d11_av1_dec_register (GstPlugin * plugin, GstD3D11Device * device,
- GstD3D11Decoder * decoder, guint rank)
+ guint rank)
{
GType type;
gchar *type_name;
@@ -1334,10 +1247,6 @@ gst_d3d11_av1_dec_register (GstPlugin * plugin, GstD3D11Device * device,
(GInstanceInitFunc) gst_d3d11_av1_dec_init,
};
const GUID *profile_guid = NULL;
- /* values were taken from chromium. See supported_profile_helper.cc */
- GstD3D11AV1DecResolution resolutions_to_check[] = {
- {4096, 2160}, {4096, 2304}, {7680, 4320}, {8192, 4320}, {8192, 8192}
- };
GstCaps *sink_caps = NULL;
GstCaps *src_caps = NULL;
guint max_width = 0;
@@ -1347,29 +1256,29 @@ gst_d3d11_av1_dec_register (GstPlugin * plugin, GstD3D11Device * device,
gboolean have_gray = FALSE;
gboolean have_gray10 = FALSE;
- if (!gst_d3d11_decoder_get_supported_decoder_profile (decoder,
- GST_D3D11_CODEC_AV1, GST_VIDEO_FORMAT_NV12, &profile_guid)) {
+ if (!gst_d3d11_decoder_get_supported_decoder_profile (device,
+ GST_DXVA_CODEC_AV1, GST_VIDEO_FORMAT_NV12, &profile_guid)) {
GST_INFO_OBJECT (device, "device does not support VP8 decoding");
return;
}
- have_p010 = gst_d3d11_decoder_supports_format (decoder,
+ have_p010 = gst_d3d11_decoder_supports_format (device,
profile_guid, DXGI_FORMAT_P010);
- have_gray = gst_d3d11_decoder_supports_format (decoder,
+ have_gray = gst_d3d11_decoder_supports_format (device,
profile_guid, DXGI_FORMAT_R8_UNORM);
- have_gray10 = gst_d3d11_decoder_supports_format (decoder,
+ have_gray10 = gst_d3d11_decoder_supports_format (device,
profile_guid, DXGI_FORMAT_R16_UNORM);
GST_INFO_OBJECT (device, "Decoder support P010: %d, R8: %d, R16: %d",
have_p010, have_gray, have_gray10);
/* TODO: add test monochrome formats */
- for (i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
- if (gst_d3d11_decoder_supports_resolution (decoder, profile_guid,
- DXGI_FORMAT_NV12, resolutions_to_check[i].width,
- resolutions_to_check[i].height)) {
- max_width = resolutions_to_check[i].width;
- max_height = resolutions_to_check[i].height;
+ for (i = 0; i < G_N_ELEMENTS (gst_dxva_resolutions); i++) {
+ if (gst_d3d11_decoder_supports_resolution (device, profile_guid,
+ DXGI_FORMAT_NV12, gst_dxva_resolutions[i].width,
+ gst_dxva_resolutions[i].height)) {
+ max_width = gst_dxva_resolutions[i].width;
+ max_height = gst_dxva_resolutions[i].height;
GST_DEBUG_OBJECT (device,
"device support resolution %dx%d", max_width, max_height);
@@ -1419,7 +1328,7 @@ gst_d3d11_av1_dec_register (GstPlugin * plugin, GstD3D11Device * device,
"height", GST_TYPE_INT_RANGE, 1, resolution, NULL);
type_info.class_data =
- gst_d3d11_decoder_class_data_new (device, GST_D3D11_CODEC_AV1,
+ gst_d3d11_decoder_class_data_new (device, GST_DXVA_CODEC_AV1,
sink_caps, src_caps);
type_name = g_strdup ("GstD3D11AV1Dec");
diff --git a/sys/d3d11/gstd3d11av1dec.h b/sys/d3d11/gstd3d11av1dec.h
index 7c5174300..f46ef64cd 100644
--- a/sys/d3d11/gstd3d11av1dec.h
+++ b/sys/d3d11/gstd3d11av1dec.h
@@ -26,7 +26,6 @@ G_BEGIN_DECLS
void gst_d3d11_av1_dec_register (GstPlugin * plugin,
GstD3D11Device * device,
- GstD3D11Decoder * decoder,
guint rank);
G_END_DECLS
diff --git a/sys/d3d11/gstd3d11decoder.cpp b/sys/d3d11/gstd3d11decoder.cpp
index a9f59e4ad..ce65b537f 100644
--- a/sys/d3d11/gstd3d11decoder.cpp
+++ b/sys/d3d11/gstd3d11decoder.cpp
@@ -61,8 +61,8 @@
#include <timeapi.h>
#endif
-GST_DEBUG_CATEGORY (d3d11_decoder_debug);
-#define GST_CAT_DEFAULT d3d11_decoder_debug
+GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_decoder_debug);
+#define GST_CAT_DEFAULT gst_d3d11_decoder_debug
/* GUID might not be defined in MinGW header */
DEFINE_GUID (GST_GUID_D3D11_DECODER_PROFILE_H264_IDCT_FGT, 0x1b81be67, 0xa0c7,
@@ -146,7 +146,7 @@ struct _GstD3D11Decoder
GstVideoInfo info;
GstVideoInfo output_info;
- GstD3D11Codec codec;
+ GstDXVACodec codec;
gint coded_width;
gint coded_height;
DXGI_FORMAT decoder_format;
@@ -214,9 +214,6 @@ gst_d3d11_decoder_class_init (GstD3D11DecoderClass * klass)
"D3D11 Devicd to use", GST_TYPE_D3D11_DEVICE,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY |
G_PARAM_STATIC_STRINGS)));
-
- GST_DEBUG_CATEGORY_INIT (d3d11_decoder_debug,
- "d3d11decoder", 0, "Direct3D11 Base Video Decoder object");
}
static void
@@ -359,11 +356,13 @@ gst_d3d11_decoder_finalize (GObject * obj)
}
GstD3D11Decoder *
-gst_d3d11_decoder_new (GstD3D11Device * device)
+gst_d3d11_decoder_new (GstD3D11Device * device, GstDXVACodec codec)
{
GstD3D11Decoder *self;
- g_return_val_if_fail (GST_IS_D3D11_DEVICE (device), NULL);
+ g_return_val_if_fail (GST_IS_D3D11_DEVICE (device), nullptr);
+ g_return_val_if_fail (codec > GST_DXVA_CODEC_NONE, nullptr);
+ g_return_val_if_fail (codec < GST_DXVA_CODEC_LAST, nullptr);
self = (GstD3D11Decoder *)
g_object_new (GST_TYPE_D3D11_DECODER, "device", device, NULL);
@@ -373,6 +372,8 @@ gst_d3d11_decoder_new (GstD3D11Device * device)
return NULL;
}
+ self->codec = codec;
+
gst_object_ref_sink (self);
return self;
@@ -537,22 +538,22 @@ error:
}
static const gchar *
-gst_d3d11_codec_to_string (GstD3D11Codec codec)
+gst_dxva_codec_to_string (GstDXVACodec codec)
{
switch (codec) {
- case GST_D3D11_CODEC_NONE:
+ case GST_DXVA_CODEC_NONE:
return "none";
- case GST_D3D11_CODEC_H264:
+ case GST_DXVA_CODEC_H264:
return "H.264";
- case GST_D3D11_CODEC_VP9:
+ case GST_DXVA_CODEC_VP9:
return "VP9";
- case GST_D3D11_CODEC_H265:
+ case GST_DXVA_CODEC_H265:
return "H.265";
- case GST_D3D11_CODEC_VP8:
+ case GST_DXVA_CODEC_VP8:
return "VP8";
- case GST_D3D11_CODEC_MPEG2:
+ case GST_DXVA_CODEC_MPEG2:
return "MPEG2";
- case GST_D3D11_CODEC_AV1:
+ case GST_DXVA_CODEC_AV1:
return "AV1";
default:
g_assert_not_reached ();
@@ -563,29 +564,33 @@ gst_d3d11_codec_to_string (GstD3D11Codec codec)
}
gboolean
-gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Decoder * decoder,
- GstD3D11Codec codec, GstVideoFormat format, const GUID ** selected_profile)
+gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Device * device,
+ GstDXVACodec codec, GstVideoFormat format, const GUID ** selected_profile)
{
- GUID *guid_list = NULL;
- const GUID *profile = NULL;
+ GUID *guid_list = nullptr;
+ const GUID *profile = nullptr;
guint available_profile_count;
guint i, j;
HRESULT hr;
ID3D11VideoDevice *video_device;
- const GUID **profile_list = NULL;
+ const GUID **profile_list = nullptr;
guint profile_size = 0;
- g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
- g_return_val_if_fail (selected_profile != NULL, FALSE);
+ g_return_val_if_fail (GST_IS_D3D11_DEVICE (device), FALSE);
+ g_return_val_if_fail (selected_profile != nullptr, FALSE);
+
+ video_device = gst_d3d11_device_get_video_device_handle (device);
+ if (!video_device)
+ return FALSE;
switch (codec) {
- case GST_D3D11_CODEC_H264:
+ case GST_DXVA_CODEC_H264:
if (format == GST_VIDEO_FORMAT_NV12) {
profile_list = profile_h264_list;
profile_size = G_N_ELEMENTS (profile_h264_list);
}
break;
- case GST_D3D11_CODEC_H265:
+ case GST_DXVA_CODEC_H265:
if (format == GST_VIDEO_FORMAT_NV12) {
profile_list = profile_hevc_list;
profile_size = G_N_ELEMENTS (profile_hevc_list);
@@ -594,13 +599,13 @@ gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Decoder * decoder,
profile_size = G_N_ELEMENTS (profile_hevc_10_list);
}
break;
- case GST_D3D11_CODEC_VP8:
+ case GST_DXVA_CODEC_VP8:
if (format == GST_VIDEO_FORMAT_NV12) {
profile_list = profile_vp8_list;
profile_size = G_N_ELEMENTS (profile_vp8_list);
}
break;
- case GST_D3D11_CODEC_VP9:
+ case GST_DXVA_CODEC_VP9:
if (format == GST_VIDEO_FORMAT_NV12) {
profile_list = profile_vp9_list;
profile_size = G_N_ELEMENTS (profile_vp9_list);
@@ -609,13 +614,13 @@ gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Decoder * decoder,
profile_size = G_N_ELEMENTS (profile_vp9_10_list);
}
break;
- case GST_D3D11_CODEC_MPEG2:
+ case GST_DXVA_CODEC_MPEG2:
if (format == GST_VIDEO_FORMAT_NV12) {
profile_list = profile_mpeg2_list;
profile_size = G_N_ELEMENTS (profile_mpeg2_list);
}
break;
- case GST_D3D11_CODEC_AV1:
+ case GST_DXVA_CODEC_AV1:
profile_list = profile_av1_list;
profile_size = G_N_ELEMENTS (profile_av1_list);
break;
@@ -624,50 +629,48 @@ gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Decoder * decoder,
}
if (!profile_list) {
- GST_ERROR_OBJECT (decoder,
+ GST_ERROR_OBJECT (device,
"Not supported codec (%d) and format (%s) configuration", codec,
gst_video_format_to_string (format));
return FALSE;
}
- video_device = decoder->video_device;
-
available_profile_count = video_device->GetVideoDecoderProfileCount ();
if (available_profile_count == 0) {
- GST_WARNING_OBJECT (decoder, "No available decoder profile");
+ GST_INFO_OBJECT (device, "No available decoder profile");
return FALSE;
}
- GST_DEBUG_OBJECT (decoder,
+ GST_DEBUG_OBJECT (device,
"Have %u available decoder profiles", available_profile_count);
guid_list = (GUID *) g_alloca (sizeof (GUID) * available_profile_count);
for (i = 0; i < available_profile_count; i++) {
hr = video_device->GetVideoDecoderProfile (i, &guid_list[i]);
- if (!gst_d3d11_result (hr, decoder->device)) {
- GST_WARNING_OBJECT (decoder, "Failed to get %d th decoder profile", i);
+ if (!gst_d3d11_result (hr, device)) {
+ GST_WARNING_OBJECT (device, "Failed to get %d th decoder profile", i);
return FALSE;
}
}
#ifndef GST_DISABLE_GST_DEBUG
- GST_LOG_OBJECT (decoder, "Supported decoder GUID");
+ GST_LOG_OBJECT (device, "Supported decoder GUID");
for (i = 0; i < available_profile_count; i++) {
const GUID *guid = &guid_list[i];
- GST_LOG_OBJECT (decoder,
+ GST_LOG_OBJECT (device,
"\t { %8.8x-%4.4x-%4.4x-%2.2x%2.2x-%2.2x%2.2x%2.2x%2.2x%2.2x%2.2x }",
(guint) guid->Data1, (guint) guid->Data2, (guint) guid->Data3,
guid->Data4[0], guid->Data4[1], guid->Data4[2], guid->Data4[3],
guid->Data4[4], guid->Data4[5], guid->Data4[6], guid->Data4[7]);
}
- GST_LOG_OBJECT (decoder, "Requested decoder GUID");
+ GST_LOG_OBJECT (device, "Requested decoder GUID");
for (i = 0; i < profile_size; i++) {
const GUID *guid = profile_list[i];
- GST_LOG_OBJECT (decoder,
+ GST_LOG_OBJECT (device,
"\t { %8.8x-%4.4x-%4.4x-%2.2x%2.2x-%2.2x%2.2x%2.2x%2.2x%2.2x%2.2x }",
(guint) guid->Data1, (guint) guid->Data2, (guint) guid->Data3,
guid->Data4[0], guid->Data4[1], guid->Data4[2], guid->Data4[3],
@@ -685,14 +688,14 @@ gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Decoder * decoder,
}
if (!profile) {
- GST_INFO_OBJECT (decoder, "No supported decoder profile for %s codec",
- gst_d3d11_codec_to_string (codec));
+ GST_INFO_OBJECT (device, "No supported decoder profile for %s codec",
+ gst_dxva_codec_to_string (codec));
return FALSE;
}
*selected_profile = profile;
- GST_DEBUG_OBJECT (decoder,
+ GST_DEBUG_OBJECT (device,
"Selected guid "
"{ %8.8x-%4.4x-%4.4x-%2.2x%2.2x-%2.2x%2.2x%2.2x%2.2x%2.2x%2.2x }",
(guint) profile->Data1, (guint) profile->Data2, (guint) profile->Data3,
@@ -705,15 +708,13 @@ gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Decoder * decoder,
gboolean
-gst_d3d11_decoder_configure (GstD3D11Decoder * decoder, GstD3D11Codec codec,
+gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
GstVideoCodecState * input_state, GstVideoInfo * info, gint coded_width,
gint coded_height, guint dpb_size)
{
const GstD3D11Format *d3d11_format;
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
- g_return_val_if_fail (codec > GST_D3D11_CODEC_NONE, FALSE);
- g_return_val_if_fail (codec < GST_D3D11_CODEC_LAST, FALSE);
g_return_val_if_fail (info != NULL, FALSE);
g_return_val_if_fail (input_state != NULL, FALSE);
g_return_val_if_fail (coded_width >= GST_VIDEO_INFO_WIDTH (info), FALSE);
@@ -730,7 +731,6 @@ gst_d3d11_decoder_configure (GstD3D11Decoder * decoder, GstD3D11Codec codec,
return FALSE;
}
- decoder->codec = codec;
decoder->input_state = gst_video_codec_state_ref (input_state);
decoder->info = decoder->output_info = *info;
decoder->coded_width = coded_width;
@@ -832,7 +832,7 @@ gst_d3d11_decoder_open (GstD3D11Decoder * self)
video_device = self->video_device;
gst_d3d11_device_lock (self->device);
- if (!gst_d3d11_decoder_get_supported_decoder_profile (self,
+ if (!gst_d3d11_decoder_get_supported_decoder_profile (self->device,
self->codec, GST_VIDEO_INFO_FORMAT (info), &selected_profile)) {
goto error;
}
@@ -867,15 +867,15 @@ gst_d3d11_decoder_open (GstD3D11Decoder * self)
* directx_va_Setup() in directx_va.c of vlc.
* But... where it is? */
switch (self->codec) {
- case GST_D3D11_CODEC_H265:
- case GST_D3D11_CODEC_AV1:
+ case GST_DXVA_CODEC_H265:
+ case GST_DXVA_CODEC_AV1:
/* See directx_va_Setup() impl. in vlc */
if (vendor != GST_D3D11_DEVICE_VENDOR_XBOX)
alignment = 128;
else
alignment = 16;
break;
- case GST_D3D11_CODEC_MPEG2:
+ case GST_DXVA_CODEC_MPEG2:
/* XXX: ffmpeg does this */
alignment = 32;
break;
@@ -926,15 +926,15 @@ gst_d3d11_decoder_open (GstD3D11Decoder * self)
/* FIXME: need support DXVA_Slice_H264_Long ?? */
/* this config uses DXVA_Slice_H264_Short */
switch (self->codec) {
- case GST_D3D11_CODEC_H264:
+ case GST_DXVA_CODEC_H264:
if (config_list[i].ConfigBitstreamRaw == 2)
best_config = &config_list[i];
break;
- case GST_D3D11_CODEC_H265:
- case GST_D3D11_CODEC_VP9:
- case GST_D3D11_CODEC_VP8:
- case GST_D3D11_CODEC_MPEG2:
- case GST_D3D11_CODEC_AV1:
+ case GST_DXVA_CODEC_H265:
+ case GST_DXVA_CODEC_VP9:
+ case GST_DXVA_CODEC_VP8:
+ case GST_DXVA_CODEC_MPEG2:
+ case GST_DXVA_CODEC_AV1:
if (config_list[i].ConfigBitstreamRaw == 1)
best_config = &config_list[i];
break;
@@ -1001,7 +1001,7 @@ error:
return FALSE;
}
-gboolean
+static gboolean
gst_d3d11_decoder_begin_frame (GstD3D11Decoder * decoder,
ID3D11VideoDecoderOutputView * output_view, guint content_key_size,
gconstpointer content_key)
@@ -1015,12 +1015,8 @@ gst_d3d11_decoder_begin_frame (GstD3D11Decoder * decoder,
if (decoder->timer_resolution)
retry_threshold = 500;
- g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
- g_return_val_if_fail (output_view != NULL, FALSE);
-
video_context = decoder->video_context;
- gst_d3d11_device_lock (decoder->device);
do {
GST_LOG_OBJECT (decoder, "Try begin frame, retry count %d", retry_count);
hr = video_context->DecoderBeginFrame (decoder->decoder_handle,
@@ -1041,7 +1037,6 @@ gst_d3d11_decoder_begin_frame (GstD3D11Decoder * decoder,
retry_count++;
} while (TRUE);
- gst_d3d11_device_unlock (decoder->device);
if (!gst_d3d11_result (hr, decoder->device)) {
GST_ERROR_OBJECT (decoder, "Failed to begin frame, hr: 0x%x", (guint) hr);
@@ -1051,19 +1046,14 @@ gst_d3d11_decoder_begin_frame (GstD3D11Decoder * decoder,
return TRUE;
}
-gboolean
+static gboolean
gst_d3d11_decoder_end_frame (GstD3D11Decoder * decoder)
{
HRESULT hr;
ID3D11VideoContext *video_context;
- g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
-
video_context = decoder->video_context;
-
- gst_d3d11_device_lock (decoder->device);
hr = video_context->DecoderEndFrame (decoder->decoder_handle);
- gst_d3d11_device_unlock (decoder->device);
if (!gst_d3d11_result (hr, decoder->device)) {
GST_WARNING_OBJECT (decoder, "EndFrame failed, hr: 0x%x", (guint) hr);
@@ -1073,7 +1063,7 @@ gst_d3d11_decoder_end_frame (GstD3D11Decoder * decoder)
return TRUE;
}
-gboolean
+static gboolean
gst_d3d11_decoder_get_decoder_buffer (GstD3D11Decoder * decoder,
D3D11_VIDEO_DECODER_BUFFER_TYPE type, guint * buffer_size,
gpointer * buffer)
@@ -1083,14 +1073,9 @@ gst_d3d11_decoder_get_decoder_buffer (GstD3D11Decoder * decoder,
HRESULT hr;
ID3D11VideoContext *video_context;
- g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
-
video_context = decoder->video_context;
-
- gst_d3d11_device_lock (decoder->device);
hr = video_context->GetDecoderBuffer (decoder->decoder_handle,
type, &size, &decoder_buffer);
- gst_d3d11_device_unlock (decoder->device);
if (!gst_d3d11_result (hr, decoder->device)) {
GST_WARNING_OBJECT (decoder, "Getting buffer type %d error, hr: 0x%x",
@@ -1104,20 +1089,15 @@ gst_d3d11_decoder_get_decoder_buffer (GstD3D11Decoder * decoder,
return TRUE;
}
-gboolean
+static gboolean
gst_d3d11_decoder_release_decoder_buffer (GstD3D11Decoder * decoder,
D3D11_VIDEO_DECODER_BUFFER_TYPE type)
{
HRESULT hr;
ID3D11VideoContext *video_context;
- g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
-
video_context = decoder->video_context;
-
- gst_d3d11_device_lock (decoder->device);
hr = video_context->ReleaseDecoderBuffer (decoder->decoder_handle, type);
- gst_d3d11_device_unlock (decoder->device);
if (!gst_d3d11_result (hr, decoder->device)) {
GST_WARNING_OBJECT (decoder, "ReleaseDecoderBuffer failed, hr: 0x%x",
@@ -1128,22 +1108,16 @@ gst_d3d11_decoder_release_decoder_buffer (GstD3D11Decoder * decoder,
return TRUE;
}
-gboolean
+static gboolean
gst_d3d11_decoder_submit_decoder_buffers (GstD3D11Decoder * decoder,
guint buffer_count, const D3D11_VIDEO_DECODER_BUFFER_DESC * buffers)
{
HRESULT hr;
ID3D11VideoContext *video_context;
- g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
-
video_context = decoder->video_context;
-
- gst_d3d11_device_lock (decoder->device);
hr = video_context->SubmitDecoderBuffers (decoder->decoder_handle,
buffer_count, buffers);
- gst_d3d11_device_unlock (decoder->device);
-
if (!gst_d3d11_result (hr, decoder->device)) {
GST_WARNING_OBJECT (decoder, "SubmitDecoderBuffers failed, hr: 0x%x",
(guint) hr);
@@ -1153,6 +1127,174 @@ gst_d3d11_decoder_submit_decoder_buffers (GstD3D11Decoder * decoder,
return TRUE;
}
+gboolean
+gst_d3d11_decoder_decode_frame (GstD3D11Decoder * decoder,
+ ID3D11VideoDecoderOutputView * output_view,
+ GstD3D11DecodeInputStreamArgs * input_args)
+{
+ guint d3d11_buffer_size;
+ gpointer d3d11_buffer;
+ D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[4];
+ guint buffer_desc_size;
+
+ g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
+ g_return_val_if_fail (output_view != nullptr, FALSE);
+ g_return_val_if_fail (input_args != nullptr, FALSE);
+
+ memset (buffer_desc, 0, sizeof (buffer_desc));
+
+ buffer_desc[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
+ buffer_desc[0].DataSize = input_args->picture_params_size;
+
+ buffer_desc[1].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
+ buffer_desc[1].DataSize = input_args->slice_control_size;
+
+ buffer_desc[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
+ buffer_desc[2].DataOffset = 0;
+ buffer_desc[2].DataSize = input_args->bitstream_size;
+
+ buffer_desc_size = 3;
+ if (input_args->inverse_quantization_matrix &&
+ input_args->inverse_quantization_matrix_size > 0) {
+ buffer_desc[3].BufferType =
+ D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX;
+ buffer_desc[3].DataSize = input_args->inverse_quantization_matrix_size;
+ buffer_desc_size++;
+ }
+
+ gst_d3d11_device_lock (decoder->device);
+ if (!gst_d3d11_decoder_begin_frame (decoder, output_view, 0, nullptr)) {
+ gst_d3d11_device_unlock (decoder->device);
+
+ return FALSE;
+ }
+
+ if (!gst_d3d11_decoder_get_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS, &d3d11_buffer_size,
+ &d3d11_buffer)) {
+ GST_ERROR_OBJECT (decoder,
+ "Failed to get decoder buffer for picture parameters");
+ goto error;
+ }
+
+ if (d3d11_buffer_size < input_args->picture_params_size) {
+ GST_ERROR_OBJECT (decoder,
+ "Too small picture param buffer size %d", d3d11_buffer_size);
+
+ gst_d3d11_decoder_release_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS);
+ goto error;
+ }
+
+ memcpy (d3d11_buffer, input_args->picture_params,
+ input_args->picture_params_size);
+
+ if (!gst_d3d11_decoder_release_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) {
+ GST_ERROR_OBJECT (decoder, "Failed to release picture param buffer");
+ goto error;
+ }
+
+ if (!gst_d3d11_decoder_get_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, &d3d11_buffer_size,
+ &d3d11_buffer)) {
+ GST_ERROR_OBJECT (decoder, "Failed to get slice control buffer");
+ goto error;
+ }
+
+ if (d3d11_buffer_size < input_args->slice_control_size) {
+ GST_ERROR_OBJECT (decoder,
+ "Too small slice control buffer size %d", d3d11_buffer_size);
+
+ gst_d3d11_decoder_release_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL);
+ goto error;
+ }
+
+ memcpy (d3d11_buffer,
+ input_args->slice_control, input_args->slice_control_size);
+
+ if (!gst_d3d11_decoder_release_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) {
+ GST_ERROR_OBJECT (decoder, "Failed to release slice control buffer");
+ goto error;
+ }
+
+ if (!gst_d3d11_decoder_get_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &d3d11_buffer_size,
+ &d3d11_buffer)) {
+ GST_ERROR_OBJECT (decoder, "Failed to get bitstream buffer");
+ goto error;
+ }
+
+ if (d3d11_buffer_size < input_args->bitstream_size) {
+ GST_ERROR_OBJECT (decoder, "Too small bitstream buffer size %d",
+ d3d11_buffer_size);
+
+ gst_d3d11_decoder_release_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_BITSTREAM);
+ goto error;
+ }
+
+ memcpy (d3d11_buffer, input_args->bitstream, input_args->bitstream_size);
+
+ if (!gst_d3d11_decoder_release_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) {
+ GST_ERROR_OBJECT (decoder, "Failed to release bitstream buffer");
+ goto error;
+ }
+
+ if (input_args->inverse_quantization_matrix_size > 0) {
+ if (!gst_d3d11_decoder_get_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX,
+ &d3d11_buffer_size, &d3d11_buffer)) {
+ GST_ERROR_OBJECT (decoder,
+ "Failed to get inverse quantization matrix buffer");
+ goto error;
+ }
+
+ if (d3d11_buffer_size < input_args->inverse_quantization_matrix_size) {
+ GST_ERROR_OBJECT (decoder,
+          "Too small inverse quantization matrix buffer size %d",
+ d3d11_buffer_size);
+
+ gst_d3d11_decoder_release_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX);
+ goto error;
+ }
+
+ memcpy (d3d11_buffer, input_args->inverse_quantization_matrix,
+ input_args->inverse_quantization_matrix_size);
+
+ if (!gst_d3d11_decoder_release_decoder_buffer (decoder,
+ D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX)) {
+ GST_ERROR_OBJECT (decoder,
+ "Failed to release inverse quantization matrix buffer");
+ goto error;
+ }
+ }
+
+ if (!gst_d3d11_decoder_submit_decoder_buffers (decoder,
+ buffer_desc_size, buffer_desc)) {
+ GST_ERROR_OBJECT (decoder, "Failed to submit decoder buffers");
+ goto error;
+ }
+
+ if (!gst_d3d11_decoder_end_frame (decoder)) {
+ gst_d3d11_device_unlock (decoder->device);
+ return FALSE;
+ }
+
+ gst_d3d11_device_unlock (decoder->device);
+
+ return TRUE;
+
+error:
+ gst_d3d11_decoder_end_frame (decoder);
+ gst_d3d11_device_unlock (decoder->device);
+ return FALSE;
+}
+
GstBuffer *
gst_d3d11_decoder_get_output_view_buffer (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec)
@@ -1885,23 +2027,25 @@ gst_d3d11_decoder_util_is_legacy_device (GstD3D11Device * device)
}
gboolean
-gst_d3d11_decoder_supports_format (GstD3D11Decoder * decoder,
+gst_d3d11_decoder_supports_format (GstD3D11Device * device,
const GUID * decoder_profile, DXGI_FORMAT format)
{
HRESULT hr;
BOOL can_support = FALSE;
ID3D11VideoDevice *video_device;
- g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
+ g_return_val_if_fail (GST_IS_D3D11_DEVICE (device), FALSE);
g_return_val_if_fail (decoder_profile != NULL, FALSE);
g_return_val_if_fail (format != DXGI_FORMAT_UNKNOWN, FALSE);
- video_device = decoder->video_device;
+ video_device = gst_d3d11_device_get_video_device_handle (device);
+ if (!video_device)
+ return FALSE;
hr = video_device->CheckVideoDecoderFormat (decoder_profile, format,
&can_support);
- if (!gst_d3d11_result (hr, decoder->device) || !can_support) {
- GST_DEBUG_OBJECT (decoder,
+ if (!gst_d3d11_result (hr, device) || !can_support) {
+ GST_DEBUG_OBJECT (device,
"VideoDevice could not support dxgi format %d, hr: 0x%x",
format, (guint) hr);
@@ -1913,7 +2057,7 @@ gst_d3d11_decoder_supports_format (GstD3D11Decoder * decoder,
/* Don't call this method with legacy device */
gboolean
-gst_d3d11_decoder_supports_resolution (GstD3D11Decoder * decoder,
+gst_d3d11_decoder_supports_resolution (GstD3D11Device * device,
const GUID * decoder_profile, DXGI_FORMAT format, guint width, guint height)
{
D3D11_VIDEO_DECODER_DESC desc;
@@ -1921,11 +2065,13 @@ gst_d3d11_decoder_supports_resolution (GstD3D11Decoder * decoder,
UINT config_count;
ID3D11VideoDevice *video_device;
- g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
+ g_return_val_if_fail (GST_IS_D3D11_DEVICE (device), FALSE);
g_return_val_if_fail (decoder_profile != NULL, FALSE);
g_return_val_if_fail (format != DXGI_FORMAT_UNKNOWN, FALSE);
- video_device = decoder->video_device;
+ video_device = gst_d3d11_device_get_video_device_handle (device);
+ if (!video_device)
+ return FALSE;
desc.SampleWidth = width;
desc.SampleHeight = height;
@@ -1933,8 +2079,8 @@ gst_d3d11_decoder_supports_resolution (GstD3D11Decoder * decoder,
desc.Guid = *decoder_profile;
hr = video_device->GetVideoDecoderConfigCount (&desc, &config_count);
- if (!gst_d3d11_result (hr, decoder->device) || config_count == 0) {
- GST_DEBUG_OBJECT (decoder, "Could not get decoder config count, hr: 0x%x",
+ if (!gst_d3d11_result (hr, device) || config_count == 0) {
+ GST_DEBUG_OBJECT (device, "Could not get decoder config count, hr: 0x%x",
(guint) hr);
return FALSE;
}
@@ -1955,7 +2101,7 @@ struct _GstD3D11DecoderClassData
GstCaps *sink_caps;
GstCaps *src_caps;
gchar *description;
- GstD3D11Codec codec;
+ GstDXVACodec codec;
};
/**
@@ -1969,7 +2115,7 @@ struct _GstD3D11DecoderClassData
* Returns: (transfer full): the new #GstD3D11DecoderClassData
*/
GstD3D11DecoderClassData *
-gst_d3d11_decoder_class_data_new (GstD3D11Device * device, GstD3D11Codec codec,
+gst_d3d11_decoder_class_data_new (GstD3D11Device * device, GstDXVACodec codec,
GstCaps * sink_caps, GstCaps * src_caps)
{
GstD3D11DecoderClassData *ret;
@@ -2044,7 +2190,7 @@ gst_d3d11_decoder_proxy_class_init (GstElementClass * klass,
"DXGI Vendor ID", 0, G_MAXUINT32, 0,
(GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
- codec_name = gst_d3d11_codec_to_string (data->codec);
+ codec_name = gst_dxva_codec_to_string (data->codec);
long_name = "Direct3D11/DXVA " + std::string (codec_name) + " " +
std::string (data->description) + " Decoder";
description = "Direct3D11/DXVA based " + std::string (codec_name) +
diff --git a/sys/d3d11/gstd3d11decoder.h b/sys/d3d11/gstd3d11decoder.h
index f7ec66131..08445fe65 100644
--- a/sys/d3d11/gstd3d11decoder.h
+++ b/sys/d3d11/gstd3d11decoder.h
@@ -34,17 +34,17 @@ typedef struct _GstD3D11DecoderClassData GstD3D11DecoderClassData;
typedef enum
{
- GST_D3D11_CODEC_NONE,
- GST_D3D11_CODEC_H264,
- GST_D3D11_CODEC_VP9,
- GST_D3D11_CODEC_H265,
- GST_D3D11_CODEC_VP8,
- GST_D3D11_CODEC_MPEG2,
- GST_D3D11_CODEC_AV1,
+ GST_DXVA_CODEC_NONE,
+ GST_DXVA_CODEC_MPEG2,
+ GST_DXVA_CODEC_H264,
+ GST_DXVA_CODEC_H265,
+ GST_DXVA_CODEC_VP8,
+ GST_DXVA_CODEC_VP9,
+ GST_DXVA_CODEC_AV1,
/* the last of supported codec */
- GST_D3D11_CODEC_LAST
-} GstD3D11Codec;
+ GST_DXVA_CODEC_LAST
+} GstDXVACodec;
typedef struct
{
@@ -53,36 +53,37 @@ typedef struct
guint vendor_id;
} GstD3D11DecoderSubClassData;
-GstD3D11Decoder * gst_d3d11_decoder_new (GstD3D11Device * device);
+typedef struct _GstD3D11DecodeInputStreamArgs
+{
+ gpointer picture_params;
+ gsize picture_params_size;
+
+ gpointer slice_control;
+ gsize slice_control_size;
+
+ gpointer bitstream;
+ gsize bitstream_size;
+
+ gpointer inverse_quantization_matrix;
+ gsize inverse_quantization_matrix_size;
+} GstD3D11DecodeInputStreamArgs;
+
+GstD3D11Decoder * gst_d3d11_decoder_new (GstD3D11Device * device,
+ GstDXVACodec codec);
gboolean gst_d3d11_decoder_is_configured (GstD3D11Decoder * decoder);
gboolean gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
- GstD3D11Codec codec,
GstVideoCodecState * input_state,
GstVideoInfo * info,
gint coded_width,
gint coded_height,
guint dpb_size);
-gboolean gst_d3d11_decoder_begin_frame (GstD3D11Decoder * decoder,
- ID3D11VideoDecoderOutputView * output_view,
- guint content_key_size,
- gconstpointer content_key);
-
-gboolean gst_d3d11_decoder_end_frame (GstD3D11Decoder * decoder);
+gboolean gst_d3d11_decoder_decode_frame (GstD3D11Decoder * decoder,
+ ID3D11VideoDecoderOutputView * output_view,
+ GstD3D11DecodeInputStreamArgs * input_args);
-gboolean gst_d3d11_decoder_get_decoder_buffer (GstD3D11Decoder * decoder,
- D3D11_VIDEO_DECODER_BUFFER_TYPE type,
- guint * buffer_size,
- gpointer * buffer);
-
-gboolean gst_d3d11_decoder_release_decoder_buffer (GstD3D11Decoder * decoder,
- D3D11_VIDEO_DECODER_BUFFER_TYPE type);
-
-gboolean gst_d3d11_decoder_submit_decoder_buffers (GstD3D11Decoder * decoder,
- guint buffer_count,
- const D3D11_VIDEO_DECODER_BUFFER_DESC * buffers);
GstBuffer * gst_d3d11_decoder_get_output_view_buffer (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec);
@@ -110,25 +111,36 @@ gboolean gst_d3d11_decoder_set_flushing (GstD3D11Decoder * decod
gboolean flushing);
/* Utils for class registration */
+typedef struct _GstDXVAResolution
+{
+ guint width;
+ guint height;
+} GstDXVAResolution;
+
+static const GstDXVAResolution gst_dxva_resolutions[] = {
+ {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160},
+ {7680, 4320}, {8192, 4320}
+};
+
gboolean gst_d3d11_decoder_util_is_legacy_device (GstD3D11Device * device);
-gboolean gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Decoder * decoder,
- GstD3D11Codec codec,
+gboolean gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Device * device,
+ GstDXVACodec codec,
GstVideoFormat format,
const GUID ** selected_profile);
-gboolean gst_d3d11_decoder_supports_format (GstD3D11Decoder * decoder,
+gboolean gst_d3d11_decoder_supports_format (GstD3D11Device * device,
const GUID * decoder_profile,
DXGI_FORMAT format);
-gboolean gst_d3d11_decoder_supports_resolution (GstD3D11Decoder * decoder,
+gboolean gst_d3d11_decoder_supports_resolution (GstD3D11Device * device,
const GUID * decoder_profile,
DXGI_FORMAT format,
guint width,
guint height);
GstD3D11DecoderClassData * gst_d3d11_decoder_class_data_new (GstD3D11Device * device,
- GstD3D11Codec codec,
+ GstDXVACodec codec,
GstCaps * sink_caps,
GstCaps * src_caps);
diff --git a/sys/d3d11/gstd3d11h264dec.cpp b/sys/d3d11/gstd3d11h264dec.cpp
index 5723fa2ac..39dca6bab 100644
--- a/sys/d3d11/gstd3d11h264dec.cpp
+++ b/sys/d3d11/gstd3d11h264dec.cpp
@@ -70,6 +70,7 @@
#include <gst/codecs/gsth264decoder.h>
#include <string.h>
+#include <vector>
/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
@@ -82,36 +83,34 @@
GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_h264_dec_debug);
#define GST_CAT_DEFAULT gst_d3d11_h264_dec_debug
+/* *INDENT-OFF* */
+typedef struct _GstD3D11H264DecInner
+{
+ GstD3D11Device *device = nullptr;
+ GstD3D11Decoder *d3d11_decoder = nullptr;
+
+ DXVA_PicParams_H264 pic_params;
+ DXVA_Qmatrix_H264 iq_matrix;
+
+ std::vector<DXVA_Slice_H264_Short> slice_list;
+ std::vector<guint8> bitstream_buffer;
+
+ gint width = 0;
+ gint height = 0;
+ gint coded_width = 0;
+ gint coded_height = 0;
+ gint bitdepth = 0;
+ guint8 chroma_format_idc = 0;
+ GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
+ gboolean interlaced = FALSE;
+ gint max_dpb_size = 0;
+} GstD3D11H264DecInner;
+
+/* *INDENT-ON* */
typedef struct _GstD3D11H264Dec
{
GstH264Decoder parent;
-
- GstD3D11Device *device;
-
- gint width, height;
- gint coded_width, coded_height;
- gint bitdepth;
- guint8 chroma_format_idc;
- GstVideoFormat out_format;
- gboolean interlaced;
- gint max_dpb_size;
-
- /* Array of DXVA_Slice_H264_Short */
- GArray *slice_list;
-
- GstD3D11Decoder *d3d11_decoder;
-
- /* Pointing current bitstream buffer */
- gboolean bad_aligned_bitstream_buffer;
- guint written_buffer_size;
- guint remaining_buffer_size;
- guint8 *bitstream_buffer_data;
-
- DXVA_PicEntry_H264 ref_frame_list[16];
- INT field_order_cnt_list[16][2];
- USHORT frame_num_list[16];
- UINT used_for_reference_flags;
- USHORT non_existing_frame_flags;
+ GstD3D11H264DecInner *inner;
} GstD3D11H264Dec;
typedef struct _GstD3D11H264DecClass
@@ -128,7 +127,7 @@ static GstElementClass *parent_class = NULL;
static void gst_d3d11_h264_dec_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-static void gst_d3d11_h264_dec_dispose (GObject * object);
+static void gst_d3d11_h264_dec_finalize (GObject * object);
static void gst_d3d11_h264_dec_set_context (GstElement * element,
GstContext * context);
@@ -149,8 +148,6 @@ static gboolean gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GstVideoCodecFrame * frame, GstH264Picture * picture);
static gboolean gst_d3d11_h264_dec_new_field_picture (GstH264Decoder *
decoder, const GstH264Picture * first_field, GstH264Picture * second_field);
-static GstFlowReturn gst_d3d11_h264_dec_output_picture (GstH264Decoder *
- decoder, GstVideoCodecFrame * frame, GstH264Picture * picture);
static gboolean gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb);
static gboolean gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
@@ -158,8 +155,8 @@ static gboolean gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
GArray * ref_pic_list1);
static gboolean gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
GstH264Picture * picture);
-static gboolean gst_d3d11_h264_dec_fill_picture_params (GstD3D11H264Dec * self,
- const GstH264SliceHdr * slice_header, DXVA_PicParams_H264 * params);
+static GstFlowReturn gst_d3d11_h264_dec_output_picture (GstH264Decoder *
+ decoder, GstVideoCodecFrame * frame, GstH264Picture * picture);
static void
gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
@@ -171,7 +168,7 @@ gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;
gobject_class->get_property = gst_d3d11_h264_dec_get_property;
- gobject_class->dispose = gst_d3d11_h264_dec_dispose;
+ gobject_class->finalize = gst_d3d11_h264_dec_finalize;
element_class->set_context =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_set_context);
@@ -208,7 +205,7 @@ gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
static void
gst_d3d11_h264_dec_init (GstD3D11H264Dec * self)
{
- self->slice_list = g_array_new (FALSE, TRUE, sizeof (DXVA_Slice_H264_Short));
+ self->inner = new GstD3D11H264DecInner ();
}
static void
@@ -222,27 +219,25 @@ gst_d3d11_h264_dec_get_property (GObject * object, guint prop_id,
}
static void
-gst_d3d11_h264_dec_dispose (GObject * object)
+gst_d3d11_h264_dec_finalize (GObject * object)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (object);
- if (self->slice_list) {
- g_array_unref (self->slice_list);
- self->slice_list = NULL;
- }
+ delete self->inner;
- G_OBJECT_CLASS (parent_class)->dispose (object);
+ G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_d3d11_h264_dec_set_context (GstElement * element, GstContext * context)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (element);
+ GstD3D11H264DecInner *inner = self->inner;
GstD3D11H264DecClass *klass = GST_D3D11_H264_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
gst_d3d11_handle_set_context (element, context, cdata->adapter,
- &self->device);
+ &inner->device);
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@@ -251,35 +246,39 @@ gst_d3d11_h264_dec_set_context (GstElement * element, GstContext * context)
static void
gst_d3d11_h264_dec_reset (GstD3D11H264Dec * self)
{
- self->width = 0;
- self->height = 0;
- self->coded_width = 0;
- self->coded_height = 0;
- self->bitdepth = 0;
- self->chroma_format_idc = 0;
- self->out_format = GST_VIDEO_FORMAT_UNKNOWN;
- self->interlaced = FALSE;
- self->max_dpb_size = 0;
+ GstD3D11H264DecInner *inner = self->inner;
+
+ inner->width = 0;
+ inner->height = 0;
+ inner->coded_width = 0;
+ inner->coded_height = 0;
+ inner->bitdepth = 0;
+ inner->chroma_format_idc = 0;
+ inner->out_format = GST_VIDEO_FORMAT_UNKNOWN;
+ inner->interlaced = FALSE;
+ inner->max_dpb_size = 0;
}
static gboolean
gst_d3d11_h264_dec_open (GstVideoDecoder * decoder)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
GstD3D11H264DecClass *klass = GST_D3D11_H264_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), cdata->adapter,
- &self->device)) {
+ &inner->device)) {
GST_ERROR_OBJECT (self, "Cannot create d3d11device");
return FALSE;
}
- self->d3d11_decoder = gst_d3d11_decoder_new (self->device);
+ inner->d3d11_decoder = gst_d3d11_decoder_new (inner->device,
+ GST_DXVA_CODEC_H264);
- if (!self->d3d11_decoder) {
+ if (!inner->d3d11_decoder) {
GST_ERROR_OBJECT (self, "Cannot create d3d11 decoder");
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->device);
return FALSE;
}
@@ -292,9 +291,10 @@ static gboolean
gst_d3d11_h264_dec_close (GstVideoDecoder * decoder)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
- gst_clear_object (&self->d3d11_decoder);
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->d3d11_decoder);
+ gst_clear_object (&inner->device);
return TRUE;
}
@@ -303,8 +303,9 @@ static gboolean
gst_d3d11_h264_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_negotiate (self->d3d11_decoder, decoder))
+ if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
@@ -315,8 +316,9 @@ gst_d3d11_h264_dec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_decide_allocation (self->d3d11_decoder,
+ if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
decoder, query)) {
return FALSE;
}
@@ -329,11 +331,12 @@ static gboolean
gst_d3d11_h264_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
- query, self->device)) {
+ query, inner->device)) {
return TRUE;
}
break;
@@ -348,15 +351,16 @@ static gboolean
gst_d3d11_h264_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, TRUE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
break;
case GST_EVENT_FLUSH_STOP:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, FALSE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
default:
break;
}
@@ -369,6 +373,7 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
const GstH264SPS * sps, gint max_dpb_size)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
gint crop_width, crop_height;
gboolean interlaced;
gboolean modified = FALSE;
@@ -383,63 +388,63 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
crop_height = sps->height;
}
- if (self->width != crop_width || self->height != crop_height ||
- self->coded_width != sps->width || self->coded_height != sps->height) {
+ if (inner->width != crop_width || inner->height != crop_height ||
+ inner->coded_width != sps->width || inner->coded_height != sps->height) {
GST_INFO_OBJECT (self, "resolution changed %dx%d (%dx%d)",
crop_width, crop_height, sps->width, sps->height);
- self->width = crop_width;
- self->height = crop_height;
- self->coded_width = sps->width;
- self->coded_height = sps->height;
+ inner->width = crop_width;
+ inner->height = crop_height;
+ inner->coded_width = sps->width;
+ inner->coded_height = sps->height;
modified = TRUE;
}
- if (self->bitdepth != sps->bit_depth_luma_minus8 + 8) {
+ if (inner->bitdepth != sps->bit_depth_luma_minus8 + 8) {
GST_INFO_OBJECT (self, "bitdepth changed");
- self->bitdepth = (guint) sps->bit_depth_luma_minus8 + 8;
+ inner->bitdepth = (guint) sps->bit_depth_luma_minus8 + 8;
modified = TRUE;
}
- if (self->chroma_format_idc != sps->chroma_format_idc) {
+ if (inner->chroma_format_idc != sps->chroma_format_idc) {
GST_INFO_OBJECT (self, "chroma format changed");
- self->chroma_format_idc = sps->chroma_format_idc;
+ inner->chroma_format_idc = sps->chroma_format_idc;
modified = TRUE;
}
interlaced = !sps->frame_mbs_only_flag;
- if (self->interlaced != interlaced) {
+ if (inner->interlaced != interlaced) {
GST_INFO_OBJECT (self, "interlaced sequence changed");
- self->interlaced = interlaced;
+ inner->interlaced = interlaced;
modified = TRUE;
}
- if (self->max_dpb_size < max_dpb_size) {
+ if (inner->max_dpb_size < max_dpb_size) {
GST_INFO_OBJECT (self, "Requires larger DPB size (%d -> %d)",
- self->max_dpb_size, max_dpb_size);
+ inner->max_dpb_size, max_dpb_size);
modified = TRUE;
}
- if (modified || !gst_d3d11_decoder_is_configured (self->d3d11_decoder)) {
+ if (modified || !gst_d3d11_decoder_is_configured (inner->d3d11_decoder)) {
GstVideoInfo info;
- self->out_format = GST_VIDEO_FORMAT_UNKNOWN;
+ inner->out_format = GST_VIDEO_FORMAT_UNKNOWN;
- if (self->bitdepth == 8) {
- if (self->chroma_format_idc == 1)
- self->out_format = GST_VIDEO_FORMAT_NV12;
+ if (inner->bitdepth == 8) {
+ if (inner->chroma_format_idc == 1)
+ inner->out_format = GST_VIDEO_FORMAT_NV12;
else {
GST_FIXME_OBJECT (self, "Could not support 8bits non-4:2:0 format");
}
}
- if (self->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
+ if (inner->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
GST_ERROR_OBJECT (self, "Could not support bitdepth/chroma format");
return FALSE;
}
gst_video_info_set_format (&info,
- self->out_format, self->width, self->height);
- if (self->interlaced)
+ inner->out_format, inner->width, inner->height);
+ if (inner->interlaced)
GST_VIDEO_INFO_INTERLACE_MODE (&info) = GST_VIDEO_INTERLACE_MODE_MIXED;
/* Store configured DPB size here. Then, it will be referenced later
@@ -447,9 +452,10 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
* For instance, if every configuration is same apart from DPB size and
* new DPB size is decreased, we can reuse existing decoder object.
*/
- self->max_dpb_size = max_dpb_size;
- if (!gst_d3d11_decoder_configure (self->d3d11_decoder, GST_D3D11_CODEC_H264,
- decoder->input_state, &info, self->coded_width, self->coded_height,
+ inner->max_dpb_size = max_dpb_size;
+ if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
+ decoder->input_state, &info,
+ inner->coded_width, inner->coded_height,
/* Additional 4 views margin for zero-copy rendering */
max_dpb_size + 4)) {
GST_ERROR_OBJECT (self, "Failed to create decoder");
@@ -466,262 +472,14 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
}
static gboolean
-gst_d3d11_h264_dec_get_bitstream_buffer (GstD3D11H264Dec * self)
-{
- GST_TRACE_OBJECT (self, "Getting bitstream buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &self->remaining_buffer_size,
- (gpointer *) & self->bitstream_buffer_data)) {
- GST_ERROR_OBJECT (self, "Faild to get bitstream buffer");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Got bitstream buffer %p with size %d",
- self->bitstream_buffer_data, self->remaining_buffer_size);
- self->written_buffer_size = 0;
- if ((self->remaining_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "The size of bitstream buffer is not 128 bytes aligned");
- self->bad_aligned_bitstream_buffer = TRUE;
- } else {
- self->bad_aligned_bitstream_buffer = FALSE;
- }
-
- return TRUE;
-}
-
-static ID3D11VideoDecoderOutputView *
-gst_d3d11_h264_dec_get_output_view_from_picture (GstD3D11H264Dec * self,
- GstH264Picture * picture, guint8 * view_id)
-{
- GstBuffer *view_buffer;
- ID3D11VideoDecoderOutputView *view;
-
- view_buffer = (GstBuffer *) gst_h264_picture_get_user_data (picture);
- if (!view_buffer) {
- GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
- return NULL;
- }
-
- view = gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
- view_buffer, view_id);
- if (!view) {
- GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
- return NULL;
- }
-
- return view;
-}
-
-static gboolean
-gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
- GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb)
-{
- GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
- ID3D11VideoDecoderOutputView *view;
- guint8 view_id = 0xff;
- GArray *dpb_array;
- GstH264SPS *sps;
- GstH264PPS *pps;
- DXVA_PicParams_H264 pic_params = { 0, };
- DXVA_Qmatrix_H264 iq_matrix = { 0, };
- guint d3d11_buffer_size = 0;
- gpointer d3d11_buffer = NULL;
- gint i, j;
-
- pps = slice->header.pps;
- g_assert (pps != NULL);
-
- sps = pps->sequence;
- g_assert (sps != NULL);
-
- view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
- &view_id);
- if (!view) {
- GST_ERROR_OBJECT (self, "current picture does not have output view handle");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Begin frame");
- if (!gst_d3d11_decoder_begin_frame (self->d3d11_decoder, view, 0, NULL)) {
- GST_ERROR_OBJECT (self, "Failed to begin frame");
- return FALSE;
- }
-
- for (i = 0; i < 16; i++) {
- self->ref_frame_list[i].bPicEntry = 0xFF;
- self->field_order_cnt_list[i][0] = 0;
- self->field_order_cnt_list[i][1] = 0;
- self->frame_num_list[i] = 0;
- }
- self->used_for_reference_flags = 0;
- self->non_existing_frame_flags = 0;
-
- dpb_array = gst_h264_dpb_get_pictures_all (dpb);
-
- for (i = dpb_array->len - 1, j = 0; i >= 0 && j < 16; i--) {
- GstH264Picture *other = g_array_index (dpb_array, GstH264Picture *, i);
- guint8 id = 0xff;
-
- if (!GST_H264_PICTURE_IS_REF (other))
- continue;
-
- /* The second field picture will be handled differently */
- if (other->second_field)
- continue;
-
- gst_d3d11_h264_dec_get_output_view_from_picture (self, other, &id);
- self->ref_frame_list[j].Index7Bits = id;
-
- if (GST_H264_PICTURE_IS_LONG_TERM_REF (other)) {
- self->ref_frame_list[j].AssociatedFlag = 1;
- self->frame_num_list[j] = other->long_term_frame_idx;
- } else {
- self->ref_frame_list[j].AssociatedFlag = 0;
- self->frame_num_list[j] = other->frame_num;
- }
-
- switch (other->field) {
- case GST_H264_PICTURE_FIELD_TOP_FIELD:
- self->field_order_cnt_list[j][0] = other->top_field_order_cnt;
- self->used_for_reference_flags |= 0x1 << (2 * j);
- break;
- case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
- self->field_order_cnt_list[j][1] = other->bottom_field_order_cnt;
- self->used_for_reference_flags |= 0x1 << (2 * j + 1);
- break;
- default:
- self->field_order_cnt_list[j][0] = other->top_field_order_cnt;
- self->field_order_cnt_list[j][1] = other->bottom_field_order_cnt;
- self->used_for_reference_flags |= 0x3 << (2 * j);
- break;
- }
-
- if (other->other_field) {
- GstH264Picture *other_field = other->other_field;
-
- switch (other_field->field) {
- case GST_H264_PICTURE_FIELD_TOP_FIELD:
- self->field_order_cnt_list[j][0] = other_field->top_field_order_cnt;
- self->used_for_reference_flags |= 0x1 << (2 * j);
- break;
- case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
- self->field_order_cnt_list[j][1] =
- other_field->bottom_field_order_cnt;
- self->used_for_reference_flags |= 0x1 << (2 * j + 1);
- break;
- default:
- break;
- }
- }
-
- self->non_existing_frame_flags |= (other->nonexisting) << j;
- j++;
- }
-
- gst_d3d11_h264_dec_fill_picture_params (self, &slice->header, &pic_params);
-
- pic_params.CurrPic.Index7Bits = view_id;
- pic_params.RefPicFlag = GST_H264_PICTURE_IS_REF (picture);
- pic_params.frame_num = picture->frame_num;
-
- if (picture->field == GST_H264_PICTURE_FIELD_TOP_FIELD) {
- pic_params.CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
- pic_params.CurrFieldOrderCnt[1] = 0;
- } else if (picture->field == GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
- pic_params.CurrFieldOrderCnt[0] = 0;
- pic_params.CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
- } else {
- pic_params.CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
- pic_params.CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
- }
-
- memcpy (pic_params.RefFrameList, self->ref_frame_list,
- sizeof (pic_params.RefFrameList));
- memcpy (pic_params.FieldOrderCntList, self->field_order_cnt_list,
- sizeof (pic_params.FieldOrderCntList));
- memcpy (pic_params.FrameNumList, self->frame_num_list,
- sizeof (pic_params.FrameNumList));
-
- pic_params.UsedForReferenceFlags = self->used_for_reference_flags;
- pic_params.NonExistingFrameFlags = self->non_existing_frame_flags;
-
- GST_TRACE_OBJECT (self, "Getting picture param decoder buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for picture parameters");
- return FALSE;
- }
-
- memcpy (d3d11_buffer, &pic_params, sizeof (DXVA_PicParams_H264));
-
- GST_TRACE_OBJECT (self, "Release picture param decoder buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
-
- if (pps->pic_scaling_matrix_present_flag) {
- for (i = 0; i < 6; i++) {
- for (j = 0; j < 16; j++) {
- iq_matrix.bScalingLists4x4[i][j] = pps->scaling_lists_4x4[i][j];
- }
- }
-
- for (i = 0; i < 2; i++) {
- for (j = 0; j < 64; j++) {
- iq_matrix.bScalingLists8x8[i][j] = pps->scaling_lists_8x8[i][j];
- }
- }
- } else {
- for (i = 0; i < 6; i++) {
- for (j = 0; j < 16; j++) {
- iq_matrix.bScalingLists4x4[i][j] = sps->scaling_lists_4x4[i][j];
- }
- }
-
- for (i = 0; i < 2; i++) {
- for (j = 0; j < 64; j++) {
- iq_matrix.bScalingLists8x8[i][j] = sps->scaling_lists_8x8[i][j];
- }
- }
- }
-
- GST_TRACE_OBJECT (self, "Getting inverse quantization matrix buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX,
- &d3d11_buffer_size, &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for inv. quantization matrix");
- return FALSE;
- }
-
- memcpy (d3d11_buffer, &iq_matrix, sizeof (DXVA_Qmatrix_H264));
-
- GST_TRACE_OBJECT (self, "Release inverse quantization matrix buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
-
- g_array_unref (dpb_array);
- g_array_set_size (self->slice_list, 0);
-
- return gst_d3d11_h264_dec_get_bitstream_buffer (self);
-}
-
-static gboolean
gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GstVideoCodecFrame * frame, GstH264Picture * picture)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
GstBuffer *view_buffer;
- view_buffer = gst_d3d11_decoder_get_output_view_buffer (self->d3d11_decoder,
+ view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
@@ -762,178 +520,28 @@ gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * decoder,
return TRUE;
}
-static GstFlowReturn
-gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
- GstVideoCodecFrame * frame, GstH264Picture * picture)
+static ID3D11VideoDecoderOutputView *
+gst_d3d11_h264_dec_get_output_view_from_picture (GstD3D11H264Dec * self,
+ GstH264Picture * picture, guint8 * view_id)
{
- GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
- GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
GstBuffer *view_buffer;
-
- GST_LOG_OBJECT (self,
- "Outputting picture %p (poc %d)", picture, picture->pic_order_cnt);
+ ID3D11VideoDecoderOutputView *view;
view_buffer = (GstBuffer *) gst_h264_picture_get_user_data (picture);
-
if (!view_buffer) {
- GST_ERROR_OBJECT (self, "Could not get output view");
- goto error;
- }
-
- if (!gst_d3d11_decoder_process_output (self->d3d11_decoder, vdec,
- self->width, self->height, view_buffer, &frame->output_buffer)) {
- GST_ERROR_OBJECT (self, "Failed to copy buffer");
- goto error;
- }
-
- if (picture->buffer_flags != 0) {
- gboolean interlaced =
- (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_INTERLACED) != 0;
- gboolean tff = (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_TFF) != 0;
-
- GST_TRACE_OBJECT (self,
- "apply buffer flags 0x%x (interlaced %d, top-field-first %d)",
- picture->buffer_flags, interlaced, tff);
- GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
- }
-
- gst_h264_picture_unref (picture);
-
- return gst_video_decoder_finish_frame (vdec, frame);
-
-error:
- gst_video_decoder_drop_frame (vdec, frame);
- gst_h264_picture_unref (picture);
-
- return GST_FLOW_ERROR;
-}
-
-static gboolean
-gst_d3d11_h264_dec_submit_slice_data (GstD3D11H264Dec * self)
-{
- guint buffer_size;
- gpointer buffer;
- guint8 *data;
- gsize offset = 0;
- guint i;
- D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[4];
- gboolean ret;
- DXVA_Slice_H264_Short *slice_data;
-
- if (self->slice_list->len < 1) {
- GST_WARNING_OBJECT (self, "Nothing to submit");
- return FALSE;
- }
-
- memset (buffer_desc, 0, sizeof (buffer_desc));
-
- slice_data = &g_array_index (self->slice_list, DXVA_Slice_H264_Short,
- self->slice_list->len - 1);
-
- /* DXVA2 spec is saying that written bitstream data must be 128 bytes
- * aligned if the bitstream buffer contains end of slice
- * (i.e., wBadSliceChopping == 0 or 2) */
- if (slice_data->wBadSliceChopping == 0 || slice_data->wBadSliceChopping == 2) {
- guint padding =
- MIN (GST_ROUND_UP_128 (self->written_buffer_size) -
- self->written_buffer_size, self->remaining_buffer_size);
-
- if (padding) {
- GST_TRACE_OBJECT (self,
- "Written bitstream buffer size %u is not 128 bytes aligned, "
- "add padding %u bytes", self->written_buffer_size, padding);
- memset (self->bitstream_buffer_data, 0, padding);
- self->written_buffer_size += padding;
- slice_data->SliceBytesInBuffer += padding;
- }
- }
-
- GST_TRACE_OBJECT (self, "Getting slice control buffer");
-
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, &buffer_size, &buffer)) {
- GST_ERROR_OBJECT (self, "Couldn't get slice control buffer");
- return FALSE;
- }
-
- data = (guint8 *) buffer;
- for (i = 0; i < self->slice_list->len; i++) {
- DXVA_Slice_H264_Short *slice_data =
- &g_array_index (self->slice_list, DXVA_Slice_H264_Short, i);
-
- memcpy (data + offset, slice_data, sizeof (DXVA_Slice_H264_Short));
- offset += sizeof (DXVA_Slice_H264_Short);
- }
-
- GST_TRACE_OBJECT (self, "Release slice control buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) {
- GST_ERROR_OBJECT (self, "Failed to release slice control buffer");
- return FALSE;
- }
-
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) {
- GST_ERROR_OBJECT (self, "Failed to release bitstream buffer");
- return FALSE;
- }
-
- buffer_desc[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
- buffer_desc[0].DataOffset = 0;
- buffer_desc[0].DataSize = sizeof (DXVA_PicParams_H264);
-
- buffer_desc[1].BufferType =
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX;
- buffer_desc[1].DataOffset = 0;
- buffer_desc[1].DataSize = sizeof (DXVA_Qmatrix_H264);
-
- buffer_desc[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
- buffer_desc[2].DataOffset = 0;
- buffer_desc[2].DataSize =
- sizeof (DXVA_Slice_H264_Short) * self->slice_list->len;
-
- if (!self->bad_aligned_bitstream_buffer
- && (self->written_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "Written bitstream buffer size %u is not 128 bytes aligned",
- self->written_buffer_size);
- }
-
- buffer_desc[3].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
- buffer_desc[3].DataOffset = 0;
- buffer_desc[3].DataSize = self->written_buffer_size;
-
- ret = gst_d3d11_decoder_submit_decoder_buffers (self->d3d11_decoder,
- 4, buffer_desc);
-
- self->written_buffer_size = 0;
- self->bitstream_buffer_data = NULL;
- self->remaining_buffer_size = 0;
- g_array_set_size (self->slice_list, 0);
-
- return ret;
-}
-
-static gboolean
-gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
- GstH264Picture * picture)
-{
- GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
-
- GST_LOG_OBJECT (self, "end picture %p, (poc %d)",
- picture, picture->pic_order_cnt);
-
- if (!gst_d3d11_h264_dec_submit_slice_data (self)) {
- GST_ERROR_OBJECT (self, "Failed to submit slice data");
- return FALSE;
+ GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
+ return NULL;
}
- if (!gst_d3d11_decoder_end_frame (self->d3d11_decoder)) {
- GST_ERROR_OBJECT (self, "Failed to EndFrame");
- return FALSE;
+ view = gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
+ view_buffer, view_id);
+ if (!view) {
+ GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
+ return NULL;
}
- return TRUE;
+ return view;
}
static void
@@ -1020,8 +628,6 @@ gst_d3d11_h264_dec_fill_picture_params (GstD3D11H264Dec * self,
pps = slice_header->pps;
sps = pps->sequence;
- memset (params, 0, sizeof (DXVA_PicParams_H264));
-
params->MbsConsecutiveFlag = 1;
params->Reserved16Bits = 3;
params->ContinuationFlag = 1;
@@ -1038,127 +644,275 @@ gst_d3d11_h264_dec_fill_picture_params (GstD3D11H264Dec * self,
return TRUE;
}
+static inline void
+init_pic_params (DXVA_PicParams_H264 * params)
+{
+ memset (params, 0, sizeof (DXVA_PicParams_H264));
+ for (guint i = 0; i < G_N_ELEMENTS (params->RefFrameList); i++)
+ params->RefFrameList[i].bPicEntry = 0xff;
+}
+
+static gboolean
+gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
+ GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb)
+{
+ GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
+ DXVA_PicParams_H264 *pic_params = &inner->pic_params;
+ DXVA_Qmatrix_H264 *iq_matrix = &inner->iq_matrix;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
+ GArray *dpb_array;
+ GstH264PPS *pps;
+ guint i, j;
+
+ pps = slice->header.pps;
+
+ view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
+ &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
+ return FALSE;
+ }
+
+ init_pic_params (pic_params);
+ gst_d3d11_h264_dec_fill_picture_params (self, &slice->header, pic_params);
+
+ pic_params->CurrPic.Index7Bits = view_id;
+ pic_params->RefPicFlag = GST_H264_PICTURE_IS_REF (picture);
+ pic_params->frame_num = picture->frame_num;
+
+ if (picture->field == GST_H264_PICTURE_FIELD_TOP_FIELD) {
+ pic_params->CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
+ pic_params->CurrFieldOrderCnt[1] = 0;
+ } else if (picture->field == GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
+ pic_params->CurrFieldOrderCnt[0] = 0;
+ pic_params->CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
+ } else {
+ pic_params->CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
+ pic_params->CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
+ }
+
+ dpb_array = gst_h264_dpb_get_pictures_all (dpb);
+ for (i = 0, j = 0; i < dpb_array->len && j < 16; i++) {
+ GstH264Picture *other = g_array_index (dpb_array, GstH264Picture *, i);
+ guint8 id = 0xff;
+
+ if (!GST_H264_PICTURE_IS_REF (other))
+ continue;
+
+ /* The second field picture will be handled differently */
+ if (other->second_field)
+ continue;
+
+ gst_d3d11_h264_dec_get_output_view_from_picture (self, other, &id);
+ pic_params->RefFrameList[j].Index7Bits = id;
+
+ if (GST_H264_PICTURE_IS_LONG_TERM_REF (other)) {
+ pic_params->RefFrameList[j].AssociatedFlag = 1;
+ pic_params->FrameNumList[j] = other->long_term_frame_idx;
+ } else {
+ pic_params->RefFrameList[j].AssociatedFlag = 0;
+ pic_params->FrameNumList[j] = other->frame_num;
+ }
+
+ switch (other->field) {
+ case GST_H264_PICTURE_FIELD_TOP_FIELD:
+ pic_params->FieldOrderCntList[j][0] = other->top_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x1 << (2 * j);
+ break;
+ case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
+ pic_params->FieldOrderCntList[j][1] = other->bottom_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x1 << (2 * j + 1);
+ break;
+ default:
+ pic_params->FieldOrderCntList[j][0] = other->top_field_order_cnt;
+ pic_params->FieldOrderCntList[j][1] = other->bottom_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x3 << (2 * j);
+ break;
+ }
+
+ if (other->other_field) {
+ GstH264Picture *other_field = other->other_field;
+
+ switch (other_field->field) {
+ case GST_H264_PICTURE_FIELD_TOP_FIELD:
+ pic_params->FieldOrderCntList[j][0] =
+ other_field->top_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x1 << (2 * j);
+ break;
+ case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
+ pic_params->FieldOrderCntList[j][1] =
+ other_field->bottom_field_order_cnt;
+ pic_params->UsedForReferenceFlags |= 0x1 << (2 * j + 1);
+ break;
+ default:
+ break;
+ }
+ }
+
+ pic_params->NonExistingFrameFlags |= (other->nonexisting) << j;
+ j++;
+ }
+ g_array_unref (dpb_array);
+
+ G_STATIC_ASSERT (sizeof (iq_matrix->bScalingLists4x4) ==
+ sizeof (pps->scaling_lists_4x4));
+ memcpy (iq_matrix->bScalingLists4x4, pps->scaling_lists_4x4,
+ sizeof (pps->scaling_lists_4x4));
+
+ G_STATIC_ASSERT (sizeof (iq_matrix->bScalingLists8x8[0]) ==
+ sizeof (pps->scaling_lists_8x8[0]));
+ memcpy (iq_matrix->bScalingLists8x8[0], pps->scaling_lists_8x8[0],
+ sizeof (pps->scaling_lists_8x8[0]));
+ memcpy (iq_matrix->bScalingLists8x8[1], pps->scaling_lists_8x8[1],
+ sizeof (pps->scaling_lists_8x8[1]));
+
+ inner->slice_list.resize (0);
+ inner->bitstream_buffer.resize (0);
+
+ return TRUE;
+}
+
static gboolean
gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
GArray * ref_pic_list1)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
+ DXVA_Slice_H264_Short dxva_slice;
+ static const guint8 start_code[] = { 0, 0, 1 };
+ const size_t start_code_size = sizeof (start_code);
- {
- guint to_write = slice->nalu.size + 3;
- gboolean is_first = TRUE;
-
- while (to_write > 0) {
- guint bytes_to_copy;
- gboolean is_last = TRUE;
- DXVA_Slice_H264_Short slice_short = { 0, };
-
- if (self->remaining_buffer_size < to_write && self->slice_list->len > 0) {
- if (!gst_d3d11_h264_dec_submit_slice_data (self)) {
- GST_ERROR_OBJECT (self, "Failed to submit bitstream buffers");
- return FALSE;
- }
-
- if (!gst_d3d11_h264_dec_get_bitstream_buffer (self)) {
- GST_ERROR_OBJECT (self, "Failed to get bitstream buffer");
- return FALSE;
- }
- }
+ dxva_slice.BSNALunitDataLocation = inner->bitstream_buffer.size ();
+ /* Includes 3 bytes start code prefix */
+ dxva_slice.SliceBytesInBuffer = slice->nalu.size + start_code_size;
+ dxva_slice.wBadSliceChopping = 0;
- /* remaining_buffer_size: the size of remaining d3d11 decoder
- * bitstream memory allowed to write more
- * written_buffer_size: the size of written bytes to this d3d11 decoder
- * bitstream memory
- * bytes_to_copy: the size of which we would write to d3d11 decoder
- * bitstream memory in this loop
- */
-
- bytes_to_copy = to_write;
-
- if (bytes_to_copy > self->remaining_buffer_size) {
- /* if the size of this slice is larger than the size of remaining d3d11
- * decoder bitstream memory, write the data up to the remaining d3d11
- * decoder bitstream memory size and the rest would be written to the
- * next d3d11 bitstream memory */
- bytes_to_copy = self->remaining_buffer_size;
- is_last = FALSE;
- }
+ inner->slice_list.push_back (dxva_slice);
- if (bytes_to_copy >= 3 && is_first) {
- /* normal case */
- self->bitstream_buffer_data[0] = 0;
- self->bitstream_buffer_data[1] = 0;
- self->bitstream_buffer_data[2] = 1;
- memcpy (self->bitstream_buffer_data + 3,
- slice->nalu.data + slice->nalu.offset, bytes_to_copy - 3);
- } else {
- /* when this nal unit date is splitted into two buffer */
- memcpy (self->bitstream_buffer_data,
- slice->nalu.data + slice->nalu.offset, bytes_to_copy);
- }
+ size_t pos = inner->bitstream_buffer.size ();
+ inner->bitstream_buffer.resize (pos + start_code_size + slice->nalu.size);
- /* For wBadSliceChopping value 0 or 1, BSNALunitDataLocation means
- * the offset of the first start code of this slice in this d3d11
- * memory buffer.
- * 1) If this is the first slice of picture, it should be zero
- * since we write start code at offset 0 (written size before this
- * slice also must be zero).
- * 2) If this is not the first slice of picture but this is the first
- * d3d11 bitstream buffer (meaning that one bitstream buffer contains
- * multiple slices), then this is the written size of buffer
- * excluding this loop.
- * And for wBadSliceChopping value 2 or 3, this should be zero by spec */
- if (is_first)
- slice_short.BSNALunitDataLocation = self->written_buffer_size;
- else
- slice_short.BSNALunitDataLocation = 0;
- slice_short.SliceBytesInBuffer = bytes_to_copy;
-
- /* wBadSliceChopping: (dxva h264 spec.)
- * 0: All bits for the slice are located within the corresponding
- * bitstream data buffer
- * 1: The bitstream data buffer contains the start of the slice,
- * but not the entire slice, because the buffer is full
- * 2: The bitstream data buffer contains the end of the slice.
- * It does not contain the start of the slice, because the start of
- * the slice was located in the previous bitstream data buffer.
- * 3: The bitstream data buffer does not contain the start of the slice
- * (because the start of the slice was located in the previous
- * bitstream data buffer), and it does not contain the end of the slice
- * (because the current bitstream data buffer is also full).
- */
- if (is_last && is_first) {
- slice_short.wBadSliceChopping = 0;
- } else if (!is_last && is_first) {
- slice_short.wBadSliceChopping = 1;
- } else if (is_last && !is_first) {
- slice_short.wBadSliceChopping = 2;
- } else {
- slice_short.wBadSliceChopping = 3;
- }
+ /* Fill start code prefix */
+ memcpy (&inner->bitstream_buffer[0] + pos, start_code, start_code_size);
- g_array_append_val (self->slice_list, slice_short);
- self->remaining_buffer_size -= bytes_to_copy;
- self->written_buffer_size += bytes_to_copy;
- self->bitstream_buffer_data += bytes_to_copy;
- is_first = FALSE;
- to_write -= bytes_to_copy;
- }
- }
+ /* Copy bitstream */
+ memcpy (&inner->bitstream_buffer[0] + pos + start_code_size,
+ slice->nalu.data + slice->nalu.offset, slice->nalu.size);
return TRUE;
}
-typedef struct
+static gboolean
+gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
+ GstH264Picture * picture)
{
- guint width;
- guint height;
-} GstD3D11H264DecResolution;
+ GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
+ size_t bitstream_buffer_size;
+ size_t bitstream_pos;
+ GstD3D11DecodeInputStreamArgs input_args;
+
+ GST_LOG_OBJECT (self, "end picture %p, (poc %d)",
+ picture, picture->pic_order_cnt);
+
+ if (inner->bitstream_buffer.empty () || inner->slice_list.empty ()) {
+ GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
+ return FALSE;
+ }
+
+ view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
+ &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
+ return FALSE;
+ }
+
+ memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
+
+ bitstream_pos = inner->bitstream_buffer.size ();
+ bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);
+
+ if (bitstream_buffer_size > bitstream_pos) {
+ size_t padding = bitstream_buffer_size - bitstream_pos;
+
+ /* As per DXVA spec, total amount of bitstream buffer size should be
+ * 128 bytes aligned. If actual data is not multiple of 128 bytes,
+ * the last slice data needs to be zero-padded */
+ inner->bitstream_buffer.resize (bitstream_buffer_size, 0);
+
+ DXVA_Slice_H264_Short & slice = inner->slice_list.back ();
+ slice.SliceBytesInBuffer += padding;
+ }
+
+ input_args.picture_params = &inner->pic_params;
+ input_args.picture_params_size = sizeof (DXVA_PicParams_H264);
+ input_args.slice_control = &inner->slice_list[0];
+ input_args.slice_control_size =
+ sizeof (DXVA_Slice_H264_Short) * inner->slice_list.size ();
+ input_args.bitstream = &inner->bitstream_buffer[0];
+ input_args.bitstream_size = inner->bitstream_buffer.size ();
+ input_args.inverse_quantization_matrix = &inner->iq_matrix;
+ input_args.inverse_quantization_matrix_size = sizeof (DXVA_Qmatrix_H264);
+
+ return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
+ view, &input_args);
+}
+
+static GstFlowReturn
+gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
+ GstVideoCodecFrame * frame, GstH264Picture * picture)
+{
+ GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
+ GstD3D11H264DecInner *inner = self->inner;
+ GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
+ GstBuffer *view_buffer;
+
+ GST_LOG_OBJECT (self,
+ "Outputting picture %p (poc %d)", picture, picture->pic_order_cnt);
+
+ view_buffer = (GstBuffer *) gst_h264_picture_get_user_data (picture);
+
+ if (!view_buffer) {
+ GST_ERROR_OBJECT (self, "Could not get output view");
+ goto error;
+ }
+
+ if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
+ inner->width, inner->height, view_buffer, &frame->output_buffer)) {
+ GST_ERROR_OBJECT (self, "Failed to copy buffer");
+ goto error;
+ }
+
+ if (picture->buffer_flags != 0) {
+ gboolean interlaced =
+ (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_INTERLACED) != 0;
+ gboolean tff = (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_TFF) != 0;
+
+ GST_TRACE_OBJECT (self,
+ "apply buffer flags 0x%x (interlaced %d, top-field-first %d)",
+ picture->buffer_flags, interlaced, tff);
+ GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
+ }
+
+ gst_h264_picture_unref (picture);
+
+ return gst_video_decoder_finish_frame (vdec, frame);
+
+error:
+ gst_h264_picture_unref (picture);
+ gst_video_decoder_release_frame (vdec, frame);
+
+ return GST_FLOW_ERROR;
+}
void
gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
- GstD3D11Decoder * decoder, guint rank, gboolean legacy)
+ guint rank, gboolean legacy)
{
GType type;
gchar *type_name;
@@ -1178,19 +932,14 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
(GInstanceInitFunc) gst_d3d11_h264_dec_init,
};
const GUID *supported_profile = NULL;
- /* values were taken from chromium. See supported_profile_helper.cc */
- GstD3D11H264DecResolution resolutions_to_check[] = {
- {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160},
- {4096, 2304}
- };
GstCaps *sink_caps = NULL;
GstCaps *src_caps = NULL;
guint max_width = 0;
guint max_height = 0;
guint resolution;
- ret = gst_d3d11_decoder_get_supported_decoder_profile (decoder,
- GST_D3D11_CODEC_H264, GST_VIDEO_FORMAT_NV12, &supported_profile);
+ ret = gst_d3d11_decoder_get_supported_decoder_profile (device,
+ GST_DXVA_CODEC_H264, GST_VIDEO_FORMAT_NV12, &supported_profile);
if (!ret) {
GST_WARNING_OBJECT (device, "decoder profile unavailable");
@@ -1198,7 +947,7 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
}
ret =
- gst_d3d11_decoder_supports_format (decoder, supported_profile,
+ gst_d3d11_decoder_supports_format (device, supported_profile,
DXGI_FORMAT_NV12);
if (!ret) {
GST_FIXME_OBJECT (device, "device does not support NV12 format");
@@ -1208,15 +957,15 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
/* we will not check the maximum resolution for legacy devices.
* it might cause crash */
if (legacy) {
- max_width = resolutions_to_check[0].width;
- max_height = resolutions_to_check[0].height;
+ max_width = gst_dxva_resolutions[0].width;
+ max_height = gst_dxva_resolutions[0].height;
} else {
- for (i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
- if (gst_d3d11_decoder_supports_resolution (decoder, supported_profile,
- DXGI_FORMAT_NV12, resolutions_to_check[i].width,
- resolutions_to_check[i].height)) {
- max_width = resolutions_to_check[i].width;
- max_height = resolutions_to_check[i].height;
+ for (i = 0; i < G_N_ELEMENTS (gst_dxva_resolutions); i++) {
+ if (gst_d3d11_decoder_supports_resolution (device, supported_profile,
+ DXGI_FORMAT_NV12, gst_dxva_resolutions[i].width,
+ gst_dxva_resolutions[i].height)) {
+ max_width = gst_dxva_resolutions[i].width;
+ max_height = gst_dxva_resolutions[i].height;
GST_DEBUG_OBJECT (device,
"device support resolution %dx%d", max_width, max_height);
@@ -1249,7 +998,7 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
"height", GST_TYPE_INT_RANGE, 1, resolution, NULL);
type_info.class_data =
- gst_d3d11_decoder_class_data_new (device, GST_D3D11_CODEC_H264,
+ gst_d3d11_decoder_class_data_new (device, GST_DXVA_CODEC_H264,
sink_caps, src_caps);
type_name = g_strdup ("GstD3D11H264Dec");
diff --git a/sys/d3d11/gstd3d11h264dec.h b/sys/d3d11/gstd3d11h264dec.h
index 382e28909..9d0c52f72 100644
--- a/sys/d3d11/gstd3d11h264dec.h
+++ b/sys/d3d11/gstd3d11h264dec.h
@@ -26,7 +26,6 @@ G_BEGIN_DECLS
void gst_d3d11_h264_dec_register (GstPlugin * plugin,
GstD3D11Device * device,
- GstD3D11Decoder * decoder,
guint rank,
gboolean legacy);
diff --git a/sys/d3d11/gstd3d11h265dec.cpp b/sys/d3d11/gstd3d11h265dec.cpp
index 3a8a37b21..d594decaa 100644
--- a/sys/d3d11/gstd3d11h265dec.cpp
+++ b/sys/d3d11/gstd3d11h265dec.cpp
@@ -40,6 +40,7 @@
#include <gst/codecs/gsth265decoder.h>
#include <string.h>
+#include <vector>
/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
@@ -52,36 +53,35 @@
GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_h265_dec_debug);
#define GST_CAT_DEFAULT gst_d3d11_h265_dec_debug
-typedef struct _GstD3D11H265Dec
+/* *INDENT-OFF* */
+typedef struct _GstD3D11H265DecInner
{
- GstH265Decoder parent;
+ GstD3D11Device *device = nullptr;
+ GstD3D11Decoder *d3d11_decoder = nullptr;
- GstD3D11Device *device;
+ DXVA_PicParams_HEVC pic_params;
+ DXVA_Qmatrix_HEVC iq_matrix;
- gint width, height;
- gint coded_width, coded_height;
- guint bitdepth;
- guint8 chroma_format_idc;
- GstVideoFormat out_format;
- GstVideoInterlaceMode interlace_mode;
+ std::vector<DXVA_Slice_HEVC_Short> slice_list;
+ std::vector<guint8> bitstream_buffer;
- /* Array of DXVA_Slice_HEVC_Short */
- GArray *slice_list;
gboolean submit_iq_data;
- GstD3D11Decoder *d3d11_decoder;
-
- /* Pointing current bitstream buffer */
- gboolean bad_aligned_bitstream_buffer;
- guint written_buffer_size;
- guint remaining_buffer_size;
- guint8 *bitstream_buffer_data;
+ gint width = 0;
+ gint height = 0;
+ gint coded_width = 0;
+ gint coded_height = 0;
+ guint bitdepth = 0;
+ guint8 chroma_format_idc = 0;
+ GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
+ GstVideoInterlaceMode interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+} GstD3D11H265DecInner;
+/* *INDENT-ON* */
- DXVA_PicEntry_HEVC ref_pic_list[15];
- INT pic_order_cnt_val_list[15];
- UCHAR ref_pic_set_st_curr_before[8];
- UCHAR ref_pic_set_st_curr_after[8];
- UCHAR ref_pic_set_lt_curr[8];
+typedef struct _GstD3D11H265Dec
+{
+ GstH265Decoder parent;
+ GstD3D11H265DecInner *inner;
} GstD3D11H265Dec;
typedef struct _GstD3D11H265DecClass
@@ -98,7 +98,7 @@ static GstElementClass *parent_class = NULL;
static void gst_d3d11_h265_dec_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-static void gst_d3d11_h265_dec_dispose (GObject * object);
+static void gst_d3d11_h265_dec_finalize (GObject * object);
static void gst_d3d11_h265_dec_set_context (GstElement * element,
GstContext * context);
@@ -126,14 +126,6 @@ static gboolean gst_d3d11_h265_dec_decode_slice (GstH265Decoder * decoder,
GArray * ref_pic_list0, GArray * ref_pic_list1);
static gboolean gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder,
GstH265Picture * picture);
-static gboolean gst_d3d11_h265_dec_fill_picture_params (GstD3D11H265Dec * self,
- const GstH265SliceHdr * slice_header, DXVA_PicParams_HEVC * params);
-
-#ifndef GST_DISABLE_GST_DEBUG
-static void
-gst_d3d11_h265_dec_dump_pic_params (GstD3D11H265Dec * self,
- DXVA_PicParams_HEVC * params);
-#endif
static void
gst_d3d11_h265_dec_class_init (GstD3D11H265DecClass * klass, gpointer data)
@@ -145,7 +137,7 @@ gst_d3d11_h265_dec_class_init (GstD3D11H265DecClass * klass, gpointer data)
GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;
gobject_class->get_property = gst_d3d11_h265_dec_get_property;
- gobject_class->dispose = gst_d3d11_h265_dec_dispose;
+ gobject_class->finalize = gst_d3d11_h265_dec_finalize;
element_class->set_context =
GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_set_context);
@@ -180,7 +172,7 @@ gst_d3d11_h265_dec_class_init (GstD3D11H265DecClass * klass, gpointer data)
static void
gst_d3d11_h265_dec_init (GstD3D11H265Dec * self)
{
- self->slice_list = g_array_new (FALSE, TRUE, sizeof (DXVA_Slice_HEVC_Short));
+ self->inner = new GstD3D11H265DecInner ();
}
static void
@@ -194,27 +186,25 @@ gst_d3d11_h265_dec_get_property (GObject * object, guint prop_id,
}
static void
-gst_d3d11_h265_dec_dispose (GObject * object)
+gst_d3d11_h265_dec_finalize (GObject * object)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (object);
- if (self->slice_list) {
- g_array_unref (self->slice_list);
- self->slice_list = NULL;
- }
+ delete self->inner;
- G_OBJECT_CLASS (parent_class)->dispose (object);
+ G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_d3d11_h265_dec_set_context (GstElement * element, GstContext * context)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (element);
+ GstD3D11H265DecInner *inner = self->inner;
GstD3D11H265DecClass *klass = GST_D3D11_H265_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
gst_d3d11_handle_set_context (element, context, cdata->adapter,
- &self->device);
+ &inner->device);
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@@ -223,20 +213,22 @@ static gboolean
gst_d3d11_h265_dec_open (GstVideoDecoder * decoder)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
GstD3D11H265DecClass *klass = GST_D3D11_H265_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), cdata->adapter,
- &self->device)) {
+ &inner->device)) {
GST_ERROR_OBJECT (self, "Cannot create d3d11device");
return FALSE;
}
- self->d3d11_decoder = gst_d3d11_decoder_new (self->device);
+ inner->d3d11_decoder = gst_d3d11_decoder_new (inner->device,
+ GST_DXVA_CODEC_H265);
- if (!self->d3d11_decoder) {
+ if (!inner->d3d11_decoder) {
GST_ERROR_OBJECT (self, "Cannot create d3d11 decoder");
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->device);
return FALSE;
}
@@ -247,9 +239,10 @@ static gboolean
gst_d3d11_h265_dec_close (GstVideoDecoder * decoder)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
- gst_clear_object (&self->d3d11_decoder);
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->d3d11_decoder);
+ gst_clear_object (&inner->device);
return TRUE;
}
@@ -258,8 +251,9 @@ static gboolean
gst_d3d11_h265_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_negotiate (self->d3d11_decoder, decoder))
+ if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
@@ -270,8 +264,9 @@ gst_d3d11_h265_dec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_decide_allocation (self->d3d11_decoder,
+ if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
decoder, query)) {
return FALSE;
}
@@ -284,11 +279,12 @@ static gboolean
gst_d3d11_h265_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
- query, self->device)) {
+ query, inner->device)) {
return TRUE;
}
break;
@@ -303,15 +299,16 @@ static gboolean
gst_d3d11_h265_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, TRUE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
break;
case GST_EVENT_FLUSH_STOP:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, FALSE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
default:
break;
}
@@ -324,6 +321,7 @@ gst_d3d11_h265_dec_new_sequence (GstH265Decoder * decoder,
const GstH265SPS * sps, gint max_dpb_size)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
gint crop_width, crop_height;
gboolean modified = FALSE;
GstVideoInterlaceMode interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
@@ -338,20 +336,20 @@ gst_d3d11_h265_dec_new_sequence (GstH265Decoder * decoder,
crop_height = sps->height;
}
- if (self->width != crop_width || self->height != crop_height ||
- self->coded_width != sps->width || self->coded_height != sps->height) {
- GST_INFO_OBJECT (self, "resolution changed %dx%d (%dx%d)",
+ if (inner->width != crop_width || inner->height != crop_height ||
+ inner->coded_width != sps->width || inner->coded_height != sps->height) {
+ GST_INFO_OBJECT (self, "resolution changed %dx%d -> %dx%d",
crop_width, crop_height, sps->width, sps->height);
- self->width = crop_width;
- self->height = crop_height;
- self->coded_width = sps->width;
- self->coded_height = sps->height;
+ inner->width = crop_width;
+ inner->height = crop_height;
+ inner->coded_width = sps->width;
+ inner->coded_height = sps->height;
modified = TRUE;
}
- if (self->bitdepth != (guint) sps->bit_depth_luma_minus8 + 8) {
+ if (inner->bitdepth != (guint) sps->bit_depth_luma_minus8 + 8) {
GST_INFO_OBJECT (self, "bitdepth changed");
- self->bitdepth = sps->bit_depth_luma_minus8 + 8;
+ inner->bitdepth = sps->bit_depth_luma_minus8 + 8;
modified = TRUE;
}
@@ -367,49 +365,50 @@ gst_d3d11_h265_dec_new_sequence (GstH265Decoder * decoder,
}
}
- if (self->interlace_mode != interlace_mode) {
+ if (inner->interlace_mode != interlace_mode) {
GST_INFO_OBJECT (self, "Interlace mode change %d -> %d",
- self->interlace_mode, interlace_mode);
- self->interlace_mode = interlace_mode;
+ inner->interlace_mode, interlace_mode);
+ inner->interlace_mode = interlace_mode;
modified = TRUE;
}
- if (self->chroma_format_idc != sps->chroma_format_idc) {
+ if (inner->chroma_format_idc != sps->chroma_format_idc) {
GST_INFO_OBJECT (self, "chroma format changed");
- self->chroma_format_idc = sps->chroma_format_idc;
+ inner->chroma_format_idc = sps->chroma_format_idc;
modified = TRUE;
}
- if (modified || !gst_d3d11_decoder_is_configured (self->d3d11_decoder)) {
+ if (modified || !gst_d3d11_decoder_is_configured (inner->d3d11_decoder)) {
GstVideoInfo info;
- self->out_format = GST_VIDEO_FORMAT_UNKNOWN;
+ inner->out_format = GST_VIDEO_FORMAT_UNKNOWN;
- if (self->bitdepth == 8) {
- if (self->chroma_format_idc == 1) {
- self->out_format = GST_VIDEO_FORMAT_NV12;
+ if (inner->bitdepth == 8) {
+ if (inner->chroma_format_idc == 1) {
+ inner->out_format = GST_VIDEO_FORMAT_NV12;
} else {
GST_FIXME_OBJECT (self, "Could not support 8bits non-4:2:0 format");
}
- } else if (self->bitdepth == 10) {
- if (self->chroma_format_idc == 1) {
- self->out_format = GST_VIDEO_FORMAT_P010_10LE;
+ } else if (inner->bitdepth == 10) {
+ if (inner->chroma_format_idc == 1) {
+ inner->out_format = GST_VIDEO_FORMAT_P010_10LE;
} else {
GST_FIXME_OBJECT (self, "Could not support 10bits non-4:2:0 format");
}
}
- if (self->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
+ if (inner->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
GST_ERROR_OBJECT (self, "Could not support bitdepth/chroma format");
return FALSE;
}
gst_video_info_set_format (&info,
- self->out_format, self->width, self->height);
- GST_VIDEO_INFO_INTERLACE_MODE (&info) = self->interlace_mode;
+ inner->out_format, inner->width, inner->height);
+ GST_VIDEO_INFO_INTERLACE_MODE (&info) = inner->interlace_mode;
- if (!gst_d3d11_decoder_configure (self->d3d11_decoder, GST_D3D11_CODEC_H265,
- decoder->input_state, &info, self->coded_width, self->coded_height,
+ if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
+ decoder->input_state, &info,
+ inner->coded_width, inner->coded_height,
/* Additional 4 views margin for zero-copy rendering */
max_dpb_size + 4)) {
GST_ERROR_OBJECT (self, "Failed to create decoder");
@@ -426,300 +425,14 @@ gst_d3d11_h265_dec_new_sequence (GstH265Decoder * decoder,
}
static gboolean
-gst_d3d11_h265_dec_get_bitstream_buffer (GstD3D11H265Dec * self)
-{
- GST_TRACE_OBJECT (self, "Getting bitstream buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &self->remaining_buffer_size,
- (gpointer *) & self->bitstream_buffer_data)) {
- GST_ERROR_OBJECT (self, "Faild to get bitstream buffer");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Got bitstream buffer %p with size %d",
- self->bitstream_buffer_data, self->remaining_buffer_size);
- self->written_buffer_size = 0;
- if ((self->remaining_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "The size of bitstream buffer is not 128 bytes aligned");
- self->bad_aligned_bitstream_buffer = TRUE;
- } else {
- self->bad_aligned_bitstream_buffer = FALSE;
- }
-
- return TRUE;
-}
-
-static ID3D11VideoDecoderOutputView *
-gst_d3d11_h265_dec_get_output_view_from_picture (GstD3D11H265Dec * self,
- GstH265Picture * picture, guint8 * view_id)
-{
- GstBuffer *view_buffer;
- ID3D11VideoDecoderOutputView *view;
-
- view_buffer = (GstBuffer *) gst_h265_picture_get_user_data (picture);
- if (!view_buffer) {
- GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
- return NULL;
- }
-
- view = gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
- view_buffer, view_id);
- if (!view) {
- GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
- return NULL;
- }
-
- return view;
-}
-
-static gint
-gst_d3d11_h265_dec_get_ref_index (GstD3D11H265Dec * self, gint view_id)
-{
- guint i;
- for (i = 0; i < G_N_ELEMENTS (self->ref_pic_list); i++) {
- if (self->ref_pic_list[i].Index7Bits == view_id)
- return i;
- }
-
- return 0xff;
-}
-
-static gboolean
-gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
- GstH265Picture * picture, GstH265Slice * slice, GstH265Dpb * dpb)
-{
- GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
- ID3D11VideoDecoderOutputView *view;
- guint8 view_id = 0xff;
- guint i, j;
- GArray *dpb_array;
- GstH265SPS *sps;
- GstH265PPS *pps;
- DXVA_PicParams_HEVC pic_params = { 0, };
- DXVA_Qmatrix_HEVC iq_matrix = { 0, };
- GstH265ScalingList *scaling_list = NULL;
- guint d3d11_buffer_size = 0;
- gpointer d3d11_buffer = NULL;
-
- pps = slice->header.pps;
- g_assert (pps != NULL);
-
- sps = pps->sps;
- g_assert (sps != NULL);
-
- view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture,
- &view_id);
- if (!view) {
- GST_ERROR_OBJECT (self, "current picture does not have output view handle");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Begin frame");
- if (!gst_d3d11_decoder_begin_frame (self->d3d11_decoder, view, 0, NULL)) {
- GST_ERROR_OBJECT (self, "Failed to begin frame");
- return FALSE;
- }
-
- for (i = 0; i < 15; i++) {
- self->ref_pic_list[i].bPicEntry = 0xff;
- self->pic_order_cnt_val_list[i] = 0;
- }
-
- for (i = 0; i < 8; i++) {
- self->ref_pic_set_st_curr_before[i] = 0xff;
- self->ref_pic_set_st_curr_after[i] = 0xff;
- self->ref_pic_set_lt_curr[i] = 0xff;
- }
-
- dpb_array = gst_h265_dpb_get_pictures_all (dpb);
-
- GST_LOG_OBJECT (self, "DPB size %d", dpb_array->len);
-
- for (i = 0; i < dpb_array->len && i < G_N_ELEMENTS (self->ref_pic_list); i++) {
- GstH265Picture *other = g_array_index (dpb_array, GstH265Picture *, i);
- guint8 id = 0xff;
-
- if (!other->ref) {
- GST_LOG_OBJECT (self, "%dth picture in dpb is not reference, skip", i);
- continue;
- }
-
- gst_d3d11_h265_dec_get_output_view_from_picture (self, other, &id);
- self->ref_pic_list[i].Index7Bits = id;
- self->ref_pic_list[i].AssociatedFlag = other->long_term;
- self->pic_order_cnt_val_list[i] = other->pic_order_cnt;
- }
-
- for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_st_curr_before); i++) {
- GstH265Picture *other = NULL;
- guint8 other_view_id = 0xff;
- guint8 id = 0xff;
-
- while (other == NULL && j < decoder->NumPocStCurrBefore)
- other = decoder->RefPicSetStCurrBefore[j++];
-
- if (other) {
- ID3D11VideoDecoderOutputView *other_view;
-
- other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
- other, &other_view_id);
-
- if (other_view)
- id = gst_d3d11_h265_dec_get_ref_index (self, other_view_id);
- }
-
- self->ref_pic_set_st_curr_before[i] = id;
- }
-
- for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_st_curr_after); i++) {
- GstH265Picture *other = NULL;
- guint8 other_view_id = 0xff;
- guint8 id = 0xff;
-
- while (other == NULL && j < decoder->NumPocStCurrAfter)
- other = decoder->RefPicSetStCurrAfter[j++];
-
- if (other) {
- ID3D11VideoDecoderOutputView *other_view;
-
- other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
- other, &other_view_id);
-
- if (other_view)
- id = gst_d3d11_h265_dec_get_ref_index (self, other_view_id);
- }
-
- self->ref_pic_set_st_curr_after[i] = id;
- }
-
- for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_lt_curr); i++) {
- GstH265Picture *other = NULL;
- guint8 other_view_id = 0xff;
- guint8 id = 0xff;
-
- while (other == NULL && j < decoder->NumPocLtCurr)
- other = decoder->RefPicSetLtCurr[j++];
-
- if (other) {
- ID3D11VideoDecoderOutputView *other_view;
-
- other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
- other, &other_view_id);
-
- if (other_view)
- id = gst_d3d11_h265_dec_get_ref_index (self, other_view_id);
- }
-
- self->ref_pic_set_lt_curr[i] = id;
- }
-
- gst_d3d11_h265_dec_fill_picture_params (self, &slice->header, &pic_params);
-
- pic_params.CurrPic.Index7Bits = view_id;
- pic_params.IrapPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);
- pic_params.IdrPicFlag = GST_H265_IS_NAL_TYPE_IDR (slice->nalu.type);
- pic_params.IntraPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);
- pic_params.CurrPicOrderCntVal = picture->pic_order_cnt;
-
- memcpy (pic_params.RefPicList, self->ref_pic_list,
- sizeof (pic_params.RefPicList));
- memcpy (pic_params.PicOrderCntValList, self->pic_order_cnt_val_list,
- sizeof (pic_params.PicOrderCntValList));
- memcpy (pic_params.RefPicSetStCurrBefore, self->ref_pic_set_st_curr_before,
- sizeof (pic_params.RefPicSetStCurrBefore));
- memcpy (pic_params.RefPicSetStCurrAfter, self->ref_pic_set_st_curr_after,
- sizeof (pic_params.RefPicSetStCurrAfter));
- memcpy (pic_params.RefPicSetLtCurr, self->ref_pic_set_lt_curr,
- sizeof (pic_params.RefPicSetLtCurr));
-
-#ifndef GST_DISABLE_GST_DEBUG
- if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_TRACE)
- gst_d3d11_h265_dec_dump_pic_params (self, &pic_params);
-#endif
-
- GST_TRACE_OBJECT (self, "Getting picture param decoder buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for picture parameters");
- return FALSE;
- }
-
- memcpy (d3d11_buffer, &pic_params, sizeof (pic_params));
-
- GST_TRACE_OBJECT (self, "Release picture param decoder buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
-
- if (pps->scaling_list_data_present_flag ||
- (sps->scaling_list_enabled_flag
- && !sps->scaling_list_data_present_flag)) {
- scaling_list = &pps->scaling_list;
- } else if (sps->scaling_list_enabled_flag &&
- sps->scaling_list_data_present_flag) {
- scaling_list = &sps->scaling_list;
- }
-
- if (scaling_list) {
- self->submit_iq_data = TRUE;
-
- memcpy (iq_matrix.ucScalingLists0, scaling_list->scaling_lists_4x4,
- sizeof (iq_matrix.ucScalingLists0));
- memcpy (iq_matrix.ucScalingLists1, scaling_list->scaling_lists_8x8,
- sizeof (iq_matrix.ucScalingLists1));
- memcpy (iq_matrix.ucScalingLists2, scaling_list->scaling_lists_16x16,
- sizeof (iq_matrix.ucScalingLists2));
- memcpy (iq_matrix.ucScalingLists3, scaling_list->scaling_lists_32x32,
- sizeof (iq_matrix.ucScalingLists3));
-
- for (i = 0; i < 6; i++)
- iq_matrix.ucScalingListDCCoefSizeID2[i] =
- scaling_list->scaling_list_dc_coef_minus8_16x16[i] + 8;
-
- for (i = 0; i < 2; i++)
- iq_matrix.ucScalingListDCCoefSizeID3[i] =
- scaling_list->scaling_list_dc_coef_minus8_32x32[i] + 8;
-
- GST_TRACE_OBJECT (self, "Getting inverse quantization matrix buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX,
- &d3d11_buffer_size, &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for inv. quantization matrix");
- return FALSE;
- }
-
- memcpy (d3d11_buffer, &iq_matrix, sizeof (iq_matrix));
-
- GST_TRACE_OBJECT (self, "Release inverse quantization matrix buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
- } else {
- self->submit_iq_data = FALSE;
- }
-
- g_array_unref (dpb_array);
- g_array_set_size (self->slice_list, 0);
-
- return gst_d3d11_h265_dec_get_bitstream_buffer (self);
-}
-
-static gboolean
gst_d3d11_h265_dec_new_picture (GstH265Decoder * decoder,
GstVideoCodecFrame * cframe, GstH265Picture * picture)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
GstBuffer *view_buffer;
- view_buffer = gst_d3d11_decoder_get_output_view_buffer (self->d3d11_decoder,
+ view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
@@ -736,179 +449,6 @@ gst_d3d11_h265_dec_new_picture (GstH265Decoder * decoder,
return TRUE;
}
-static GstFlowReturn
-gst_d3d11_h265_dec_output_picture (GstH265Decoder * decoder,
- GstVideoCodecFrame * frame, GstH265Picture * picture)
-{
- GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
- GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
- GstBuffer *view_buffer;
-
- GST_LOG_OBJECT (self, "Outputting picture %p, poc %d, picture_struct %d, "
- "buffer flags 0x%x", picture, picture->pic_order_cnt, picture->pic_struct,
- picture->buffer_flags);
-
- view_buffer = (GstBuffer *) gst_h265_picture_get_user_data (picture);
-
- if (!view_buffer) {
- GST_ERROR_OBJECT (self, "Could not get output view");
- goto error;
- }
-
- if (!gst_d3d11_decoder_process_output (self->d3d11_decoder, vdec,
- self->width, self->height, view_buffer, &frame->output_buffer)) {
- GST_ERROR_OBJECT (self, "Failed to copy buffer");
- goto error;
- }
-
- GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
- gst_h265_picture_unref (picture);
-
- return gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
-
-error:
- gst_video_decoder_drop_frame (vdec, frame);
- gst_h265_picture_unref (picture);
-
- return GST_FLOW_ERROR;
-}
-
-static gboolean
-gst_d3d11_h265_dec_submit_slice_data (GstD3D11H265Dec * self)
-{
- guint buffer_size;
- gpointer buffer;
- guint8 *data;
- gsize offset = 0;
- guint i;
- D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[4];
- gboolean ret;
- guint buffer_count = 0;
- DXVA_Slice_HEVC_Short *slice_data;
-
- if (self->slice_list->len < 1) {
- GST_WARNING_OBJECT (self, "Nothing to submit");
- return FALSE;
- }
-
- memset (buffer_desc, 0, sizeof (buffer_desc));
-
- slice_data = &g_array_index (self->slice_list, DXVA_Slice_HEVC_Short,
- self->slice_list->len - 1);
-
- /* DXVA2 spec is saying that written bitstream data must be 128 bytes
- * aligned if the bitstream buffer contains end of slice
- * (i.e., wBadSliceChopping == 0 or 2) */
- if (slice_data->wBadSliceChopping == 0 || slice_data->wBadSliceChopping == 2) {
- guint padding =
- MIN (GST_ROUND_UP_128 (self->written_buffer_size) -
- self->written_buffer_size, self->remaining_buffer_size);
-
- if (padding) {
- GST_TRACE_OBJECT (self,
- "Written bitstream buffer size %u is not 128 bytes aligned, "
- "add padding %u bytes", self->written_buffer_size, padding);
- memset (self->bitstream_buffer_data, 0, padding);
- self->written_buffer_size += padding;
- slice_data->SliceBytesInBuffer += padding;
- }
- }
-
- GST_TRACE_OBJECT (self, "Getting slice control buffer");
-
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, &buffer_size, &buffer)) {
- GST_ERROR_OBJECT (self, "Couldn't get slice control buffer");
- return FALSE;
- }
-
- data = (guint8 *) buffer;
- for (i = 0; i < self->slice_list->len; i++) {
- slice_data = &g_array_index (self->slice_list, DXVA_Slice_HEVC_Short, i);
-
- memcpy (data + offset, slice_data, sizeof (DXVA_Slice_HEVC_Short));
- offset += sizeof (DXVA_Slice_HEVC_Short);
- }
-
- GST_TRACE_OBJECT (self, "Release slice control buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) {
- GST_ERROR_OBJECT (self, "Failed to release slice control buffer");
- return FALSE;
- }
-
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) {
- GST_ERROR_OBJECT (self, "Failed to release bitstream buffer");
- return FALSE;
- }
-
- buffer_desc[buffer_count].BufferType =
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
- buffer_desc[buffer_count].DataOffset = 0;
- buffer_desc[buffer_count].DataSize = sizeof (DXVA_PicParams_HEVC);
- buffer_count++;
-
- if (self->submit_iq_data) {
- buffer_desc[buffer_count].BufferType =
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX;
- buffer_desc[buffer_count].DataOffset = 0;
- buffer_desc[buffer_count].DataSize = sizeof (DXVA_Qmatrix_HEVC);
- buffer_count++;
- }
-
- buffer_desc[buffer_count].BufferType =
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
- buffer_desc[buffer_count].DataOffset = 0;
- buffer_desc[buffer_count].DataSize =
- sizeof (DXVA_Slice_HEVC_Short) * self->slice_list->len;
- buffer_count++;
-
- if (!self->bad_aligned_bitstream_buffer
- && (self->written_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "Written bitstream buffer size %u is not 128 bytes aligned",
- self->written_buffer_size);
- }
-
- buffer_desc[buffer_count].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
- buffer_desc[buffer_count].DataOffset = 0;
- buffer_desc[buffer_count].DataSize = self->written_buffer_size;
- buffer_count++;
-
- ret = gst_d3d11_decoder_submit_decoder_buffers (self->d3d11_decoder,
- buffer_count, buffer_desc);
-
- self->written_buffer_size = 0;
- self->bitstream_buffer_data = NULL;
- self->remaining_buffer_size = 0;
- g_array_set_size (self->slice_list, 0);
-
- return ret;
-}
-
-static gboolean
-gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder,
- GstH265Picture * picture)
-{
- GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
-
- GST_LOG_OBJECT (self, "end picture %p, (poc %d)",
- picture, picture->pic_order_cnt);
-
- if (!gst_d3d11_h265_dec_submit_slice_data (self)) {
- GST_ERROR_OBJECT (self, "Failed to submit slice data");
- return FALSE;
- }
-
- if (!gst_d3d11_decoder_end_frame (self->d3d11_decoder)) {
- GST_ERROR_OBJECT (self, "Failed to EndFrame");
- return FALSE;
- }
-
- return TRUE;
-}
-
static void
gst_d3d11_h265_dec_picture_params_from_sps (GstD3D11H265Dec * self,
const GstH265SPS * sps, DXVA_PicParams_HEVC * params)
@@ -1048,8 +588,6 @@ gst_d3d11_h265_dec_fill_picture_params (GstD3D11H265Dec * self,
pps = slice_header->pps;
sps = pps->sps;
- memset (params, 0, sizeof (DXVA_PicParams_HEVC));
-
/* not related to hevc syntax */
params->NoPicReorderingFlag = 0;
params->NoBiPredFlag = 0;
@@ -1064,113 +602,221 @@ gst_d3d11_h265_dec_fill_picture_params (GstD3D11H265Dec * self,
return TRUE;
}
-#ifndef GST_DISABLE_GST_DEBUG
-static void
-gst_d3d11_h265_dec_dump_pic_params (GstD3D11H265Dec * self,
- DXVA_PicParams_HEVC * params)
+static ID3D11VideoDecoderOutputView *
+gst_d3d11_h265_dec_get_output_view_from_picture (GstD3D11H265Dec * self,
+ GstH265Picture * picture, guint8 * view_id)
{
- guint i;
+ GstD3D11H265DecInner *inner = self->inner;
+ GstBuffer *view_buffer;
+ ID3D11VideoDecoderOutputView *view;
- GST_TRACE_OBJECT (self, "Dump current DXVA_PicParams_HEVC");
-
-#define DUMP_PIC_PARAMS(p) \
- GST_TRACE_OBJECT (self, "\t" G_STRINGIFY(p) ": %d", (gint)params->p)
-
- DUMP_PIC_PARAMS (PicWidthInMinCbsY);
- DUMP_PIC_PARAMS (PicHeightInMinCbsY);
- DUMP_PIC_PARAMS (chroma_format_idc);
- DUMP_PIC_PARAMS (separate_colour_plane_flag);
- DUMP_PIC_PARAMS (bit_depth_chroma_minus8);
- DUMP_PIC_PARAMS (NoPicReorderingFlag);
- DUMP_PIC_PARAMS (NoBiPredFlag);
- DUMP_PIC_PARAMS (CurrPic.Index7Bits);
- DUMP_PIC_PARAMS (sps_max_dec_pic_buffering_minus1);
- DUMP_PIC_PARAMS (log2_min_luma_coding_block_size_minus3);
- DUMP_PIC_PARAMS (log2_diff_max_min_luma_coding_block_size);
- DUMP_PIC_PARAMS (log2_min_transform_block_size_minus2);
- DUMP_PIC_PARAMS (log2_diff_max_min_transform_block_size);
- DUMP_PIC_PARAMS (max_transform_hierarchy_depth_inter);
- DUMP_PIC_PARAMS (max_transform_hierarchy_depth_intra);
- DUMP_PIC_PARAMS (num_short_term_ref_pic_sets);
- DUMP_PIC_PARAMS (num_long_term_ref_pics_sps);
- DUMP_PIC_PARAMS (num_ref_idx_l0_default_active_minus1);
- DUMP_PIC_PARAMS (num_ref_idx_l1_default_active_minus1);
- DUMP_PIC_PARAMS (init_qp_minus26);
- DUMP_PIC_PARAMS (ucNumDeltaPocsOfRefRpsIdx);
- DUMP_PIC_PARAMS (wNumBitsForShortTermRPSInSlice);
- DUMP_PIC_PARAMS (scaling_list_enabled_flag);
- DUMP_PIC_PARAMS (amp_enabled_flag);
- DUMP_PIC_PARAMS (sample_adaptive_offset_enabled_flag);
- DUMP_PIC_PARAMS (pcm_enabled_flag);
- DUMP_PIC_PARAMS (pcm_sample_bit_depth_luma_minus1);
- DUMP_PIC_PARAMS (pcm_sample_bit_depth_chroma_minus1);
- DUMP_PIC_PARAMS (log2_min_pcm_luma_coding_block_size_minus3);
- DUMP_PIC_PARAMS (log2_diff_max_min_pcm_luma_coding_block_size);
- DUMP_PIC_PARAMS (pcm_loop_filter_disabled_flag);
- DUMP_PIC_PARAMS (long_term_ref_pics_present_flag);
- DUMP_PIC_PARAMS (sps_temporal_mvp_enabled_flag);
- DUMP_PIC_PARAMS (strong_intra_smoothing_enabled_flag);
- DUMP_PIC_PARAMS (dependent_slice_segments_enabled_flag);
- DUMP_PIC_PARAMS (output_flag_present_flag);
- DUMP_PIC_PARAMS (num_extra_slice_header_bits);
- DUMP_PIC_PARAMS (sign_data_hiding_enabled_flag);
- DUMP_PIC_PARAMS (cabac_init_present_flag);
-
- DUMP_PIC_PARAMS (constrained_intra_pred_flag);
- DUMP_PIC_PARAMS (transform_skip_enabled_flag);
- DUMP_PIC_PARAMS (cu_qp_delta_enabled_flag);
- DUMP_PIC_PARAMS (pps_slice_chroma_qp_offsets_present_flag);
- DUMP_PIC_PARAMS (weighted_pred_flag);
- DUMP_PIC_PARAMS (weighted_bipred_flag);
- DUMP_PIC_PARAMS (transquant_bypass_enabled_flag);
- DUMP_PIC_PARAMS (tiles_enabled_flag);
- DUMP_PIC_PARAMS (entropy_coding_sync_enabled_flag);
- DUMP_PIC_PARAMS (uniform_spacing_flag);
- DUMP_PIC_PARAMS (loop_filter_across_tiles_enabled_flag);
- DUMP_PIC_PARAMS (pps_loop_filter_across_slices_enabled_flag);
- DUMP_PIC_PARAMS (deblocking_filter_override_enabled_flag);
- DUMP_PIC_PARAMS (pps_deblocking_filter_disabled_flag);
- DUMP_PIC_PARAMS (lists_modification_present_flag);
- DUMP_PIC_PARAMS (IrapPicFlag);
- DUMP_PIC_PARAMS (IdrPicFlag);
- DUMP_PIC_PARAMS (IntraPicFlag);
- DUMP_PIC_PARAMS (pps_cb_qp_offset);
- DUMP_PIC_PARAMS (pps_cr_qp_offset);
- DUMP_PIC_PARAMS (num_tile_columns_minus1);
- DUMP_PIC_PARAMS (num_tile_rows_minus1);
- for (i = 0; i < G_N_ELEMENTS (params->column_width_minus1); i++)
- GST_TRACE_OBJECT (self, "\tcolumn_width_minus1[%d]: %d", i,
- params->column_width_minus1[i]);
- for (i = 0; i < G_N_ELEMENTS (params->row_height_minus1); i++)
- GST_TRACE_OBJECT (self, "\trow_height_minus1[%d]: %d", i,
- params->row_height_minus1[i]);
- DUMP_PIC_PARAMS (diff_cu_qp_delta_depth);
- DUMP_PIC_PARAMS (pps_beta_offset_div2);
- DUMP_PIC_PARAMS (pps_tc_offset_div2);
- DUMP_PIC_PARAMS (log2_parallel_merge_level_minus2);
- DUMP_PIC_PARAMS (CurrPicOrderCntVal);
-
- for (i = 0; i < G_N_ELEMENTS (params->RefPicList); i++) {
- GST_TRACE_OBJECT (self, "\tRefPicList[%d].Index7Bits: %d", i,
- params->RefPicList[i].Index7Bits);
- GST_TRACE_OBJECT (self, "\tRefPicList[%d].AssociatedFlag: %d", i,
- params->RefPicList[i].AssociatedFlag);
- GST_TRACE_OBJECT (self, "\tPicOrderCntValList[%d]: %d", i,
- params->PicOrderCntValList[i]);
+ view_buffer = (GstBuffer *) gst_h265_picture_get_user_data (picture);
+ if (!view_buffer) {
+ GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
+ return NULL;
}
- for (i = 0; i < G_N_ELEMENTS (params->RefPicSetStCurrBefore); i++) {
- GST_TRACE_OBJECT (self, "\tRefPicSetStCurrBefore[%d]: %d", i,
- params->RefPicSetStCurrBefore[i]);
- GST_TRACE_OBJECT (self, "\tRefPicSetStCurrAfter[%d]: %d", i,
- params->RefPicSetStCurrAfter[i]);
- GST_TRACE_OBJECT (self, "\tRefPicSetLtCurr[%d]: %d", i,
- params->RefPicSetLtCurr[i]);
+ view = gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
+ view_buffer, view_id);
+ if (!view) {
+ GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
+ return NULL;
}
-#undef DUMP_PIC_PARAMS
+ return view;
+}
+
+static UCHAR
+gst_d3d11_h265_dec_get_ref_index (const DXVA_PicParams_HEVC * pic_params,
+ guint8 view_id)
+{
+ for (UCHAR i = 0; i < G_N_ELEMENTS (pic_params->RefPicList); i++) {
+ if (pic_params->RefPicList[i].Index7Bits == view_id)
+ return i;
+ }
+
+ return 0xff;
+}
+
+static inline void
+init_pic_params (DXVA_PicParams_HEVC * params)
+{
+ memset (params, 0, sizeof (DXVA_PicParams_HEVC));
+ for (guint i = 0; i < G_N_ELEMENTS (params->RefPicList); i++)
+ params->RefPicList[i].bPicEntry = 0xff;
+
+ for (guint i = 0; i < G_N_ELEMENTS (params->RefPicSetStCurrBefore); i++) {
+ params->RefPicSetStCurrBefore[i] = 0xff;
+ params->RefPicSetStCurrAfter[i] = 0xff;
+ params->RefPicSetLtCurr[i] = 0xff;
+ }
+}
+
+static gboolean
+gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
+ GstH265Picture * picture, GstH265Slice * slice, GstH265Dpb * dpb)
+{
+ GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
+ DXVA_PicParams_HEVC *pic_params = &inner->pic_params;
+ DXVA_Qmatrix_HEVC *iq_matrix = &inner->iq_matrix;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
+ guint i, j;
+ GArray *dpb_array;
+ GstH265SPS *sps;
+ GstH265PPS *pps;
+ GstH265ScalingList *scaling_list = nullptr;
+
+ pps = slice->header.pps;
+ sps = pps->sps;
+
+ view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture,
+ &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
+ return FALSE;
+ }
+
+ init_pic_params (pic_params);
+ gst_d3d11_h265_dec_fill_picture_params (self, &slice->header, pic_params);
+
+ pic_params->CurrPic.Index7Bits = view_id;
+ pic_params->IrapPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);
+ pic_params->IdrPicFlag = GST_H265_IS_NAL_TYPE_IDR (slice->nalu.type);
+ pic_params->IntraPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);
+ pic_params->CurrPicOrderCntVal = picture->pic_order_cnt;
+
+ dpb_array = gst_h265_dpb_get_pictures_all (dpb);
+ for (i = 0, j = 0;
+ i < dpb_array->len && j < G_N_ELEMENTS (pic_params->RefPicList); i++) {
+ GstH265Picture *other = g_array_index (dpb_array, GstH265Picture *, i);
+ guint8 id = 0xff;
+
+ if (!other->ref) {
+ GST_LOG_OBJECT (self, "%dth picture in dpb is not reference, skip", i);
+ continue;
+ }
+
+ gst_d3d11_h265_dec_get_output_view_from_picture (self, other, &id);
+ pic_params->RefPicList[j].Index7Bits = id;
+ pic_params->RefPicList[j].AssociatedFlag = other->long_term;
+ pic_params->PicOrderCntValList[j] = other->pic_order_cnt;
+ j++;
+ }
+ g_array_unref (dpb_array);
+
+ for (i = 0, j = 0; i < G_N_ELEMENTS (pic_params->RefPicSetStCurrBefore); i++) {
+ GstH265Picture *other = nullptr;
+ guint8 other_view_id = 0xff;
+ guint8 id = 0xff;
+
+ while (!other && j < decoder->NumPocStCurrBefore)
+ other = decoder->RefPicSetStCurrBefore[j++];
+
+ if (other) {
+ ID3D11VideoDecoderOutputView *other_view;
+
+ other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
+ other, &other_view_id);
+
+ if (other_view)
+ id = gst_d3d11_h265_dec_get_ref_index (pic_params, other_view_id);
+ }
+
+ pic_params->RefPicSetStCurrBefore[i] = id;
+ }
+
+ for (i = 0, j = 0; i < G_N_ELEMENTS (pic_params->RefPicSetStCurrAfter); i++) {
+ GstH265Picture *other = nullptr;
+ guint8 other_view_id = 0xff;
+ guint8 id = 0xff;
+
+ while (!other && j < decoder->NumPocStCurrAfter)
+ other = decoder->RefPicSetStCurrAfter[j++];
+
+ if (other) {
+ ID3D11VideoDecoderOutputView *other_view;
+
+ other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
+ other, &other_view_id);
+
+ if (other_view)
+ id = gst_d3d11_h265_dec_get_ref_index (pic_params, other_view_id);
+ }
+
+ pic_params->RefPicSetStCurrAfter[i] = id;
+ }
+
+ for (i = 0, j = 0; i < G_N_ELEMENTS (pic_params->RefPicSetLtCurr); i++) {
+ GstH265Picture *other = nullptr;
+ guint8 other_view_id = 0xff;
+ guint8 id = 0xff;
+
+ while (!other && j < decoder->NumPocLtCurr)
+ other = decoder->RefPicSetLtCurr[j++];
+
+ if (other) {
+ ID3D11VideoDecoderOutputView *other_view;
+
+ other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
+ other, &other_view_id);
+
+ if (other_view)
+ id = gst_d3d11_h265_dec_get_ref_index (pic_params, other_view_id);
+ }
+
+ pic_params->RefPicSetLtCurr[i] = id;
+ }
+
+ if (pps->scaling_list_data_present_flag ||
+ (sps->scaling_list_enabled_flag
+ && !sps->scaling_list_data_present_flag)) {
+ scaling_list = &pps->scaling_list;
+ } else if (sps->scaling_list_enabled_flag &&
+ sps->scaling_list_data_present_flag) {
+ scaling_list = &sps->scaling_list;
+ }
+
+ if (scaling_list) {
+ G_STATIC_ASSERT (sizeof (iq_matrix->ucScalingLists0) ==
+ sizeof (scaling_list->scaling_lists_4x4));
+ G_STATIC_ASSERT (sizeof (iq_matrix->ucScalingLists1) ==
+ sizeof (scaling_list->scaling_lists_8x8));
+ G_STATIC_ASSERT (sizeof (iq_matrix->ucScalingLists2) ==
+ sizeof (scaling_list->scaling_lists_16x16));
+ G_STATIC_ASSERT (sizeof (iq_matrix->ucScalingLists3) ==
+ sizeof (scaling_list->scaling_lists_32x32));
+
+ memcpy (iq_matrix->ucScalingLists0, scaling_list->scaling_lists_4x4,
+ sizeof (iq_matrix->ucScalingLists0));
+ memcpy (iq_matrix->ucScalingLists1, scaling_list->scaling_lists_8x8,
+ sizeof (iq_matrix->ucScalingLists1));
+ memcpy (iq_matrix->ucScalingLists2, scaling_list->scaling_lists_16x16,
+ sizeof (iq_matrix->ucScalingLists2));
+ memcpy (iq_matrix->ucScalingLists3, scaling_list->scaling_lists_32x32,
+ sizeof (iq_matrix->ucScalingLists3));
+
+ for (i = 0; i < G_N_ELEMENTS (iq_matrix->ucScalingListDCCoefSizeID2); i++) {
+ iq_matrix->ucScalingListDCCoefSizeID2[i] =
+ scaling_list->scaling_list_dc_coef_minus8_16x16[i] + 8;
+ }
+
+ for (i = 0; i < G_N_ELEMENTS (iq_matrix->ucScalingListDCCoefSizeID3); i++) {
+ iq_matrix->ucScalingListDCCoefSizeID3[i] =
+ scaling_list->scaling_list_dc_coef_minus8_32x32[i] + 8;
+ }
+
+ inner->submit_iq_data = TRUE;
+ } else {
+ inner->submit_iq_data = FALSE;
+ }
+
+ inner->slice_list.resize (0);
+ inner->bitstream_buffer.resize (0);
+
+ return TRUE;
}
-#endif
static gboolean
gst_d3d11_h265_dec_decode_slice (GstH265Decoder * decoder,
@@ -1178,121 +824,133 @@ gst_d3d11_h265_dec_decode_slice (GstH265Decoder * decoder,
GArray * ref_pic_list0, GArray * ref_pic_list1)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
+ DXVA_Slice_HEVC_Short dxva_slice;
+ static const guint8 start_code[] = { 0, 0, 1 };
+ const size_t start_code_size = sizeof (start_code);
- {
- guint to_write = slice->nalu.size + 3;
- gboolean is_first = TRUE;
-
- while (to_write > 0) {
- guint bytes_to_copy;
- gboolean is_last = TRUE;
- DXVA_Slice_HEVC_Short slice_short = { 0, };
-
- if (self->remaining_buffer_size < to_write && self->slice_list->len > 0) {
- if (!gst_d3d11_h265_dec_submit_slice_data (self)) {
- GST_ERROR_OBJECT (self, "Failed to submit bitstream buffers");
- return FALSE;
- }
-
- if (!gst_d3d11_h265_dec_get_bitstream_buffer (self)) {
- GST_ERROR_OBJECT (self, "Failed to get bitstream buffer");
- return FALSE;
- }
- }
+ dxva_slice.BSNALunitDataLocation = inner->bitstream_buffer.size ();
+ /* Includes the 3-byte start code prefix */
+ dxva_slice.SliceBytesInBuffer = slice->nalu.size + start_code_size;
+ dxva_slice.wBadSliceChopping = 0;
- /* remaining_buffer_size: the size of remaining d3d11 decoder
- * bitstream memory allowed to write more
- * written_buffer_size: the size of written bytes to this d3d11 decoder
- * bitstream memory
- * bytes_to_copy: the size of which we would write to d3d11 decoder
- * bitstream memory in this loop
- */
-
- bytes_to_copy = to_write;
-
- if (bytes_to_copy > self->remaining_buffer_size) {
- /* if the size of this slice is larger than the size of remaining d3d11
- * decoder bitstream memory, write the data up to the remaining d3d11
- * decoder bitstream memory size and the rest would be written to the
- * next d3d11 bitstream memory */
- bytes_to_copy = self->remaining_buffer_size;
- is_last = FALSE;
- }
+ inner->slice_list.push_back (dxva_slice);
- if (bytes_to_copy >= 3 && is_first) {
- /* normal case */
- self->bitstream_buffer_data[0] = 0;
- self->bitstream_buffer_data[1] = 0;
- self->bitstream_buffer_data[2] = 1;
- memcpy (self->bitstream_buffer_data + 3,
- slice->nalu.data + slice->nalu.offset, bytes_to_copy - 3);
- } else {
- /* when this nal unit date is splitted into two buffer */
- memcpy (self->bitstream_buffer_data,
- slice->nalu.data + slice->nalu.offset, bytes_to_copy);
- }
+ size_t pos = inner->bitstream_buffer.size ();
+ inner->bitstream_buffer.resize (pos + start_code_size + slice->nalu.size);
- /* For wBadSliceChopping value 0 or 1, BSNALunitDataLocation means
- * the offset of the first start code of this slice in this d3d11
- * memory buffer.
- * 1) If this is the first slice of picture, it should be zero
- * since we write start code at offset 0 (written size before this
- * slice also must be zero).
- * 2) If this is not the first slice of picture but this is the first
- * d3d11 bitstream buffer (meaning that one bitstream buffer contains
- * multiple slices), then this is the written size of buffer
- * excluding this loop.
- * And for wBadSliceChopping value 2 or 3, this should be zero by spec */
- if (is_first)
- slice_short.BSNALunitDataLocation = self->written_buffer_size;
- else
- slice_short.BSNALunitDataLocation = 0;
- slice_short.SliceBytesInBuffer = bytes_to_copy;
-
- /* wBadSliceChopping: (dxva h265 spec.)
- * 0: All bits for the slice are located within the corresponding
- * bitstream data buffer
- * 1: The bitstream data buffer contains the start of the slice,
- * but not the entire slice, because the buffer is full
- * 2: The bitstream data buffer contains the end of the slice.
- * It does not contain the start of the slice, because the start of
- * the slice was located in the previous bitstream data buffer.
- * 3: The bitstream data buffer does not contain the start of the slice
- * (because the start of the slice was located in the previous
- * bitstream data buffer), and it does not contain the end of the slice
- * (because the current bitstream data buffer is also full).
- */
- if (is_last && is_first) {
- slice_short.wBadSliceChopping = 0;
- } else if (!is_last && is_first) {
- slice_short.wBadSliceChopping = 1;
- } else if (is_last && !is_first) {
- slice_short.wBadSliceChopping = 2;
- } else {
- slice_short.wBadSliceChopping = 3;
- }
+ /* Fill start code prefix */
+ memcpy (&inner->bitstream_buffer[0] + pos, start_code, start_code_size);
- g_array_append_val (self->slice_list, slice_short);
- self->remaining_buffer_size -= bytes_to_copy;
- self->written_buffer_size += bytes_to_copy;
- self->bitstream_buffer_data += bytes_to_copy;
- is_first = FALSE;
- to_write -= bytes_to_copy;
- }
- }
+ /* Copy bitstream */
+ memcpy (&inner->bitstream_buffer[0] + pos + start_code_size,
+ slice->nalu.data + slice->nalu.offset, slice->nalu.size);
return TRUE;
}
-typedef struct
+static gboolean
+gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder,
+ GstH265Picture * picture)
{
- guint width;
- guint height;
-} GstD3D11H265DecResolution;
+ GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
+ size_t bitstream_buffer_size;
+ size_t bitstream_pos;
+ GstD3D11DecodeInputStreamArgs input_args;
+
+ GST_LOG_OBJECT (self, "end picture %p, (poc %d)",
+ picture, picture->pic_order_cnt);
+
+ if (inner->bitstream_buffer.empty () || inner->slice_list.empty ()) {
+ GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
+ return FALSE;
+ }
+
+ view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture,
+ &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
+ return FALSE;
+ }
+
+ memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
+
+ bitstream_pos = inner->bitstream_buffer.size ();
+ bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);
+
+ if (bitstream_buffer_size > bitstream_pos) {
+ size_t padding = bitstream_buffer_size - bitstream_pos;
+
+ /* As per the DXVA spec, the total bitstream buffer size should be
+ * 128-byte aligned. If the actual data is not a multiple of 128 bytes,
+ * the last slice data needs to be zero-padded */
+ inner->bitstream_buffer.resize (bitstream_buffer_size, 0);
+
+ DXVA_Slice_HEVC_Short & slice = inner->slice_list.back ();
+ slice.SliceBytesInBuffer += padding;
+ }
+
+ input_args.picture_params = &inner->pic_params;
+ input_args.picture_params_size = sizeof (DXVA_PicParams_HEVC);
+ input_args.slice_control = &inner->slice_list[0];
+ input_args.slice_control_size =
+ sizeof (DXVA_Slice_HEVC_Short) * inner->slice_list.size ();
+ input_args.bitstream = &inner->bitstream_buffer[0];
+ input_args.bitstream_size = inner->bitstream_buffer.size ();
+
+ if (inner->submit_iq_data) {
+ input_args.inverse_quantization_matrix = &inner->iq_matrix;
+ input_args.inverse_quantization_matrix_size = sizeof (DXVA_Qmatrix_HEVC);
+ }
+
+ return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
+ view, &input_args);
+}
+
+static GstFlowReturn
+gst_d3d11_h265_dec_output_picture (GstH265Decoder * decoder,
+ GstVideoCodecFrame * frame, GstH265Picture * picture)
+{
+ GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ GstD3D11H265DecInner *inner = self->inner;
+ GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
+ GstBuffer *view_buffer;
+
+ GST_LOG_OBJECT (self, "Outputting picture %p, poc %d, picture_struct %d, "
+ "buffer flags 0x%x", picture, picture->pic_order_cnt, picture->pic_struct,
+ picture->buffer_flags);
+
+ view_buffer = (GstBuffer *) gst_h265_picture_get_user_data (picture);
+
+ if (!view_buffer) {
+ GST_ERROR_OBJECT (self, "Could not get output view");
+ goto error;
+ }
+
+ if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
+ inner->width, inner->height, view_buffer, &frame->output_buffer)) {
+ GST_ERROR_OBJECT (self, "Failed to copy buffer");
+ goto error;
+ }
+
+ GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
+ gst_h265_picture_unref (picture);
+
+ return gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
+
+error:
+ gst_h265_picture_unref (picture);
+ gst_video_decoder_release_frame (vdec, frame);
+
+ return GST_FLOW_ERROR;
+}
void
gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
- GstD3D11Decoder * decoder, guint rank)
+ guint rank)
{
GType type;
gchar *type_name;
@@ -1313,14 +971,6 @@ gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
};
const GUID *main_10_guid = NULL;
const GUID *main_guid = NULL;
- /* values were taken from chromium.
- * Note that since chromium does not support hevc decoding, this list is
- * the combination of lists for avc and vp9.
- * See supported_profile_helper.cc */
- GstD3D11H265DecResolution resolutions_to_check[] = {
- {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160},
- {4096, 2304}, {7680, 4320}, {8192, 4320}, {8192, 8192}
- };
GstCaps *sink_caps = NULL;
GstCaps *src_caps = NULL;
GstCaps *src_caps_copy;
@@ -1333,27 +983,26 @@ gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
gboolean have_main = FALSE;
DXGI_FORMAT format = DXGI_FORMAT_UNKNOWN;
- have_main10 = gst_d3d11_decoder_get_supported_decoder_profile (decoder,
- GST_D3D11_CODEC_H265, GST_VIDEO_FORMAT_P010_10LE, &main_10_guid);
+ have_main10 = gst_d3d11_decoder_get_supported_decoder_profile (device,
+ GST_DXVA_CODEC_H265, GST_VIDEO_FORMAT_P010_10LE, &main_10_guid);
if (!have_main10) {
GST_DEBUG_OBJECT (device, "decoder does not support HEVC_VLD_MAIN10");
} else {
have_main10 &=
- gst_d3d11_decoder_supports_format (decoder, main_10_guid,
+ gst_d3d11_decoder_supports_format (device, main_10_guid,
DXGI_FORMAT_P010);
if (!have_main10) {
GST_FIXME_OBJECT (device, "device does not support P010 format");
}
}
- have_main = gst_d3d11_decoder_get_supported_decoder_profile (decoder,
- GST_D3D11_CODEC_H265, GST_VIDEO_FORMAT_NV12, &main_guid);
+ have_main = gst_d3d11_decoder_get_supported_decoder_profile (device,
+ GST_DXVA_CODEC_H265, GST_VIDEO_FORMAT_NV12, &main_guid);
if (!have_main) {
GST_DEBUG_OBJECT (device, "decoder does not support HEVC_VLD_MAIN");
} else {
have_main =
- gst_d3d11_decoder_supports_format (decoder, main_guid,
- DXGI_FORMAT_NV12);
+ gst_d3d11_decoder_supports_format (device, main_guid, DXGI_FORMAT_NV12);
if (!have_main) {
GST_FIXME_OBJECT (device, "device does not support NV12 format");
}
@@ -1372,12 +1021,12 @@ gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
format = DXGI_FORMAT_P010;
}
- for (i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
- if (gst_d3d11_decoder_supports_resolution (decoder, profile,
- format, resolutions_to_check[i].width,
- resolutions_to_check[i].height)) {
- max_width = resolutions_to_check[i].width;
- max_height = resolutions_to_check[i].height;
+ for (i = 0; i < G_N_ELEMENTS (gst_dxva_resolutions); i++) {
+ if (gst_d3d11_decoder_supports_resolution (device, profile,
+ format, gst_dxva_resolutions[i].width,
+ gst_dxva_resolutions[i].height)) {
+ max_width = gst_dxva_resolutions[i].width;
+ max_height = gst_dxva_resolutions[i].height;
GST_DEBUG_OBJECT (device,
"device support resolution %dx%d", max_width, max_height);
@@ -1475,7 +1124,7 @@ gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
gst_caps_unref (src_caps_copy);
type_info.class_data =
- gst_d3d11_decoder_class_data_new (device, GST_D3D11_CODEC_H265,
+ gst_d3d11_decoder_class_data_new (device, GST_DXVA_CODEC_H265,
sink_caps, src_caps);
type_name = g_strdup ("GstD3D11H265Dec");
diff --git a/sys/d3d11/gstd3d11h265dec.h b/sys/d3d11/gstd3d11h265dec.h
index 9cb021514..f6cd6e9ca 100644
--- a/sys/d3d11/gstd3d11h265dec.h
+++ b/sys/d3d11/gstd3d11h265dec.h
@@ -26,7 +26,6 @@ G_BEGIN_DECLS
void gst_d3d11_h265_dec_register (GstPlugin * plugin,
GstD3D11Device * device,
- GstD3D11Decoder * decoder,
guint rank);
G_END_DECLS
diff --git a/sys/d3d11/gstd3d11mpeg2dec.cpp b/sys/d3d11/gstd3d11mpeg2dec.cpp
index b1e1b8ba1..a18c07aae 100644
--- a/sys/d3d11/gstd3d11mpeg2dec.cpp
+++ b/sys/d3d11/gstd3d11mpeg2dec.cpp
@@ -40,6 +40,7 @@
#include <gst/codecs/gstmpeg2decoder.h>
#include <string.h>
+#include <vector>
/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
@@ -55,28 +56,35 @@ GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_mpeg2_dec_debug);
/* reference list 2 + 4 margin */
#define NUM_OUTPUT_VIEW 6
-typedef struct _GstD3D11Mpeg2Dec
+/* *INDENT-OFF* */
+typedef struct _GstD3D11Mpeg2DecInner
{
- GstMpeg2Decoder parent;
+ GstD3D11Device *device = nullptr;
+ GstD3D11Decoder *d3d11_decoder = nullptr;
- GstD3D11Device *device;
- GstD3D11Decoder *d3d11_decoder;
+ DXVA_PictureParameters pic_params;
+ DXVA_QmatrixData iq_matrix;
- gint width, height;
- guint width_in_mb, height_in_mb;
- GstVideoFormat out_format;
- GstMpegVideoSequenceHdr seq;
- GstMpegVideoProfile profile;
- gboolean interlaced;
+ std::vector<DXVA_SliceInfo> slice_list;
+ std::vector<guint8> bitstream_buffer;
- /* Array of DXVA_SliceInfo */
- GArray *slice_list;
gboolean submit_iq_data;
- /* Pointing current bitstream buffer */
- guint written_buffer_size;
- guint remaining_buffer_size;
- guint8 *bitstream_buffer_data;
+ gint width = 0;
+ gint height = 0;
+ guint width_in_mb = 0;
+ guint height_in_mb = 0;
+ GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
+ GstMpegVideoSequenceHdr seq;
+ GstMpegVideoProfile profile = GST_MPEG_VIDEO_PROFILE_MAIN;
+ gboolean interlaced = FALSE;
+} GstD3D11Mpeg2DecInner;
+/* *INDENT-ON* */
+
+typedef struct _GstD3D11Mpeg2Dec
+{
+ GstMpeg2Decoder parent;
+ GstD3D11Mpeg2DecInner *inner;
} GstD3D11Mpeg2Dec;
typedef struct _GstD3D11Mpeg2DecClass
@@ -91,9 +99,9 @@ static GstElementClass *parent_class = NULL;
#define GST_D3D11_MPEG2_DEC_GET_CLASS(object) \
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstD3D11Mpeg2DecClass))
-static void gst_d3d11_mpeg2_dec_finalize (GObject * object);
static void gst_d3d11_mpeg2_dec_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
+static void gst_d3d11_mpeg2_dec_finalize (GObject * object);
static void gst_d3d11_mpeg2_dec_set_context (GstElement * element,
GstContext * context);
@@ -176,18 +184,7 @@ gst_d3d11_mpeg2_dec_class_init (GstD3D11Mpeg2DecClass * klass, gpointer data)
static void
gst_d3d11_mpeg2_dec_init (GstD3D11Mpeg2Dec * self)
{
- self->slice_list = g_array_new (FALSE, TRUE, sizeof (DXVA_SliceInfo));
- self->profile = GST_MPEG_VIDEO_PROFILE_MAIN;
-}
-
-static void
-gst_d3d11_mpeg2_dec_finalize (GObject * object)
-{
- GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (object);
-
- g_array_unref (self->slice_list);
-
- G_OBJECT_CLASS (parent_class)->finalize (object);
+ self->inner = new GstD3D11Mpeg2DecInner ();
}
static void
@@ -201,14 +198,25 @@ gst_d3d11_mpeg2_dec_get_property (GObject * object, guint prop_id,
}
static void
+gst_d3d11_mpeg2_dec_finalize (GObject * object)
+{
+ GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (object);
+
+ delete self->inner;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
gst_d3d11_mpeg2_dec_set_context (GstElement * element, GstContext * context)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (element);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
GstD3D11Mpeg2DecClass *klass = GST_D3D11_MPEG2_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
gst_d3d11_handle_set_context (element, context, cdata->adapter,
- &self->device);
+ &inner->device);
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@@ -217,20 +225,22 @@ static gboolean
gst_d3d11_mpeg2_dec_open (GstVideoDecoder * decoder)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
GstD3D11Mpeg2DecClass *klass = GST_D3D11_MPEG2_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), cdata->adapter,
- &self->device)) {
+ &inner->device)) {
GST_ERROR_OBJECT (self, "Cannot create d3d11device");
return FALSE;
}
- self->d3d11_decoder = gst_d3d11_decoder_new (self->device);
+ inner->d3d11_decoder = gst_d3d11_decoder_new (inner->device,
+ GST_DXVA_CODEC_MPEG2);
- if (!self->d3d11_decoder) {
+ if (!inner->d3d11_decoder) {
GST_ERROR_OBJECT (self, "Cannot create d3d11 decoder");
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->device);
return FALSE;
}
@@ -241,9 +251,10 @@ static gboolean
gst_d3d11_mpeg2_dec_close (GstVideoDecoder * decoder)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
- gst_clear_object (&self->d3d11_decoder);
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->d3d11_decoder);
+ gst_clear_object (&inner->device);
return TRUE;
}
@@ -252,8 +263,9 @@ static gboolean
gst_d3d11_mpeg2_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_negotiate (self->d3d11_decoder, decoder))
+ if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
@@ -264,8 +276,9 @@ gst_d3d11_mpeg2_dec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_decide_allocation (self->d3d11_decoder,
+ if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
decoder, query)) {
return FALSE;
}
@@ -278,11 +291,12 @@ static gboolean
gst_d3d11_mpeg2_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
- query, self->device)) {
+ query, inner->device)) {
return TRUE;
}
break;
@@ -297,15 +311,16 @@ static gboolean
gst_d3d11_mpeg2_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, TRUE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
break;
case GST_EVENT_FLUSH_STOP:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, FALSE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
default:
break;
}
@@ -321,6 +336,7 @@ gst_d3d11_mpeg2_dec_new_sequence (GstMpeg2Decoder * decoder,
const GstMpegVideoSequenceScalableExt * seq_scalable_ext)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
gboolean interlaced;
gboolean modified = FALSE;
gint width, height;
@@ -329,9 +345,9 @@ gst_d3d11_mpeg2_dec_new_sequence (GstMpeg2Decoder * decoder,
GST_LOG_OBJECT (self, "new sequence");
interlaced = seq_ext ? !seq_ext->progressive : FALSE;
- if (self->interlaced != interlaced) {
+ if (inner->interlaced != interlaced) {
GST_INFO_OBJECT (self, "interlaced sequence change");
- self->interlaced = interlaced;
+ inner->interlaced = interlaced;
modified = TRUE;
}
@@ -342,13 +358,13 @@ gst_d3d11_mpeg2_dec_new_sequence (GstMpeg2Decoder * decoder,
height = (height & 0x0fff) | ((guint32) seq_ext->vert_size_ext << 12);
}
- if (self->width != width || self->height != height) {
+ if (inner->width != width || inner->height != height) {
GST_INFO_OBJECT (self, "resolution change %dx%d -> %dx%d",
- self->width, self->height, width, height);
- self->width = width;
- self->height = height;
- self->width_in_mb = GST_ROUND_UP_16 (width) >> 4;
- self->height_in_mb = GST_ROUND_UP_16 (height) >> 4;
+ inner->width, inner->height, width, height);
+ inner->width = width;
+ inner->height = height;
+ inner->width_in_mb = GST_ROUND_UP_16 (width) >> 4;
+ inner->height_in_mb = GST_ROUND_UP_16 (height) >> 4;
modified = TRUE;
}
@@ -362,27 +378,27 @@ gst_d3d11_mpeg2_dec_new_sequence (GstMpeg2Decoder * decoder,
return FALSE;
}
- if (self->profile != mpeg_profile) {
+ if (inner->profile != mpeg_profile) {
GST_INFO_OBJECT (self, "Profile change %d -> %d",
- self->profile, mpeg_profile);
- self->profile = mpeg_profile;
+ inner->profile, mpeg_profile);
+ inner->profile = mpeg_profile;
modified = TRUE;
}
- if (modified || !gst_d3d11_decoder_is_configured (self->d3d11_decoder)) {
+ if (modified || !gst_d3d11_decoder_is_configured (inner->d3d11_decoder)) {
GstVideoInfo info;
/* FIXME: support I420 */
- self->out_format = GST_VIDEO_FORMAT_NV12;
+ inner->out_format = GST_VIDEO_FORMAT_NV12;
gst_video_info_set_format (&info,
- self->out_format, self->width, self->height);
- if (self->interlaced)
+ inner->out_format, inner->width, inner->height);
+ if (inner->interlaced)
GST_VIDEO_INFO_INTERLACE_MODE (&info) = GST_VIDEO_INTERLACE_MODE_MIXED;
- if (!gst_d3d11_decoder_configure (self->d3d11_decoder,
- GST_D3D11_CODEC_MPEG2, decoder->input_state, &info,
- self->width, self->height, NUM_OUTPUT_VIEW)) {
+ if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
+ decoder->input_state, &info,
+ inner->width, inner->height, NUM_OUTPUT_VIEW)) {
GST_ERROR_OBJECT (self, "Failed to create decoder");
return FALSE;
}
@@ -401,9 +417,10 @@ gst_d3d11_mpeg2_dec_new_picture (GstMpeg2Decoder * decoder,
GstVideoCodecFrame * frame, GstMpeg2Picture * picture)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
GstBuffer *view_buffer;
- view_buffer = gst_d3d11_decoder_get_output_view_buffer (self->d3d11_decoder,
+ view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
@@ -444,33 +461,11 @@ gst_d3d11_mpeg2_dec_new_field_picture (GstMpeg2Decoder * decoder,
return TRUE;
}
-static gboolean
-gst_d3d11_mpeg2_dec_get_bitstream_buffer (GstD3D11Mpeg2Dec * self)
-{
- GST_TRACE_OBJECT (self, "Getting bitstream buffer");
-
- self->written_buffer_size = 0;
- self->remaining_buffer_size = 0;
- self->bitstream_buffer_data = NULL;
-
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &self->remaining_buffer_size,
- (gpointer *) & self->bitstream_buffer_data)) {
- GST_ERROR_OBJECT (self, "Faild to get bitstream buffer");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Got bitstream buffer %p with size %d",
- self->bitstream_buffer_data, self->remaining_buffer_size);
- self->written_buffer_size = 0;
-
- return TRUE;
-}
-
static ID3D11VideoDecoderOutputView *
gst_d3d11_mpeg2_dec_get_output_view_from_picture (GstD3D11Mpeg2Dec * self,
GstMpeg2Picture * picture, guint8 * view_id)
{
+ GstD3D11Mpeg2DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
@@ -484,7 +479,7 @@ gst_d3d11_mpeg2_dec_get_output_view_from_picture (GstD3D11Mpeg2Dec * self,
}
view =
- gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
+ gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
@@ -525,14 +520,13 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
GstMpeg2Picture * prev_picture, GstMpeg2Picture * next_picture)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
+ DXVA_PictureParameters *pic_params = &inner->pic_params;
+ DXVA_QmatrixData *iq_matrix = &inner->iq_matrix;
ID3D11VideoDecoderOutputView *view;
ID3D11VideoDecoderOutputView *other_view;
guint8 view_id = 0xff;
guint8 other_view_id = 0xff;
- DXVA_PictureParameters pic_params = { 0, };
- DXVA_QmatrixData iq_matrix = { 0, };
- guint d3d11_buffer_size = 0;
- gpointer d3d11_buffer = NULL;
gboolean is_field =
picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
@@ -543,16 +537,13 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
return FALSE;
}
- GST_TRACE_OBJECT (self, "Begin frame");
- if (!gst_d3d11_decoder_begin_frame (self->d3d11_decoder, view, 0, NULL)) {
- GST_ERROR_OBJECT (self, "Failed to begin frame");
- return FALSE;
- }
+ memset (pic_params, 0, sizeof (DXVA_PictureParameters));
+ memset (iq_matrix, 0, sizeof (DXVA_QmatrixData));
/* Fill DXVA_PictureParameters */
- pic_params.wDecodedPictureIndex = view_id;
- pic_params.wForwardRefPictureIndex = 0xffff;
- pic_params.wBackwardRefPictureIndex = 0xffff;
+ pic_params->wDecodedPictureIndex = view_id;
+ pic_params->wForwardRefPictureIndex = 0xffff;
+ pic_params->wBackwardRefPictureIndex = 0xffff;
switch (picture->type) {
case GST_MPEG_VIDEO_PICTURE_TYPE_B:{
@@ -561,7 +552,7 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
next_picture, &other_view_id);
if (other_view)
- pic_params.wBackwardRefPictureIndex = other_view_id;
+ pic_params->wBackwardRefPictureIndex = other_view_id;
}
}
/* fall-through */
@@ -571,51 +562,33 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
prev_picture, &other_view_id);
if (other_view)
- pic_params.wForwardRefPictureIndex = other_view_id;
+ pic_params->wForwardRefPictureIndex = other_view_id;
}
}
default:
break;
}
- /* *INDENT-OFF* */
- pic_params.wPicWidthInMBminus1 = self->width_in_mb - 1;
- pic_params.wPicHeightInMBminus1 = (self->height_in_mb >> is_field) - 1;
- pic_params.bMacroblockWidthMinus1 = 15;
- pic_params.bMacroblockHeightMinus1 = 15;
- pic_params.bBlockWidthMinus1 = 7;
- pic_params.bBlockHeightMinus1 = 7;
- pic_params.bBPPminus1 = 7;
- pic_params.bPicStructure = (BYTE) picture->structure;
- pic_params.bSecondField = is_field && ! !picture->first_field;
- pic_params.bPicIntra = picture->type == GST_MPEG_VIDEO_PICTURE_TYPE_I;
- pic_params.bPicBackwardPrediction =
+ pic_params->wPicWidthInMBminus1 = inner->width_in_mb - 1;
+ pic_params->wPicHeightInMBminus1 = (inner->height_in_mb >> is_field) - 1;
+ pic_params->bMacroblockWidthMinus1 = 15;
+ pic_params->bMacroblockHeightMinus1 = 15;
+ pic_params->bBlockWidthMinus1 = 7;
+ pic_params->bBlockHeightMinus1 = 7;
+ pic_params->bBPPminus1 = 7;
+ pic_params->bPicStructure = (BYTE) picture->structure;
+ if (picture->first_field && is_field) {
+ pic_params->bSecondField = TRUE;
+ }
+ pic_params->bPicIntra = picture->type == GST_MPEG_VIDEO_PICTURE_TYPE_I;
+ pic_params->bPicBackwardPrediction =
picture->type == GST_MPEG_VIDEO_PICTURE_TYPE_B;
/* FIXME: 1 -> 4:2:0, 2 -> 4:2:2, 3 -> 4:4:4 */
- pic_params.bChromaFormat = 1;
- pic_params.bPicScanFixed = 1;
- pic_params.bPicScanMethod = slice->pic_ext->alternate_scan;
- pic_params.wBitstreamFcodes = _pack_f_codes (slice->pic_ext->f_code);
- pic_params.wBitstreamPCEelements = _pack_pce_elements (slice);
- /* *INDENT-ON* */
-
- GST_TRACE_OBJECT (self, "Getting picture param decoder buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for picture parameters");
- return FALSE;
- }
-
- memcpy (d3d11_buffer, &pic_params, sizeof (pic_params));
-
- GST_TRACE_OBJECT (self, "Release picture param decoder buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
+ pic_params->bChromaFormat = 1;
+ pic_params->bPicScanFixed = 1;
+ pic_params->bPicScanMethod = slice->pic_ext->alternate_scan;
+ pic_params->wBitstreamFcodes = _pack_f_codes (slice->pic_ext->f_code);
+ pic_params->wBitstreamPCEelements = _pack_pce_elements (slice);
/* Fill DXVA_QmatrixData */
if (slice->quant_matrix &&
@@ -625,157 +598,46 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
(slice->quant_matrix->load_intra_quantiser_matrix ||
slice->quant_matrix->load_non_intra_quantiser_matrix)) {
GstMpegVideoQuantMatrixExt *quant_matrix = slice->quant_matrix;
- self->submit_iq_data = TRUE;
if (quant_matrix->load_intra_quantiser_matrix) {
- iq_matrix.bNewQmatrix[0] = 1;
- memcpy (iq_matrix.Qmatrix[0], quant_matrix->intra_quantiser_matrix,
- sizeof (quant_matrix->intra_quantiser_matrix));
+ iq_matrix->bNewQmatrix[0] = 1;
+ for (guint i = 0; i < 64; i++) {
+ iq_matrix->Qmatrix[0][i] = quant_matrix->intra_quantiser_matrix[i];
+ }
}
if (quant_matrix->load_non_intra_quantiser_matrix) {
- iq_matrix.bNewQmatrix[1] = 1;
- memcpy (iq_matrix.Qmatrix[1], quant_matrix->non_intra_quantiser_matrix,
- sizeof (quant_matrix->non_intra_quantiser_matrix));
+ iq_matrix->bNewQmatrix[1] = 1;
+ for (guint i = 0; i < 64; i++) {
+ iq_matrix->Qmatrix[1][i] = quant_matrix->non_intra_quantiser_matrix[i];
+ }
}
if (quant_matrix->load_chroma_intra_quantiser_matrix) {
- iq_matrix.bNewQmatrix[2] = 1;
- memcpy (iq_matrix.Qmatrix[2], quant_matrix->chroma_intra_quantiser_matrix,
- sizeof (quant_matrix->chroma_intra_quantiser_matrix));
+ iq_matrix->bNewQmatrix[2] = 1;
+ for (guint i = 0; i < 64; i++) {
+ iq_matrix->Qmatrix[2][i] =
+ quant_matrix->chroma_intra_quantiser_matrix[i];
+ }
}
if (quant_matrix->load_chroma_non_intra_quantiser_matrix) {
- iq_matrix.bNewQmatrix[3] = 1;
- memcpy (iq_matrix.Qmatrix[3],
- quant_matrix->chroma_non_intra_quantiser_matrix,
- sizeof (quant_matrix->chroma_non_intra_quantiser_matrix));
- }
-
- GST_TRACE_OBJECT (self, "Getting inverse quantization matrix buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX,
- &d3d11_buffer_size, &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for inv. quantization matrix");
- return FALSE;
+ iq_matrix->bNewQmatrix[3] = 1;
+ for (guint i = 0; i < 64; i++) {
+ iq_matrix->Qmatrix[3][i] =
+ quant_matrix->chroma_non_intra_quantiser_matrix[i];
+ }
}
- memcpy (d3d11_buffer, &iq_matrix, sizeof (DXVA_QmatrixData));
-
- GST_TRACE_OBJECT (self, "Release inverse quantization matrix buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
+ inner->submit_iq_data = TRUE;
} else {
- self->submit_iq_data = FALSE;
+ inner->submit_iq_data = FALSE;
}
- g_array_set_size (self->slice_list, 0);
-
- return gst_d3d11_mpeg2_dec_get_bitstream_buffer (self);
-}
-
-static gboolean
-gst_d3d11_mpeg2_dec_submit_slice_data (GstD3D11Mpeg2Dec * self,
- GstMpeg2Picture * picture)
-{
- guint buffer_size;
- gpointer buffer;
- guint8 *data;
- gsize offset = 0;
- guint i;
- D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[4];
- gboolean ret;
- guint buffer_count = 0;
- DXVA_SliceInfo *slice_data;
- gboolean is_field =
- picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
- guint mb_count = self->width_in_mb * (self->height_in_mb >> is_field);
-
- if (self->slice_list->len < 1) {
- GST_WARNING_OBJECT (self, "Nothing to submit");
- return FALSE;
- }
-
- memset (buffer_desc, 0, sizeof (buffer_desc));
-
- GST_TRACE_OBJECT (self, "Getting slice control buffer");
-
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, &buffer_size, &buffer)) {
- GST_ERROR_OBJECT (self, "Couldn't get slice control buffer");
- return FALSE;
- }
+ inner->slice_list.resize (0);
+ inner->bitstream_buffer.resize (0);
- data = (guint8 *) buffer;
- for (i = 0; i < self->slice_list->len; i++) {
- slice_data = &g_array_index (self->slice_list, DXVA_SliceInfo, i);
-
- /* Update the number of MBs per slice */
- if (i == self->slice_list->len - 1) {
- slice_data->wNumberMBsInSlice = mb_count - slice_data->wNumberMBsInSlice;
- } else {
- DXVA_SliceInfo *next =
- &g_array_index (self->slice_list, DXVA_SliceInfo, i + 1);
- slice_data->wNumberMBsInSlice =
- next->wNumberMBsInSlice - slice_data->wNumberMBsInSlice;
- }
-
- memcpy (data + offset, slice_data, sizeof (DXVA_SliceInfo));
- offset += sizeof (DXVA_SliceInfo);
- }
-
- GST_TRACE_OBJECT (self, "Release slice control buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) {
- GST_ERROR_OBJECT (self, "Failed to release slice control buffer");
- return FALSE;
- }
-
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) {
- GST_ERROR_OBJECT (self, "Failed to release bitstream buffer");
- return FALSE;
- }
-
- buffer_desc[buffer_count].BufferType =
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
- buffer_desc[buffer_count].DataOffset = 0;
- buffer_desc[buffer_count].DataSize = sizeof (DXVA_PictureParameters);
- buffer_count++;
-
- if (self->submit_iq_data) {
- buffer_desc[buffer_count].BufferType =
- D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX;
- buffer_desc[buffer_count].DataOffset = 0;
- buffer_desc[buffer_count].DataSize = sizeof (DXVA_QmatrixData);
- buffer_count++;
- }
-
- buffer_desc[buffer_count].BufferType =
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
- buffer_desc[buffer_count].DataOffset = 0;
- buffer_desc[buffer_count].DataSize =
- sizeof (DXVA_SliceInfo) * self->slice_list->len;
- buffer_count++;
-
- buffer_desc[buffer_count].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
- buffer_desc[buffer_count].DataOffset = 0;
- buffer_desc[buffer_count].DataSize = self->written_buffer_size;
- buffer_count++;
-
- ret = gst_d3d11_decoder_submit_decoder_buffers (self->d3d11_decoder,
- buffer_count, buffer_desc);
-
- self->written_buffer_size = 0;
- self->bitstream_buffer_data = NULL;
- self->remaining_buffer_size = 0;
- g_array_set_size (self->slice_list, 0);
-
- return ret;
+ return TRUE;
}
static gboolean
@@ -783,58 +645,98 @@ gst_d3d11_mpeg2_dec_decode_slice (GstMpeg2Decoder * decoder,
GstMpeg2Picture * picture, GstMpeg2Slice * slice)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
GstMpegVideoSliceHdr *header = &slice->header;
GstMpegVideoPacket *packet = &slice->packet;
- /* including start code 4 bytes */
- guint to_write = packet->size + 4;
DXVA_SliceInfo slice_info = { 0, };
g_assert (packet->offset >= 4);
- /* FIXME: DXVA wants to know the number of MBs per slice
- * (not sure whether it's actually used by driver). But in case that
- * one slice is splitted into two bitstream buffer, it's almost impossible
- * to know the number of MBs per splitted bitstream buffer.
- * So, we will not support too large bitstream buffer which requires multiple
- * hardware bitstream buffer at this moment.
- */
- if (self->remaining_buffer_size < to_write) {
- /* Submit slice data we have so that release acquired bitstream buffers */
- if (self->bitstream_buffer_data)
- gst_d3d11_mpeg2_dec_submit_slice_data (self, picture);
- self->bitstream_buffer_data = 0;
-
- GST_ERROR_OBJECT (self, "Slice data is too large");
-
- return FALSE;
- }
-
slice_info.wHorizontalPosition = header->mb_column;
slice_info.wVerticalPosition = header->mb_row;
- slice_info.dwSliceBitsInBuffer = 8 * to_write;
- slice_info.dwSliceDataLocation = self->written_buffer_size;
+ /* including start code 4 bytes */
+ slice_info.dwSliceBitsInBuffer = 8 * (packet->size + 4);
+ slice_info.dwSliceDataLocation = inner->bitstream_buffer.size ();
/* XXX: We don't have information about the number of MBs in this slice.
* Just store offset here, and actual number will be calculated later */
slice_info.wNumberMBsInSlice =
- (header->mb_row * self->width_in_mb) + header->mb_column;
+ (header->mb_row * inner->width_in_mb) + header->mb_column;
slice_info.wQuantizerScaleCode = header->quantiser_scale_code;
slice_info.wMBbitOffset = header->header_size + 32;
- memcpy (self->bitstream_buffer_data, packet->data + packet->offset - 4,
- to_write);
- g_array_append_val (self->slice_list, slice_info);
- self->remaining_buffer_size -= to_write;
- self->written_buffer_size += to_write;
- self->bitstream_buffer_data += to_write;
+ inner->slice_list.push_back (slice_info);
+
+ size_t pos = inner->bitstream_buffer.size ();
+ inner->bitstream_buffer.resize (pos + packet->size + 4);
+ memcpy (&inner->bitstream_buffer[0] + pos, packet->data + packet->offset - 4,
+ packet->size + 4);
return TRUE;
}
+static gboolean
+gst_d3d11_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
+ GstMpeg2Picture * picture)
+{
+ GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
+ GstD3D11DecodeInputStreamArgs input_args;
+ gboolean is_field =
+ picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
+ guint mb_count = inner->width_in_mb * (inner->height_in_mb >> is_field);
+
+ if (inner->bitstream_buffer.empty ()) {
+ GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
+ return FALSE;
+ }
+
+ view = gst_d3d11_mpeg2_dec_get_output_view_from_picture (self, picture,
+ &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
+ return FALSE;
+ }
+
+ memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
+
+ DXVA_SliceInfo *first = &inner->slice_list[0];
+ for (size_t i = 0; i < inner->slice_list.size (); i++) {
+ DXVA_SliceInfo *slice = first + i;
+
+ /* Update the number of MBs per slice */
+ if (i == inner->slice_list.size () - 1) {
+ slice->wNumberMBsInSlice = mb_count - slice->wNumberMBsInSlice;
+ } else {
+ DXVA_SliceInfo *next = first + i + 1;
+ slice->wNumberMBsInSlice =
+ next->wNumberMBsInSlice - slice->wNumberMBsInSlice;
+ }
+ }
+
+ input_args.picture_params = &inner->pic_params;
+ input_args.picture_params_size = sizeof (DXVA_PictureParameters);
+ input_args.slice_control = &inner->slice_list[0];
+ input_args.slice_control_size =
+ sizeof (DXVA_SliceInfo) * inner->slice_list.size ();
+ input_args.bitstream = &inner->bitstream_buffer[0];
+ input_args.bitstream_size = inner->bitstream_buffer.size ();
+ if (inner->submit_iq_data) {
+ input_args.inverse_quantization_matrix = &inner->iq_matrix;
+ input_args.inverse_quantization_matrix_size = sizeof (DXVA_QmatrixData);
+ }
+
+ return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
+ view, &input_args);
+}
+
static GstFlowReturn
gst_d3d11_mpeg2_dec_output_picture (GstMpeg2Decoder * decoder,
GstVideoCodecFrame * frame, GstMpeg2Picture * picture)
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
+ GstD3D11Mpeg2DecInner *inner = self->inner;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstBuffer *view_buffer;
@@ -847,8 +749,8 @@ gst_d3d11_mpeg2_dec_output_picture (GstMpeg2Decoder * decoder,
goto error;
}
- if (!gst_d3d11_decoder_process_output (self->d3d11_decoder, vdec,
- self->width, self->height, view_buffer, &frame->output_buffer)) {
+ if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
+ inner->width, inner->height, view_buffer, &frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
goto error;
}
@@ -869,40 +771,15 @@ gst_d3d11_mpeg2_dec_output_picture (GstMpeg2Decoder * decoder,
return gst_video_decoder_finish_frame (vdec, frame);
error:
- gst_video_decoder_drop_frame (vdec, frame);
gst_mpeg2_picture_unref (picture);
+ gst_video_decoder_release_frame (vdec, frame);
return GST_FLOW_ERROR;
}
-static gboolean
-gst_d3d11_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
- GstMpeg2Picture * picture)
-{
- GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
-
- if (!gst_d3d11_mpeg2_dec_submit_slice_data (self, picture)) {
- GST_ERROR_OBJECT (self, "Failed to submit slice data");
- return FALSE;
- }
-
- if (!gst_d3d11_decoder_end_frame (self->d3d11_decoder)) {
- GST_ERROR_OBJECT (self, "Failed to EndFrame");
- return FALSE;
- }
-
- return TRUE;
-}
-
-typedef struct
-{
- guint width;
- guint height;
-} GstD3D11Mpeg2DecResolution;
-
void
gst_d3d11_mpeg2_dec_register (GstPlugin * plugin, GstD3D11Device * device,
- GstD3D11Decoder * decoder, guint rank)
+ guint rank)
{
GType type;
gchar *type_name;
@@ -923,8 +800,8 @@ gst_d3d11_mpeg2_dec_register (GstPlugin * plugin, GstD3D11Device * device,
GstCaps *sink_caps = NULL;
GstCaps *src_caps = NULL;
- if (!gst_d3d11_decoder_get_supported_decoder_profile (decoder,
- GST_D3D11_CODEC_MPEG2, GST_VIDEO_FORMAT_NV12, &supported_profile)) {
+ if (!gst_d3d11_decoder_get_supported_decoder_profile (device,
+ GST_DXVA_CODEC_MPEG2, GST_VIDEO_FORMAT_NV12, &supported_profile)) {
GST_INFO_OBJECT (device, "device does not support MPEG-2 video decoding");
return;
}
@@ -946,7 +823,7 @@ gst_d3d11_mpeg2_dec_register (GstPlugin * plugin, GstD3D11Device * device,
"height", GST_TYPE_INT_RANGE, 1, 1920, NULL);
type_info.class_data =
- gst_d3d11_decoder_class_data_new (device, GST_D3D11_CODEC_MPEG2,
+ gst_d3d11_decoder_class_data_new (device, GST_DXVA_CODEC_MPEG2,
sink_caps, src_caps);
type_name = g_strdup ("GstD3D11Mpeg2Dec");
diff --git a/sys/d3d11/gstd3d11mpeg2dec.h b/sys/d3d11/gstd3d11mpeg2dec.h
index 2c2ce5441..f6db58925 100644
--- a/sys/d3d11/gstd3d11mpeg2dec.h
+++ b/sys/d3d11/gstd3d11mpeg2dec.h
@@ -26,7 +26,6 @@ G_BEGIN_DECLS
void gst_d3d11_mpeg2_dec_register (GstPlugin * plugin,
GstD3D11Device * device,
- GstD3D11Decoder * decoder,
guint rank);
G_END_DECLS
diff --git a/sys/d3d11/gstd3d11vp8dec.cpp b/sys/d3d11/gstd3d11vp8dec.cpp
index cdb2761a5..9e18c1217 100644
--- a/sys/d3d11/gstd3d11vp8dec.cpp
+++ b/sys/d3d11/gstd3d11vp8dec.cpp
@@ -40,6 +40,7 @@
#include <gst/codecs/gstvp8decoder.h>
#include <string.h>
+#include <vector>
/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
@@ -55,15 +56,29 @@ GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_vp8_dec_debug);
/* reference list 4 + 4 margin */
#define NUM_OUTPUT_VIEW 8
-typedef struct _GstD3D11Vp8Dec
+/* *INDENT-OFF* */
+typedef struct _GstD3D11Vp8DecInner
{
- GstVp8Decoder parent;
+ GstD3D11Device *device = nullptr;
+ GstD3D11Decoder *d3d11_decoder = nullptr;
+
+ DXVA_PicParams_VP8 pic_params;
+ DXVA_Slice_VPx_Short slice;
- GstD3D11Device *device;
- GstD3D11Decoder *d3d11_decoder;
+ /* In case of VP8, there's only one slice per picture so we don't
+ * need this bitstream buffer, but this will be used for 128 bytes alignment */
+ std::vector<guint8> bitstream_buffer;
- guint width, height;
- GstVideoFormat out_format;
+ guint width = 0;
+ guint height = 0;
+ GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
+} GstD3D11Vp8DecInner;
+/* *INDENT-ON* */
+
+typedef struct _GstD3D11Vp8Dec
+{
+ GstVp8Decoder parent;
+ GstD3D11Vp8DecInner *inner;
} GstD3D11Vp8Dec;
typedef struct _GstD3D11Vp8DecClass
@@ -80,6 +95,7 @@ static GstElementClass *parent_class = NULL;
static void gst_d3d11_vp8_dec_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
+static void gst_d3d11_vp8_dec_finalize (GObject * object);
static void gst_d3d11_vp8_dec_set_context (GstElement * element,
GstContext * context);
@@ -98,14 +114,14 @@ static gboolean gst_d3d11_vp8_dec_new_sequence (GstVp8Decoder * decoder,
const GstVp8FrameHdr * frame_hdr);
static gboolean gst_d3d11_vp8_dec_new_picture (GstVp8Decoder * decoder,
GstVideoCodecFrame * frame, GstVp8Picture * picture);
-static GstFlowReturn gst_d3d11_vp8_dec_output_picture (GstVp8Decoder *
- decoder, GstVideoCodecFrame * frame, GstVp8Picture * picture);
static gboolean gst_d3d11_vp8_dec_start_picture (GstVp8Decoder * decoder,
GstVp8Picture * picture);
static gboolean gst_d3d11_vp8_dec_decode_picture (GstVp8Decoder * decoder,
GstVp8Picture * picture, GstVp8Parser * parser);
static gboolean gst_d3d11_vp8_dec_end_picture (GstVp8Decoder * decoder,
GstVp8Picture * picture);
+static GstFlowReturn gst_d3d11_vp8_dec_output_picture (GstVp8Decoder *
+ decoder, GstVideoCodecFrame * frame, GstVp8Picture * picture);
static void
gst_d3d11_vp8_dec_class_init (GstD3D11Vp8DecClass * klass, gpointer data)
@@ -117,6 +133,7 @@ gst_d3d11_vp8_dec_class_init (GstD3D11Vp8DecClass * klass, gpointer data)
GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;
gobject_class->get_property = gst_d3d11_vp8_dec_get_property;
+ gobject_class->finalize = gst_d3d11_vp8_dec_finalize;
element_class->set_context =
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_set_context);
@@ -138,19 +155,20 @@ gst_d3d11_vp8_dec_class_init (GstD3D11Vp8DecClass * klass, gpointer data)
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_new_sequence);
vp8decoder_class->new_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_new_picture);
- vp8decoder_class->output_picture =
- GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_output_picture);
vp8decoder_class->start_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_start_picture);
vp8decoder_class->decode_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_decode_picture);
vp8decoder_class->end_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_end_picture);
+ vp8decoder_class->output_picture =
+ GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_output_picture);
}
static void
gst_d3d11_vp8_dec_init (GstD3D11Vp8Dec * self)
{
+ self->inner = new GstD3D11Vp8DecInner ();
}
static void
@@ -164,14 +182,25 @@ gst_d3d11_vp8_dec_get_property (GObject * object, guint prop_id,
}
static void
+gst_d3d11_vp8_dec_finalize (GObject * object)
+{
+ GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (object);
+
+ delete self->inner;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
gst_d3d11_vp8_dec_set_context (GstElement * element, GstContext * context)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (element);
+ GstD3D11Vp8DecInner *inner = self->inner;
GstD3D11Vp8DecClass *klass = GST_D3D11_VP8_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
gst_d3d11_handle_set_context (element, context, cdata->adapter,
- &self->device);
+ &inner->device);
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@@ -180,20 +209,22 @@ static gboolean
gst_d3d11_vp8_dec_open (GstVideoDecoder * decoder)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
GstD3D11Vp8DecClass *klass = GST_D3D11_VP8_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), cdata->adapter,
- &self->device)) {
+ &inner->device)) {
GST_ERROR_OBJECT (self, "Cannot create d3d11device");
return FALSE;
}
- self->d3d11_decoder = gst_d3d11_decoder_new (self->device);
+ inner->d3d11_decoder = gst_d3d11_decoder_new (inner->device,
+ GST_DXVA_CODEC_VP8);
- if (!self->d3d11_decoder) {
+ if (!inner->d3d11_decoder) {
GST_ERROR_OBJECT (self, "Cannot create d3d11 decoder");
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->device);
return FALSE;
}
@@ -204,9 +235,10 @@ static gboolean
gst_d3d11_vp8_dec_close (GstVideoDecoder * decoder)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
- gst_clear_object (&self->d3d11_decoder);
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->d3d11_decoder);
+ gst_clear_object (&inner->device);
return TRUE;
}
@@ -215,8 +247,9 @@ static gboolean
gst_d3d11_vp8_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_negotiate (self->d3d11_decoder, decoder))
+ if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
@@ -227,8 +260,9 @@ gst_d3d11_vp8_dec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_decide_allocation (self->d3d11_decoder, decoder,
+ if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder, decoder,
query)) {
return FALSE;
}
@@ -241,11 +275,12 @@ static gboolean
gst_d3d11_vp8_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
- query, self->device)) {
+ query, inner->device)) {
return TRUE;
}
break;
@@ -260,15 +295,16 @@ static gboolean
gst_d3d11_vp8_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, TRUE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
break;
case GST_EVENT_FLUSH_STOP:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, FALSE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
default:
break;
}
@@ -281,20 +317,21 @@ gst_d3d11_vp8_dec_new_sequence (GstVp8Decoder * decoder,
const GstVp8FrameHdr * frame_hdr)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
GstVideoInfo info;
GST_LOG_OBJECT (self, "new sequence");
/* FIXME: support I420 */
- self->out_format = GST_VIDEO_FORMAT_NV12;
- self->width = frame_hdr->width;
- self->height = frame_hdr->height;
+ inner->out_format = GST_VIDEO_FORMAT_NV12;
+ inner->width = frame_hdr->width;
+ inner->height = frame_hdr->height;
gst_video_info_set_format (&info,
- self->out_format, self->width, self->height);
+ inner->out_format, inner->width, inner->height);
- if (!gst_d3d11_decoder_configure (self->d3d11_decoder, GST_D3D11_CODEC_VP8,
- decoder->input_state, &info, self->width, self->height,
+ if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
+ decoder->input_state, &info, inner->width, inner->height,
NUM_OUTPUT_VIEW)) {
GST_ERROR_OBJECT (self, "Failed to create decoder");
return FALSE;
@@ -313,9 +350,10 @@ gst_d3d11_vp8_dec_new_picture (GstVp8Decoder * decoder,
GstVideoCodecFrame * frame, GstVp8Picture * picture)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
GstBuffer *view_buffer;
- view_buffer = gst_d3d11_decoder_get_output_view_buffer (self->d3d11_decoder,
+ view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
@@ -332,46 +370,23 @@ gst_d3d11_vp8_dec_new_picture (GstVp8Decoder * decoder,
return TRUE;
}
-static GstFlowReturn
-gst_d3d11_vp8_dec_output_picture (GstVp8Decoder * decoder,
- GstVideoCodecFrame * frame, GstVp8Picture * picture)
+static gboolean
+gst_d3d11_vp8_dec_start_picture (GstVp8Decoder * decoder,
+ GstVp8Picture * picture)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
- GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
- GstBuffer *view_buffer;
-
- g_assert (picture->frame_hdr.show_frame);
+ GstD3D11Vp8DecInner *inner = self->inner;
- GST_LOG_OBJECT (self, "Outputting picture %p", picture);
-
- view_buffer = (GstBuffer *) gst_vp8_picture_get_user_data (picture);
-
- if (!view_buffer) {
- GST_ERROR_OBJECT (self, "Could not get output view");
- goto error;
- }
+ inner->bitstream_buffer.resize (0);
- if (!gst_d3d11_decoder_process_output (self->d3d11_decoder, vdec,
- self->width, self->height, view_buffer, &frame->output_buffer)) {
- GST_ERROR_OBJECT (self, "Failed to copy buffer");
- goto error;
- }
-
- gst_vp8_picture_unref (picture);
-
- return gst_video_decoder_finish_frame (vdec, frame);
-
-error:
- gst_video_decoder_drop_frame (vdec, frame);
- gst_vp8_picture_unref (picture);
-
- return GST_FLOW_ERROR;
+ return TRUE;
}
static ID3D11VideoDecoderOutputView *
gst_d3d11_vp8_dec_get_output_view_from_picture (GstD3D11Vp8Dec * self,
GstVp8Picture * picture, guint8 * view_id)
{
+ GstD3D11Vp8DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
@@ -382,7 +397,7 @@ gst_d3d11_vp8_dec_get_output_view_from_picture (GstD3D11Vp8Dec * self,
}
view =
- gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
+ gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
@@ -392,29 +407,6 @@ gst_d3d11_vp8_dec_get_output_view_from_picture (GstD3D11Vp8Dec * self,
return view;
}
-static gboolean
-gst_d3d11_vp8_dec_start_picture (GstVp8Decoder * decoder,
- GstVp8Picture * picture)
-{
- GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
- ID3D11VideoDecoderOutputView *view;
-
- view = gst_d3d11_vp8_dec_get_output_view_from_picture (self, picture, NULL);
- if (!view) {
- GST_ERROR_OBJECT (self, "current picture does not have output view handle");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Begin frame");
-
- if (!gst_d3d11_decoder_begin_frame (self->d3d11_decoder, view, 0, NULL)) {
- GST_ERROR_OBJECT (self, "Failed to begin frame");
- return FALSE;
- }
-
- return TRUE;
-}
-
static void
gst_d3d11_vp8_dec_copy_frame_params (GstD3D11Vp8Dec * self,
GstVp8Picture * picture, GstVp8Parser * parser, DXVA_PicParams_VP8 * params)
@@ -546,190 +538,61 @@ gst_d3d11_vp8_dec_copy_segmentation_params (GstD3D11Vp8Dec * self,
}
static gboolean
-gst_d3d11_vp8_dec_submit_picture_data (GstD3D11Vp8Dec * self,
- GstVp8Picture * picture, DXVA_PicParams_VP8 * params)
+gst_d3d11_vp8_dec_decode_picture (GstVp8Decoder * decoder,
+ GstVp8Picture * picture, GstVp8Parser * parser)
{
- guint d3d11_buffer_size;
- gpointer d3d11_buffer;
- gsize buffer_offset = 0;
- gboolean is_first = TRUE;
-
- GST_TRACE_OBJECT (self, "Getting picture params buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for picture parameters");
- return FALSE;
- }
-
- memcpy (d3d11_buffer, params, sizeof (DXVA_PicParams_VP8));
-
- GST_TRACE_OBJECT (self, "Release picture param decoder buffer");
-
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
+ GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
+ DXVA_PicParams_VP8 *pic_params = &inner->pic_params;
+ DXVA_Slice_VPx_Short *slice = &inner->slice;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
+ const GstVp8FrameHdr *frame_hdr = &picture->frame_hdr;
- if (!picture->data || !picture->size) {
- GST_ERROR_OBJECT (self, "No data to submit");
+ view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
+ picture, &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
}
- GST_TRACE_OBJECT (self, "Submit total %" G_GSIZE_FORMAT " bytes",
- picture->size);
-
- while (buffer_offset < picture->size) {
- gsize bytes_to_copy = picture->size - buffer_offset;
- gsize written_buffer_size;
- gboolean is_last = TRUE;
- DXVA_Slice_VPx_Short slice_short = { 0, };
- D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[3];
- gboolean bad_aligned_bitstream_buffer = FALSE;
-
- memset (buffer_desc, 0, sizeof (buffer_desc));
-
- GST_TRACE_OBJECT (self, "Getting bitstream buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self, "Couldn't get bitstream buffer");
- goto error;
- }
-
- if ((d3d11_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "The size of bitstream buffer is not 128 bytes aligned");
- bad_aligned_bitstream_buffer = TRUE;
- }
-
- if (bytes_to_copy > d3d11_buffer_size) {
- /* if the size of this slice is larger than the size of remaining d3d11
- * decoder bitstream memory, write the data up to the remaining d3d11
- * decoder bitstream memory size and the rest would be written to the
- * next d3d11 bitstream memory */
- bytes_to_copy = d3d11_buffer_size;
- is_last = FALSE;
- }
-
- memcpy (d3d11_buffer, picture->data + buffer_offset, bytes_to_copy);
- written_buffer_size = bytes_to_copy;
-
- /* DXVA2 spec is saying that written bitstream data must be 128 bytes
- * aligned if the bitstream buffer contains end of frame
- * (i.e., wBadSliceChopping == 0 or 2) */
- if (is_last) {
- guint padding = MIN (GST_ROUND_UP_128 (bytes_to_copy) - bytes_to_copy,
- d3d11_buffer_size - bytes_to_copy);
-
- if (padding) {
- GST_TRACE_OBJECT (self,
- "Written bitstream buffer size %" G_GSIZE_FORMAT
- " is not 128 bytes aligned, add padding %d bytes",
- bytes_to_copy, padding);
- memset ((guint8 *) d3d11_buffer + bytes_to_copy, 0, padding);
- written_buffer_size += padding;
- }
- }
-
- GST_TRACE_OBJECT (self, "Release bitstream buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) {
- GST_ERROR_OBJECT (self, "Failed to release bitstream buffer");
-
- goto error;
- }
-
- slice_short.BSNALunitDataLocation = 0;
- slice_short.SliceBytesInBuffer = (UINT) written_buffer_size;
-
- /* wBadSliceChopping: (dxva spec.)
- * 0: All bits for the slice are located within the corresponding
- * bitstream data buffer
- * 1: The bitstream data buffer contains the start of the slice,
- * but not the entire slice, because the buffer is full
- * 2: The bitstream data buffer contains the end of the slice.
- * It does not contain the start of the slice, because the start of
- * the slice was located in the previous bitstream data buffer.
- * 3: The bitstream data buffer does not contain the start of the slice
- * (because the start of the slice was located in the previous
- * bitstream data buffer), and it does not contain the end of the slice
- * (because the current bitstream data buffer is also full).
- */
- if (is_last && is_first) {
- slice_short.wBadSliceChopping = 0;
- } else if (!is_last && is_first) {
- slice_short.wBadSliceChopping = 1;
- } else if (is_last && !is_first) {
- slice_short.wBadSliceChopping = 2;
- } else {
- slice_short.wBadSliceChopping = 3;
- }
-
- GST_TRACE_OBJECT (self, "Getting slice control buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self, "Couldn't get slice control buffer");
-
- goto error;
- }
-
- memcpy (d3d11_buffer, &slice_short, sizeof (DXVA_Slice_VPx_Short));
-
- GST_TRACE_OBJECT (self, "Release slice control buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) {
- GST_ERROR_OBJECT (self, "Failed to release slice control buffer");
-
- goto error;
- }
-
- buffer_desc[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
- buffer_desc[0].DataOffset = 0;
- buffer_desc[0].DataSize = sizeof (DXVA_PicParams_VP8);
-
- buffer_desc[1].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
- buffer_desc[1].DataOffset = 0;
- buffer_desc[1].DataSize = sizeof (DXVA_Slice_VPx_Short);
+ memset (pic_params, 0, sizeof (DXVA_PicParams_VP8));
- if (!bad_aligned_bitstream_buffer && (written_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "Written bitstream buffer size %" G_GSIZE_FORMAT
- " is not 128 bytes aligned", written_buffer_size);
- }
+ pic_params->first_part_size = frame_hdr->first_part_size;
+ pic_params->width = inner->width;
+ pic_params->height = inner->height;
+ pic_params->CurrPic.Index7Bits = view_id;
+ pic_params->StatusReportFeedbackNumber = 1;
- buffer_desc[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
- buffer_desc[2].DataOffset = 0;
- buffer_desc[2].DataSize = written_buffer_size;
+ gst_d3d11_vp8_dec_copy_frame_params (self, picture, parser, pic_params);
+ gst_d3d11_vp8_dec_copy_reference_frames (self, pic_params);
+ gst_d3d11_vp8_dec_copy_segmentation_params (self, parser, pic_params);
- if (!gst_d3d11_decoder_submit_decoder_buffers (self->d3d11_decoder,
- 3, buffer_desc)) {
- GST_ERROR_OBJECT (self, "Couldn't submit decoder buffers");
- goto error;
- }
+ inner->bitstream_buffer.resize (picture->size);
+ memcpy (&inner->bitstream_buffer[0], picture->data, picture->size);
- buffer_offset += bytes_to_copy;
- is_first = FALSE;
- }
+ slice->BSNALunitDataLocation = 0;
+ slice->SliceBytesInBuffer = inner->bitstream_buffer.size ();
+ slice->wBadSliceChopping = 0;
return TRUE;
-
-error:
- return FALSE;
}
static gboolean
-gst_d3d11_vp8_dec_decode_picture (GstVp8Decoder * decoder,
- GstVp8Picture * picture, GstVp8Parser * parser)
+gst_d3d11_vp8_dec_end_picture (GstVp8Decoder * decoder, GstVp8Picture * picture)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
- DXVA_PicParams_VP8 pic_params = { 0, };
+ GstD3D11Vp8DecInner *inner = self->inner;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
- const GstVp8FrameHdr *frame_hdr = &picture->frame_hdr;
+ size_t bitstream_buffer_size;
+ size_t bitstream_pos;
+ GstD3D11DecodeInputStreamArgs input_args;
+
+ if (inner->bitstream_buffer.empty ()) {
+ GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
+ return FALSE;
+ }
view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
picture, &view_id);
@@ -738,41 +601,73 @@ gst_d3d11_vp8_dec_decode_picture (GstVp8Decoder * decoder,
return FALSE;
}
- pic_params.first_part_size = frame_hdr->first_part_size;
- pic_params.width = self->width;
- pic_params.height = self->height;
- pic_params.CurrPic.Index7Bits = view_id;
- pic_params.StatusReportFeedbackNumber = 1;
+ memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
+
+ bitstream_pos = inner->bitstream_buffer.size ();
+ bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);
+
+ if (bitstream_buffer_size > bitstream_pos) {
+ size_t padding = bitstream_buffer_size - bitstream_pos;
+
+ /* Per the DXVA spec, the total bitstream buffer size must be
+ * 128-byte aligned. If the actual data is not a multiple of 128
+ * bytes, the last slice data needs to be zero-padded */
+ inner->bitstream_buffer.resize (bitstream_buffer_size, 0);
- gst_d3d11_vp8_dec_copy_frame_params (self, picture, parser, &pic_params);
- gst_d3d11_vp8_dec_copy_reference_frames (self, &pic_params);
- gst_d3d11_vp8_dec_copy_segmentation_params (self, parser, &pic_params);
+ inner->slice.SliceBytesInBuffer += padding;
+ }
+
+ input_args.picture_params = &inner->pic_params;
+ input_args.picture_params_size = sizeof (DXVA_PicParams_VP8);
+ input_args.slice_control = &inner->slice;
+ input_args.slice_control_size = sizeof (DXVA_Slice_VPx_Short);
+ input_args.bitstream = &inner->bitstream_buffer[0];
+ input_args.bitstream_size = inner->bitstream_buffer.size ();
- return gst_d3d11_vp8_dec_submit_picture_data (self, picture, &pic_params);
+ return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
+ view, &input_args);
}
-static gboolean
-gst_d3d11_vp8_dec_end_picture (GstVp8Decoder * decoder, GstVp8Picture * picture)
+static GstFlowReturn
+gst_d3d11_vp8_dec_output_picture (GstVp8Decoder * decoder,
+ GstVideoCodecFrame * frame, GstVp8Picture * picture)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
+ GstD3D11Vp8DecInner *inner = self->inner;
+ GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
+ GstBuffer *view_buffer;
- if (!gst_d3d11_decoder_end_frame (self->d3d11_decoder)) {
- GST_ERROR_OBJECT (self, "Failed to EndFrame");
- return FALSE;
+ g_assert (picture->frame_hdr.show_frame);
+
+ GST_LOG_OBJECT (self, "Outputting picture %p", picture);
+
+ view_buffer = (GstBuffer *) gst_vp8_picture_get_user_data (picture);
+
+ if (!view_buffer) {
+ GST_ERROR_OBJECT (self, "Could not get output view");
+ goto error;
}
- return TRUE;
-}
+ if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
+ inner->width, inner->height, view_buffer, &frame->output_buffer)) {
+ GST_ERROR_OBJECT (self, "Failed to copy buffer");
+ goto error;
+ }
-typedef struct
-{
- guint width;
- guint height;
-} GstD3D11Vp8DecResolution;
+ gst_vp8_picture_unref (picture);
+
+ return gst_video_decoder_finish_frame (vdec, frame);
+
+error:
+ gst_vp8_picture_unref (picture);
+ gst_video_decoder_release_frame (vdec, frame);
+
+ return GST_FLOW_ERROR;
+}
void
gst_d3d11_vp8_dec_register (GstPlugin * plugin, GstD3D11Device * device,
- GstD3D11Decoder * decoder, guint rank)
+ guint rank)
{
GType type;
gchar *type_name;
@@ -791,10 +686,6 @@ gst_d3d11_vp8_dec_register (GstPlugin * plugin, GstD3D11Device * device,
(GInstanceInitFunc) gst_d3d11_vp8_dec_init,
};
const GUID *profile_guid = NULL;
- /* values were taken from chromium. See supported_profile_helper.cc */
- GstD3D11Vp8DecResolution resolutions_to_check[] = {
- {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160}, {4096, 2304}
- };
GstCaps *sink_caps = NULL;
GstCaps *src_caps = NULL;
guint max_width = 0;
@@ -802,18 +693,18 @@ gst_d3d11_vp8_dec_register (GstPlugin * plugin, GstD3D11Device * device,
guint resolution;
DXGI_FORMAT format = DXGI_FORMAT_NV12;
- if (!gst_d3d11_decoder_get_supported_decoder_profile (decoder,
- GST_D3D11_CODEC_VP8, GST_VIDEO_FORMAT_NV12, &profile_guid)) {
+ if (!gst_d3d11_decoder_get_supported_decoder_profile (device,
+ GST_DXVA_CODEC_VP8, GST_VIDEO_FORMAT_NV12, &profile_guid)) {
GST_INFO_OBJECT (device, "device does not support VP8 decoding");
return;
}
- for (i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
- if (gst_d3d11_decoder_supports_resolution (decoder, profile_guid,
- format, resolutions_to_check[i].width,
- resolutions_to_check[i].height)) {
- max_width = resolutions_to_check[i].width;
- max_height = resolutions_to_check[i].height;
+ for (i = 0; i < G_N_ELEMENTS (gst_dxva_resolutions); i++) {
+ if (gst_d3d11_decoder_supports_resolution (device, profile_guid,
+ format, gst_dxva_resolutions[i].width,
+ gst_dxva_resolutions[i].height)) {
+ max_width = gst_dxva_resolutions[i].width;
+ max_height = gst_dxva_resolutions[i].height;
GST_DEBUG_OBJECT (device,
"device support resolution %dx%d", max_width, max_height);
@@ -843,7 +734,7 @@ gst_d3d11_vp8_dec_register (GstPlugin * plugin, GstD3D11Device * device,
"height", GST_TYPE_INT_RANGE, 1, resolution, NULL);
type_info.class_data =
- gst_d3d11_decoder_class_data_new (device, GST_D3D11_CODEC_VP8,
+ gst_d3d11_decoder_class_data_new (device, GST_DXVA_CODEC_VP8,
sink_caps, src_caps);
type_name = g_strdup ("GstD3D11Vp8Dec");
diff --git a/sys/d3d11/gstd3d11vp8dec.h b/sys/d3d11/gstd3d11vp8dec.h
index a0f816159..d05dfc842 100644
--- a/sys/d3d11/gstd3d11vp8dec.h
+++ b/sys/d3d11/gstd3d11vp8dec.h
@@ -26,7 +26,6 @@ G_BEGIN_DECLS
void gst_d3d11_vp8_dec_register (GstPlugin * plugin,
GstD3D11Device * device,
- GstD3D11Decoder * decoder,
guint rank);
G_END_DECLS
diff --git a/sys/d3d11/gstd3d11vp9dec.cpp b/sys/d3d11/gstd3d11vp9dec.cpp
index a769c21d1..b2e3faf9d 100644
--- a/sys/d3d11/gstd3d11vp9dec.cpp
+++ b/sys/d3d11/gstd3d11vp9dec.cpp
@@ -70,6 +70,7 @@
#include <gst/codecs/gstvp9decoder.h>
#include <string.h>
+#include <vector>
/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
@@ -85,17 +86,30 @@ GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_vp9_dec_debug);
/* reference list 8 + 4 margin */
#define NUM_OUTPUT_VIEW 12
-typedef struct _GstD3D11Vp9Dec
+/* *INDENT-OFF* */
+typedef struct _GstD3D11Vp9DecInner
{
- GstVp9Decoder parent;
+ GstD3D11Device *device = nullptr;
+ GstD3D11Decoder *d3d11_decoder = nullptr;
+
+ DXVA_PicParams_VP9 pic_params;
+ DXVA_Slice_VPx_Short slice;
- GstD3D11Device *device;
- GstD3D11Decoder *d3d11_decoder;
+ /* In the case of VP9, there's only one slice per picture so we don't
+ * need this bitstream buffer, but it will be used for 128-byte alignment */
+ std::vector<guint8> bitstream_buffer;
/* To calculate use_prev_in_find_mv_refs */
- guint last_frame_width;
- guint last_frame_height;
- gboolean last_show_frame;
+ guint last_frame_width = 0;
+ guint last_frame_height = 0;
+ gboolean last_show_frame = FALSE;
+} GstD3D11Vp9DecInner;
+/* *INDENT-ON* */
+
+typedef struct _GstD3D11Vp9Dec
+{
+ GstVp9Decoder parent;
+ GstD3D11Vp9DecInner *inner;
} GstD3D11Vp9Dec;
typedef struct _GstD3D11Vp9DecClass
@@ -112,6 +126,7 @@ static GstElementClass *parent_class = NULL;
static void gst_d3d11_vp9_dec_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
+static void gst_d3d11_vp9_dec_finalize (GObject * object);
static void gst_d3d11_vp9_dec_set_context (GstElement * element,
GstContext * context);
@@ -132,14 +147,14 @@ static gboolean gst_d3d11_vp9_dec_new_picture (GstVp9Decoder * decoder,
GstVideoCodecFrame * frame, GstVp9Picture * picture);
static GstVp9Picture *gst_d3d11_vp9_dec_duplicate_picture (GstVp9Decoder *
decoder, GstVideoCodecFrame * frame, GstVp9Picture * picture);
-static GstFlowReturn gst_d3d11_vp9_dec_output_picture (GstVp9Decoder *
- decoder, GstVideoCodecFrame * frame, GstVp9Picture * picture);
static gboolean gst_d3d11_vp9_dec_start_picture (GstVp9Decoder * decoder,
GstVp9Picture * picture);
static gboolean gst_d3d11_vp9_dec_decode_picture (GstVp9Decoder * decoder,
GstVp9Picture * picture, GstVp9Dpb * dpb);
static gboolean gst_d3d11_vp9_dec_end_picture (GstVp9Decoder * decoder,
GstVp9Picture * picture);
+static GstFlowReturn gst_d3d11_vp9_dec_output_picture (GstVp9Decoder *
+ decoder, GstVideoCodecFrame * frame, GstVp9Picture * picture);
static void
gst_d3d11_vp9_dec_class_init (GstD3D11Vp9DecClass * klass, gpointer data)
@@ -151,6 +166,7 @@ gst_d3d11_vp9_dec_class_init (GstD3D11Vp9DecClass * klass, gpointer data)
GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;
gobject_class->get_property = gst_d3d11_vp9_dec_get_property;
+ gobject_class->finalize = gst_d3d11_vp9_dec_finalize;
element_class->set_context =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_set_context);
@@ -174,19 +190,20 @@ gst_d3d11_vp9_dec_class_init (GstD3D11Vp9DecClass * klass, gpointer data)
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_new_picture);
vp9decoder_class->duplicate_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_duplicate_picture);
- vp9decoder_class->output_picture =
- GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_output_picture);
vp9decoder_class->start_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_start_picture);
vp9decoder_class->decode_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_decode_picture);
vp9decoder_class->end_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_end_picture);
+ vp9decoder_class->output_picture =
+ GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_output_picture);
}
static void
gst_d3d11_vp9_dec_init (GstD3D11Vp9Dec * self)
{
+ self->inner = new GstD3D11Vp9DecInner ();
}
static void
@@ -200,14 +217,25 @@ gst_d3d11_vp9_dec_get_property (GObject * object, guint prop_id,
}
static void
+gst_d3d11_vp9_dec_finalize (GObject * object)
+{
+ GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (object);
+
+ delete self->inner;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
gst_d3d11_vp9_dec_set_context (GstElement * element, GstContext * context)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (element);
+ GstD3D11Vp9DecInner *inner = self->inner;
GstD3D11Vp9DecClass *klass = GST_D3D11_VP9_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
gst_d3d11_handle_set_context (element, context, cdata->adapter,
- &self->device);
+ &inner->device);
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@@ -216,20 +244,22 @@ static gboolean
gst_d3d11_vp9_dec_open (GstVideoDecoder * decoder)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
GstD3D11Vp9DecClass *klass = GST_D3D11_VP9_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), cdata->adapter,
- &self->device)) {
+ &inner->device)) {
GST_ERROR_OBJECT (self, "Cannot create d3d11device");
return FALSE;
}
- self->d3d11_decoder = gst_d3d11_decoder_new (self->device);
+ inner->d3d11_decoder = gst_d3d11_decoder_new (inner->device,
+ GST_DXVA_CODEC_VP9);
- if (!self->d3d11_decoder) {
+ if (!inner->d3d11_decoder) {
GST_ERROR_OBJECT (self, "Cannot create d3d11 decoder");
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->device);
return FALSE;
}
@@ -240,9 +270,10 @@ static gboolean
gst_d3d11_vp9_dec_close (GstVideoDecoder * decoder)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
- gst_clear_object (&self->d3d11_decoder);
- gst_clear_object (&self->device);
+ gst_clear_object (&inner->d3d11_decoder);
+ gst_clear_object (&inner->device);
return TRUE;
}
@@ -251,8 +282,9 @@ static gboolean
gst_d3d11_vp9_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_negotiate (self->d3d11_decoder, decoder))
+ if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
@@ -263,8 +295,9 @@ gst_d3d11_vp9_dec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
- if (!gst_d3d11_decoder_decide_allocation (self->d3d11_decoder,
+ if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
decoder, query)) {
return FALSE;
}
@@ -277,11 +310,12 @@ static gboolean
gst_d3d11_vp9_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
- query, self->device)) {
+ query, inner->device)) {
return TRUE;
}
break;
@@ -296,15 +330,16 @@ static gboolean
gst_d3d11_vp9_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, TRUE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
break;
case GST_EVENT_FLUSH_STOP:
- if (self->d3d11_decoder)
- gst_d3d11_decoder_set_flushing (self->d3d11_decoder, decoder, FALSE);
+ if (inner->d3d11_decoder)
+ gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
default:
break;
}
@@ -317,6 +352,7 @@ gst_d3d11_vp9_dec_new_sequence (GstVp9Decoder * decoder,
const GstVp9FrameHeader * frame_hdr)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
GstVideoInfo info;
GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
@@ -335,7 +371,7 @@ gst_d3d11_vp9_dec_new_sequence (GstVp9Decoder * decoder,
gst_video_info_set_format (&info,
out_format, frame_hdr->width, frame_hdr->height);
- if (!gst_d3d11_decoder_configure (self->d3d11_decoder, GST_D3D11_CODEC_VP9,
+ if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
decoder->input_state, &info, (gint) frame_hdr->width,
(gint) frame_hdr->height, NUM_OUTPUT_VIEW)) {
GST_ERROR_OBJECT (self, "Failed to create decoder");
@@ -348,7 +384,8 @@ gst_d3d11_vp9_dec_new_sequence (GstVp9Decoder * decoder,
}
/* Will be updated per decode_picture */
- self->last_frame_width = self->last_frame_height = 0;
+ inner->last_frame_width = inner->last_frame_height = 0;
+ inner->last_show_frame = FALSE;
return TRUE;
}
@@ -358,9 +395,10 @@ gst_d3d11_vp9_dec_new_picture (GstVp9Decoder * decoder,
GstVideoCodecFrame * frame, GstVp9Picture * picture)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
GstBuffer *view_buffer;
- view_buffer = gst_d3d11_decoder_get_output_view_buffer (self->d3d11_decoder,
+ view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
@@ -382,11 +420,12 @@ gst_d3d11_vp9_dec_duplicate_picture (GstVp9Decoder * decoder,
GstVideoCodecFrame * frame, GstVp9Picture * picture)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
GstBuffer *view_buffer;
GstVp9Picture *new_picture;
/* This method is called when show_frame == FALSE */
- self->last_show_frame = FALSE;
+ inner->last_show_frame = FALSE;
view_buffer = (GstBuffer *) gst_vp9_picture_get_user_data (picture);
@@ -407,45 +446,23 @@ gst_d3d11_vp9_dec_duplicate_picture (GstVp9Decoder * decoder,
return new_picture;
}
-static GstFlowReturn
-gst_d3d11_vp9_dec_output_picture (GstVp9Decoder * decoder,
- GstVideoCodecFrame * frame, GstVp9Picture * picture)
+static gboolean
+gst_d3d11_vp9_dec_start_picture (GstVp9Decoder * decoder,
+ GstVp9Picture * picture)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
- GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
- GstBuffer *view_buffer;
-
- GST_LOG_OBJECT (self, "Outputting picture %p", picture);
-
- view_buffer = (GstBuffer *) gst_vp9_picture_get_user_data (picture);
-
- if (!view_buffer) {
- GST_ERROR_OBJECT (self, "Could not get output view");
- goto error;
- }
-
- if (!gst_d3d11_decoder_process_output (self->d3d11_decoder, vdec,
- picture->frame_hdr.width, picture->frame_hdr.height, view_buffer,
- &frame->output_buffer)) {
- GST_ERROR_OBJECT (self, "Failed to copy buffer");
- goto error;
- }
+ GstD3D11Vp9DecInner *inner = self->inner;
- gst_vp9_picture_unref (picture);
-
- return gst_video_decoder_finish_frame (vdec, frame);
+ inner->bitstream_buffer.resize (0);
-error:
- gst_vp9_picture_unref (picture);
- gst_video_decoder_drop_frame (vdec, frame);
-
- return GST_FLOW_ERROR;
+ return TRUE;
}
static ID3D11VideoDecoderOutputView *
gst_d3d11_vp9_dec_get_output_view_from_picture (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, guint8 * view_id)
{
+ GstD3D11Vp9DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
@@ -456,7 +473,7 @@ gst_d3d11_vp9_dec_get_output_view_from_picture (GstD3D11Vp9Dec * self,
}
view =
- gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
+ gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
@@ -466,29 +483,6 @@ gst_d3d11_vp9_dec_get_output_view_from_picture (GstD3D11Vp9Dec * self,
return view;
}
-static gboolean
-gst_d3d11_vp9_dec_start_picture (GstVp9Decoder * decoder,
- GstVp9Picture * picture)
-{
- GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
- ID3D11VideoDecoderOutputView *view;
-
- view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture, NULL);
- if (!view) {
- GST_ERROR_OBJECT (self, "current picture does not have output view handle");
- return FALSE;
- }
-
- GST_TRACE_OBJECT (self, "Begin frame");
-
- if (!gst_d3d11_decoder_begin_frame (self->d3d11_decoder, view, 0, NULL)) {
- GST_ERROR_OBJECT (self, "Failed to begin frame");
- return FALSE;
- }
-
- return TRUE;
-}
-
static void
gst_d3d11_vp9_dec_copy_frame_params (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, DXVA_PicParams_VP9 * params)
@@ -575,6 +569,7 @@ static void
gst_d3d11_vp9_dec_copy_loop_filter_params (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, DXVA_PicParams_VP9 * params)
{
+ GstD3D11Vp9DecInner *inner = self->inner;
const GstVp9FrameHeader *frame_hdr = &picture->frame_hdr;
const GstVp9LoopFilterParams *lfp = &frame_hdr->loop_filter_params;
@@ -583,9 +578,9 @@ gst_d3d11_vp9_dec_copy_loop_filter_params (GstD3D11Vp9Dec * self,
params->mode_ref_delta_enabled = lfp->loop_filter_delta_enabled;
params->mode_ref_delta_update = lfp->loop_filter_delta_update;
params->use_prev_in_find_mv_refs =
- self->last_show_frame &&
- frame_hdr->width == self->last_frame_width &&
- frame_hdr->height == self->last_frame_height &&
+ inner->last_show_frame &&
+ frame_hdr->width == inner->last_frame_width &&
+ frame_hdr->height == inner->last_frame_height &&
!frame_hdr->error_resilient_mode &&
!(frame_hdr->frame_type == GST_VP9_KEY_FRAME || frame_hdr->intra_only);
@@ -664,239 +659,141 @@ gst_d3d11_vp9_dec_copy_segmentation_params (GstD3D11Vp9Dec * self,
}
static gboolean
-gst_d3d11_vp9_dec_submit_picture_data (GstD3D11Vp9Dec * self,
- GstVp9Picture * picture, DXVA_PicParams_VP9 * params)
+gst_d3d11_vp9_dec_decode_picture (GstVp9Decoder * decoder,
+ GstVp9Picture * picture, GstVp9Dpb * dpb)
{
- guint d3d11_buffer_size;
- gpointer d3d11_buffer;
- gsize buffer_offset = 0;
- gboolean is_first = TRUE;
-
- GST_TRACE_OBJECT (self, "Getting picture params buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self,
- "Failed to get decoder buffer for picture parameters");
- return FALSE;
- }
-
- memcpy (d3d11_buffer, params, sizeof (DXVA_PicParams_VP9));
-
- GST_TRACE_OBJECT (self, "Release picture param decoder buffer");
-
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) {
- GST_ERROR_OBJECT (self, "Failed to release decoder buffer");
- return FALSE;
- }
+ GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
+ DXVA_PicParams_VP9 *pic_params = &inner->pic_params;
+ DXVA_Slice_VPx_Short *slice = &inner->slice;
+ ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
- if (!picture->data || !picture->size) {
- GST_ERROR_OBJECT (self, "No data to submit");
+ view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture,
+ &view_id);
+ if (!view) {
+ GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
}
- GST_TRACE_OBJECT (self, "Submit total %" G_GSIZE_FORMAT " bytes",
- picture->size);
-
- while (buffer_offset < picture->size) {
- gsize bytes_to_copy = picture->size - buffer_offset;
- gsize written_buffer_size;
- gboolean is_last = TRUE;
- DXVA_Slice_VPx_Short slice_short = { 0, };
- D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[3];
- gboolean bad_aligned_bitstream_buffer = FALSE;
-
- memset (buffer_desc, 0, sizeof (buffer_desc));
-
- GST_TRACE_OBJECT (self, "Getting bitstream buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self, "Couldn't get bitstream buffer");
- goto error;
- }
-
- if ((d3d11_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "The size of bitstream buffer is not 128 bytes aligned");
- bad_aligned_bitstream_buffer = TRUE;
- }
-
- if (bytes_to_copy > d3d11_buffer_size) {
- /* if the size of this slice is larger than the size of remaining d3d11
- * decoder bitstream memory, write the data up to the remaining d3d11
- * decoder bitstream memory size and the rest would be written to the
- * next d3d11 bitstream memory */
- bytes_to_copy = d3d11_buffer_size;
- is_last = FALSE;
- }
-
- memcpy (d3d11_buffer, picture->data + buffer_offset, bytes_to_copy);
- written_buffer_size = bytes_to_copy;
-
- /* DXVA2 spec is saying that written bitstream data must be 128 bytes
- * aligned if the bitstream buffer contains end of frame
- * (i.e., wBadSliceChopping == 0 or 2) */
- if (is_last) {
- guint padding = MIN (GST_ROUND_UP_128 (bytes_to_copy) - bytes_to_copy,
- d3d11_buffer_size - bytes_to_copy);
-
- if (padding) {
- GST_TRACE_OBJECT (self,
- "Written bitstream buffer size %" G_GSIZE_FORMAT
- " is not 128 bytes aligned, add padding %d bytes",
- bytes_to_copy, padding);
- memset ((guint8 *) d3d11_buffer + bytes_to_copy, 0, padding);
- written_buffer_size += padding;
- }
- }
-
- GST_TRACE_OBJECT (self, "Release bitstream buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) {
- GST_ERROR_OBJECT (self, "Failed to release bitstream buffer");
-
- goto error;
- }
-
- slice_short.BSNALunitDataLocation = 0;
- slice_short.SliceBytesInBuffer = (UINT) written_buffer_size;
-
- /* wBadSliceChopping: (dxva spec.)
- * 0: All bits for the slice are located within the corresponding
- * bitstream data buffer
- * 1: The bitstream data buffer contains the start of the slice,
- * but not the entire slice, because the buffer is full
- * 2: The bitstream data buffer contains the end of the slice.
- * It does not contain the start of the slice, because the start of
- * the slice was located in the previous bitstream data buffer.
- * 3: The bitstream data buffer does not contain the start of the slice
- * (because the start of the slice was located in the previous
- * bitstream data buffer), and it does not contain the end of the slice
- * (because the current bitstream data buffer is also full).
- */
- if (is_last && is_first) {
- slice_short.wBadSliceChopping = 0;
- } else if (!is_last && is_first) {
- slice_short.wBadSliceChopping = 1;
- } else if (is_last && !is_first) {
- slice_short.wBadSliceChopping = 2;
- } else {
- slice_short.wBadSliceChopping = 3;
- }
-
- GST_TRACE_OBJECT (self, "Getting slice control buffer");
- if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, &d3d11_buffer_size,
- &d3d11_buffer)) {
- GST_ERROR_OBJECT (self, "Couldn't get slice control buffer");
+ memset (pic_params, 0, sizeof (DXVA_PicParams_VP9));
- goto error;
- }
-
- memcpy (d3d11_buffer, &slice_short, sizeof (DXVA_Slice_VPx_Short));
-
- GST_TRACE_OBJECT (self, "Release slice control buffer");
- if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder,
- D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) {
- GST_ERROR_OBJECT (self, "Failed to release slice control buffer");
-
- goto error;
- }
-
- buffer_desc[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
- buffer_desc[0].DataOffset = 0;
- buffer_desc[0].DataSize = sizeof (DXVA_PicParams_VP9);
-
- buffer_desc[1].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
- buffer_desc[1].DataOffset = 0;
- buffer_desc[1].DataSize = sizeof (DXVA_Slice_VPx_Short);
-
- if (!bad_aligned_bitstream_buffer && (written_buffer_size & 127) != 0) {
- GST_WARNING_OBJECT (self,
- "Written bitstream buffer size %" G_GSIZE_FORMAT
- " is not 128 bytes aligned", written_buffer_size);
- }
+ pic_params->CurrPic.Index7Bits = view_id;
+ pic_params->uncompressed_header_size_byte_aligned =
+ picture->frame_hdr.frame_header_length_in_bytes;
+ pic_params->first_partition_size = picture->frame_hdr.header_size_in_bytes;
+ pic_params->StatusReportFeedbackNumber = 1;
- buffer_desc[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
- buffer_desc[2].DataOffset = 0;
- buffer_desc[2].DataSize = written_buffer_size;
+ gst_d3d11_vp9_dec_copy_frame_params (self, picture, pic_params);
+ gst_d3d11_vp9_dec_copy_reference_frames (self, picture, dpb, pic_params);
+ gst_d3d11_vp9_dec_copy_frame_refs (self, picture, pic_params);
+ gst_d3d11_vp9_dec_copy_loop_filter_params (self, picture, pic_params);
+ gst_d3d11_vp9_dec_copy_quant_params (self, picture, pic_params);
+ gst_d3d11_vp9_dec_copy_segmentation_params (self, picture, pic_params);
- if (!gst_d3d11_decoder_submit_decoder_buffers (self->d3d11_decoder,
- 3, buffer_desc)) {
- GST_ERROR_OBJECT (self, "Couldn't submit decoder buffers");
- goto error;
- }
+ inner->bitstream_buffer.resize (picture->size);
+ memcpy (&inner->bitstream_buffer[0], picture->data, picture->size);
- buffer_offset += bytes_to_copy;
- is_first = FALSE;
- }
+ slice->BSNALunitDataLocation = 0;
+ slice->SliceBytesInBuffer = inner->bitstream_buffer.size ();
+ slice->wBadSliceChopping = 0;
- self->last_frame_width = params->width;
- self->last_frame_height = params->height;
- self->last_show_frame = TRUE;
+ inner->last_frame_width = pic_params->width;
+ inner->last_frame_height = pic_params->height;
+ inner->last_show_frame = TRUE;
return TRUE;
-
-error:
- return FALSE;
}
static gboolean
-gst_d3d11_vp9_dec_decode_picture (GstVp9Decoder * decoder,
- GstVp9Picture * picture, GstVp9Dpb * dpb)
+gst_d3d11_vp9_dec_end_picture (GstVp9Decoder * decoder, GstVp9Picture * picture)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
- DXVA_PicParams_VP9 pic_params = { 0, };
+ GstD3D11Vp9DecInner *inner = self->inner;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
+ size_t bitstream_buffer_size;
+ size_t bitstream_pos;
+ GstD3D11DecodeInputStreamArgs input_args;
- view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture,
- &view_id);
+ if (inner->bitstream_buffer.empty ()) {
+ GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
+ return FALSE;
+ }
+
+ view = gst_d3d11_vp9_dec_get_output_view_from_picture (self,
+ picture, &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
}
- pic_params.CurrPic.Index7Bits = view_id;
- pic_params.uncompressed_header_size_byte_aligned =
- picture->frame_hdr.frame_header_length_in_bytes;
- pic_params.first_partition_size = picture->frame_hdr.header_size_in_bytes;
- pic_params.StatusReportFeedbackNumber = 1;
+ memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
- gst_d3d11_vp9_dec_copy_frame_params (self, picture, &pic_params);
- gst_d3d11_vp9_dec_copy_reference_frames (self, picture, dpb, &pic_params);
- gst_d3d11_vp9_dec_copy_frame_refs (self, picture, &pic_params);
- gst_d3d11_vp9_dec_copy_loop_filter_params (self, picture, &pic_params);
- gst_d3d11_vp9_dec_copy_quant_params (self, picture, &pic_params);
- gst_d3d11_vp9_dec_copy_segmentation_params (self, picture, &pic_params);
+ bitstream_pos = inner->bitstream_buffer.size ();
+ bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);
- return gst_d3d11_vp9_dec_submit_picture_data (self, picture, &pic_params);
+ if (bitstream_buffer_size > bitstream_pos) {
+ size_t padding = bitstream_buffer_size - bitstream_pos;
+
+ /* As per DXVA spec, total amount of bitstream buffer size should be
+ * 128 bytes aligned. If actual data is not multiple of 128 bytes,
+ * the last slice data needs to be zero-padded */
+ inner->bitstream_buffer.resize (bitstream_buffer_size, 0);
+
+ inner->slice.SliceBytesInBuffer += padding;
+ }
+
+ input_args.picture_params = &inner->pic_params;
+ input_args.picture_params_size = sizeof (DXVA_PicParams_VP9);
+ input_args.slice_control = &inner->slice;
+ input_args.slice_control_size = sizeof (DXVA_Slice_VPx_Short);
+ input_args.bitstream = &inner->bitstream_buffer[0];
+ input_args.bitstream_size = inner->bitstream_buffer.size ();
+
+ return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
+ view, &input_args);
}
-static gboolean
-gst_d3d11_vp9_dec_end_picture (GstVp9Decoder * decoder, GstVp9Picture * picture)
+static GstFlowReturn
+gst_d3d11_vp9_dec_output_picture (GstVp9Decoder * decoder,
+ GstVideoCodecFrame * frame, GstVp9Picture * picture)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
+ GstD3D11Vp9DecInner *inner = self->inner;
+ GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
+ GstBuffer *view_buffer;
- if (!gst_d3d11_decoder_end_frame (self->d3d11_decoder)) {
- GST_ERROR_OBJECT (self, "Failed to EndFrame");
- return FALSE;
+ GST_LOG_OBJECT (self, "Outputting picture %p", picture);
+
+ view_buffer = (GstBuffer *) gst_vp9_picture_get_user_data (picture);
+
+ if (!view_buffer) {
+ GST_ERROR_OBJECT (self, "Could not get output view");
+ goto error;
}
- return TRUE;
-}
+ if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
+ picture->frame_hdr.width, picture->frame_hdr.height, view_buffer,
+ &frame->output_buffer)) {
+ GST_ERROR_OBJECT (self, "Failed to copy buffer");
+ goto error;
+ }
-typedef struct
-{
- guint width;
- guint height;
-} GstD3D11Vp9DecResolution;
+ gst_vp9_picture_unref (picture);
+
+ return gst_video_decoder_finish_frame (vdec, frame);
+
+error:
+ gst_vp9_picture_unref (picture);
+ gst_video_decoder_release_frame (vdec, frame);
+
+ return GST_FLOW_ERROR;
+}
void
gst_d3d11_vp9_dec_register (GstPlugin * plugin, GstD3D11Device * device,
- GstD3D11Decoder * decoder, guint rank)
+ guint rank)
{
GType type;
gchar *type_name;
@@ -917,10 +814,6 @@ gst_d3d11_vp9_dec_register (GstPlugin * plugin, GstD3D11Device * device,
};
const GUID *profile2_guid = NULL;
const GUID *profile0_guid = NULL;
- /* values were taken from chromium. See supported_profile_helper.cc */
- GstD3D11Vp9DecResolution resolutions_to_check[] = {
- {4096, 2160}, {4096, 2304}, {7680, 4320}, {8192, 4320}, {8192, 8192}
- };
GstCaps *sink_caps = NULL;
GstCaps *src_caps = NULL;
guint max_width = 0;
@@ -931,27 +824,27 @@ gst_d3d11_vp9_dec_register (GstPlugin * plugin, GstD3D11Device * device,
DXGI_FORMAT format = DXGI_FORMAT_UNKNOWN;
GValue vp9_profiles = G_VALUE_INIT;
- have_profile2 = gst_d3d11_decoder_get_supported_decoder_profile (decoder,
- GST_D3D11_CODEC_VP9, GST_VIDEO_FORMAT_P010_10LE, &profile2_guid);
+ have_profile2 = gst_d3d11_decoder_get_supported_decoder_profile (device,
+ GST_DXVA_CODEC_VP9, GST_VIDEO_FORMAT_P010_10LE, &profile2_guid);
if (!have_profile2) {
GST_DEBUG_OBJECT (device,
"decoder does not support VP9_VLD_10BIT_PROFILE2");
} else {
have_profile2 &=
- gst_d3d11_decoder_supports_format (decoder,
+ gst_d3d11_decoder_supports_format (device,
profile2_guid, DXGI_FORMAT_P010);
if (!have_profile2) {
GST_FIXME_OBJECT (device, "device does not support P010 format");
}
}
- have_profile0 = gst_d3d11_decoder_get_supported_decoder_profile (decoder,
- GST_D3D11_CODEC_VP9, GST_VIDEO_FORMAT_NV12, &profile0_guid);
+ have_profile0 = gst_d3d11_decoder_get_supported_decoder_profile (device,
+ GST_DXVA_CODEC_VP9, GST_VIDEO_FORMAT_NV12, &profile0_guid);
if (!have_profile0) {
GST_DEBUG_OBJECT (device, "decoder does not support VP9_VLD_PROFILE0");
} else {
have_profile0 =
- gst_d3d11_decoder_supports_format (decoder, profile0_guid,
+ gst_d3d11_decoder_supports_format (device, profile0_guid,
DXGI_FORMAT_NV12);
if (!have_profile0) {
GST_FIXME_OBJECT (device, "device does not support NV12 format");
@@ -971,12 +864,12 @@ gst_d3d11_vp9_dec_register (GstPlugin * plugin, GstD3D11Device * device,
format = DXGI_FORMAT_P010;
}
- for (i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
- if (gst_d3d11_decoder_supports_resolution (decoder, profile,
- format, resolutions_to_check[i].width,
- resolutions_to_check[i].height)) {
- max_width = resolutions_to_check[i].width;
- max_height = resolutions_to_check[i].height;
+ for (i = 0; i < G_N_ELEMENTS (gst_dxva_resolutions); i++) {
+ if (gst_d3d11_decoder_supports_resolution (device, profile,
+ format, gst_dxva_resolutions[i].width,
+ gst_dxva_resolutions[i].height)) {
+ max_width = gst_dxva_resolutions[i].width;
+ max_height = gst_dxva_resolutions[i].height;
GST_DEBUG_OBJECT (device,
"device support resolution %dx%d", max_width, max_height);
@@ -1042,7 +935,7 @@ gst_d3d11_vp9_dec_register (GstPlugin * plugin, GstD3D11Device * device,
"height", GST_TYPE_INT_RANGE, 1, resolution, NULL);
type_info.class_data =
- gst_d3d11_decoder_class_data_new (device, GST_D3D11_CODEC_VP9,
+ gst_d3d11_decoder_class_data_new (device, GST_DXVA_CODEC_VP9,
sink_caps, src_caps);
type_name = g_strdup ("GstD3D11Vp9Dec");
diff --git a/sys/d3d11/gstd3d11vp9dec.h b/sys/d3d11/gstd3d11vp9dec.h
index ddc66c482..320d6959d 100644
--- a/sys/d3d11/gstd3d11vp9dec.h
+++ b/sys/d3d11/gstd3d11vp9dec.h
@@ -26,7 +26,6 @@ G_BEGIN_DECLS
void gst_d3d11_vp9_dec_register (GstPlugin * plugin,
GstD3D11Device * device,
- GstD3D11Decoder * decoder,
guint rank);
G_END_DECLS
diff --git a/sys/d3d11/plugin.cpp b/sys/d3d11/plugin.cpp
index 2a6f09400..bc41e3882 100644
--- a/sys/d3d11/plugin.cpp
+++ b/sys/d3d11/plugin.cpp
@@ -57,6 +57,7 @@ GST_DEBUG_CATEGORY (gst_d3d11_video_processor_debug);
GST_DEBUG_CATEGORY (gst_d3d11_compositor_debug);
#ifdef HAVE_DXVA_H
+GST_DEBUG_CATEGORY (gst_d3d11_decoder_debug);
GST_DEBUG_CATEGORY (gst_d3d11_h264_dec_debug);
GST_DEBUG_CATEGORY (gst_d3d11_h265_dec_debug);
GST_DEBUG_CATEGORY (gst_d3d11_vp9_dec_debug);
@@ -105,6 +106,8 @@ plugin_init (GstPlugin * plugin)
#ifdef HAVE_DXVA_H
/* DXVA2 API is available since Windows 8 */
if (gst_d3d11_is_windows_8_or_greater ()) {
+ GST_DEBUG_CATEGORY_INIT (gst_d3d11_decoder_debug,
+ "d3d11decoder", 0, "Direct3D11 Video Decoder object");
GST_DEBUG_CATEGORY_INIT (gst_d3d11_h264_dec_debug,
"d3d11h264dec", 0, "Direct3D11 H.264 Video Decoder");
GST_DEBUG_CATEGORY_INIT (gst_d3d11_vp9_dec_debug,
@@ -145,38 +148,18 @@ plugin_init (GstPlugin * plugin)
#ifdef HAVE_DXVA_H
/* DXVA2 API is available since Windows 8 */
- if (gst_d3d11_is_windows_8_or_greater ()) {
- GstD3D11Decoder *decoder = NULL;
- gboolean legacy;
- gboolean hardware;
-
- g_object_get (device, "hardware", &hardware, NULL);
- if (!hardware)
- goto done;
+ if (gst_d3d11_is_windows_8_or_greater () &&
+ gst_d3d11_device_get_video_device_handle (device)) {
+ gboolean legacy = gst_d3d11_decoder_util_is_legacy_device (device);
- decoder = gst_d3d11_decoder_new (device);
- if (!decoder)
- goto done;
-
- legacy = gst_d3d11_decoder_util_is_legacy_device (device);
-
- gst_d3d11_h264_dec_register (plugin,
- device, decoder, GST_RANK_SECONDARY, legacy);
+ gst_d3d11_h264_dec_register (plugin, device, GST_RANK_SECONDARY, legacy);
if (!legacy) {
- gst_d3d11_h265_dec_register (plugin, device, decoder,
- GST_RANK_SECONDARY);
- gst_d3d11_vp9_dec_register (plugin, device, decoder,
- GST_RANK_SECONDARY);
- gst_d3d11_vp8_dec_register (plugin, device, decoder,
- GST_RANK_SECONDARY);
- gst_d3d11_mpeg2_dec_register (plugin, device, decoder,
- GST_RANK_SECONDARY);
- gst_d3d11_av1_dec_register (plugin, device, decoder,
- GST_RANK_SECONDARY);
+ gst_d3d11_h265_dec_register (plugin, device, GST_RANK_SECONDARY);
+ gst_d3d11_vp9_dec_register (plugin, device, GST_RANK_SECONDARY);
+ gst_d3d11_vp8_dec_register (plugin, device, GST_RANK_SECONDARY);
+ gst_d3d11_mpeg2_dec_register (plugin, device, GST_RANK_SECONDARY);
+ gst_d3d11_av1_dec_register (plugin, device, GST_RANK_SECONDARY);
}
-
- done:
- gst_clear_object (&decoder);
}
#endif