Re-implementation of array-of-texture based on d3d11 memory pool.
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/2097>
guint dpb_size;
guint downstream_min_buffers;
+ /* Used for array-of-texture */
+ guint8 next_view_id;
+
/* for staging */
ID3D11Texture2D *staging;
gsize staging_texture_offset[GST_VIDEO_MAX_PLANES];
return decoder->configured;
}
+/* Lazily registers and returns the GQuark used to tag a GstD3D11Memory
+ * with its array-of-texture view id via qdata. Thread-safe one-time
+ * initialization via the g_once_init_enter()/g_once_init_leave() pattern. */
+static GQuark
+gst_d3d11_decoder_view_id_quark (void)
+{
+ static gsize id_quark = 0;
+
+ if (g_once_init_enter (&id_quark)) {
+ GQuark quark = g_quark_from_string ("GstD3D11DecoderViewId");
+ g_once_init_leave (&id_quark, quark);
+ }
+
+ return (GQuark) id_quark;
+}
+
static gboolean
gst_d3d11_decoder_ensure_output_view (GstD3D11Decoder * self,
GstBuffer * buffer)
{
GstD3D11Memory *mem;
+ gpointer val = NULL;
mem = (GstD3D11Memory *) gst_buffer_peek_memory (buffer, 0);
if (!gst_d3d11_memory_get_decoder_output_view (mem, self->video_device,
return FALSE;
}
+ if (!self->use_array_of_texture)
+ return TRUE;
+
+ val = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
+ gst_d3d11_decoder_view_id_quark ());
+ if (!val) {
+ g_assert (self->next_view_id < 128);
+ g_assert (self->next_view_id > 0);
+
+ gst_mini_object_set_qdata (GST_MINI_OBJECT (mem),
+ gst_d3d11_decoder_view_id_quark (),
+ GUINT_TO_POINTER (self->next_view_id), NULL);
+
+ self->next_view_id++;
+ /* the valid view range is [0, 126], but 0 is not used here because a
+ * zero qdata value is indistinguishable from NULL (i.e. unset) */
+ self->next_view_id %= 128;
+ if (self->next_view_id == 0)
+ self->next_view_id = 1;
+ }
+
+
return TRUE;
}
"(dpb size: %d, downstream min buffers: %d)", pool_size, self->dpb_size,
self->downstream_min_buffers);
- if (!self->use_array_of_texture)
+ if (!self->use_array_of_texture) {
alloc_params->desc[0].ArraySize = pool_size;
+ } else {
+ /* The valid view id range is [0, 126], but we store ids in the [1, 127]
+ * range for qdata, since a qdata value of zero is equal to NULL (unset) */
+ self->next_view_id = 1;
+
+ /* our pool size can be increased as much as possible */
+ pool_size = 0;
+ }
+
gst_video_alignment_reset (&align);
align.padding_right = self->aligned_width - GST_VIDEO_INFO_WIDTH (info);
GST_DEBUG_OBJECT (self, "ConfigDecoderSpecific 0x%x",
best_config->ConfigDecoderSpecific);
- /* FIXME: Revisit this at some point.
- * Some 4K VP9 + super frame enabled streams would be broken with
- * this configuration (driver crash) on Intel and Nvidia
- */
-#if 0
/* bit 14 is equal to 1b means this config support array of texture and
* it's recommended type as per DXVA spec */
if ((best_config->ConfigDecoderSpecific & 0x4000) == 0x4000) {
GST_DEBUG_OBJECT (self, "Config support array of texture");
self->use_array_of_texture = TRUE;
}
-#endif
hr = video_device->CreateVideoDecoder (&decoder_desc,
best_config, &self->decoder_handle);
ID3D11VideoDecoderOutputView *
gst_d3d11_decoder_get_output_view_from_buffer (GstD3D11Decoder * decoder,
- GstBuffer * buffer)
+ GstBuffer * buffer, guint8 * index)
{
GstMemory *mem;
GstD3D11Memory *dmem;
return NULL;
}
- return view;
-}
-
-guint8
-gst_d3d11_decoder_get_output_view_index (ID3D11VideoDecoderOutputView *
- view_handle)
-{
- D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC view_desc;
+ if (index) {
+ if (decoder->use_array_of_texture) {
+ guint8 id;
+ gpointer val = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
+ gst_d3d11_decoder_view_id_quark ());
+ if (!val) {
+ GST_ERROR_OBJECT (decoder, "memory has no qdata");
+ return NULL;
+ }
- g_return_val_if_fail (view_handle != NULL, 0xff);
+ id = (guint8) GPOINTER_TO_UINT (val);
+ g_assert (id < 128);
- view_handle->GetDesc (&view_desc);
+ *index = (id - 1);
+ } else {
+ *index = gst_d3d11_memory_get_subresource_index (dmem);
+ }
+ }
- return view_desc.Texture2D.ArraySlice;
+ return view;
}
static gboolean
gst_d3d11_decoder_can_direct_render (GstD3D11Decoder * decoder,
GstBuffer * view_buffer, GstMiniObject * picture)
{
- return FALSE;
+ g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
+
+ if (!decoder->can_direct_rendering || !decoder->downstream_supports_d3d11 ||
+ !decoder->use_array_of_texture)
+ return FALSE;
+
+ return TRUE;
}
/* Keep sync with chromium and keep in sorted order.
GstVideoDecoder * videodec);
ID3D11VideoDecoderOutputView * gst_d3d11_decoder_get_output_view_from_buffer (GstD3D11Decoder * decoder,
- GstBuffer * buffer);
-
-guint8 gst_d3d11_decoder_get_output_view_index (ID3D11VideoDecoderOutputView * view_handle);
+ GstBuffer * buffer,
+ guint8 * view_id);
gboolean gst_d3d11_decoder_process_output (GstD3D11Decoder * decoder,
GstVideoInfo * info,
static ID3D11VideoDecoderOutputView *
gst_d3d11_h264_dec_get_output_view_from_picture (GstD3D11H264Dec * self,
- GstH264Picture * picture)
+ GstH264Picture * picture, guint8 * view_id)
{
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
}
view = gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
- view_buffer);
+ view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
GArray *dpb_array;
GstH264SPS *sps;
GstH264PPS *pps;
sps = pps->sequence;
g_assert (sps != NULL);
- view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture);
+ view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
+ &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
for (i = dpb_array->len - 1, j = 0; i >= 0 && j < 16; i--) {
GstH264Picture *other = g_array_index (dpb_array, GstH264Picture *, i);
- ID3D11VideoDecoderOutputView *other_view;
- gint id = 0xff;
+ guint8 id = 0xff;
if (!GST_H264_PICTURE_IS_REF (other))
continue;
if (other->second_field)
continue;
- other_view = gst_d3d11_h264_dec_get_output_view_from_picture (self, other);
-
- if (other_view)
- id = gst_d3d11_decoder_get_output_view_index (other_view);
-
+ gst_d3d11_h264_dec_get_output_view_from_picture (self, other, &id);
self->ref_frame_list[j].Index7Bits = id;
if (GST_H264_PICTURE_IS_LONG_TERM_REF (other)) {
gst_d3d11_h264_dec_fill_picture_params (self, &slice->header, &pic_params);
- pic_params.CurrPic.Index7Bits =
- gst_d3d11_decoder_get_output_view_index (view);
+ pic_params.CurrPic.Index7Bits = view_id;
pic_params.RefPicFlag = GST_H264_PICTURE_IS_REF (picture);
pic_params.frame_num = picture->frame_num;
static ID3D11VideoDecoderOutputView *
gst_d3d11_h265_dec_get_output_view_from_picture (GstD3D11H265Dec * self,
- GstH265Picture * picture)
+ GstH265Picture * picture, guint8 * view_id)
{
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
}
view = gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
- view_buffer);
+ view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
guint i, j;
GArray *dpb_array;
GstH265SPS *sps;
sps = pps->sps;
g_assert (sps != NULL);
- view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture);
+ view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture,
+ &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
for (i = 0; i < dpb_array->len && i < G_N_ELEMENTS (self->ref_pic_list); i++) {
GstH265Picture *other = g_array_index (dpb_array, GstH265Picture *, i);
- ID3D11VideoDecoderOutputView *other_view;
- gint id = 0xff;
+ guint8 id = 0xff;
if (!other->ref) {
GST_LOG_OBJECT (self, "%dth picture in dpb is not reference, skip", i);
continue;
}
- other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self, other);
-
- if (other_view)
- id = gst_d3d11_decoder_get_output_view_index (other_view);
-
+ gst_d3d11_h265_dec_get_output_view_from_picture (self, other, &id);
self->ref_pic_list[i].Index7Bits = id;
self->ref_pic_list[i].AssociatedFlag = other->long_term;
self->pic_order_cnt_val_list[i] = other->pic_order_cnt;
for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_st_curr_before); i++) {
GstH265Picture *other = NULL;
- gint id = 0xff;
+ guint8 other_view_id = 0xff;
+ guint8 id = 0xff;
while (other == NULL && j < decoder->NumPocStCurrBefore)
other = decoder->RefPicSetStCurrBefore[j++];
if (other) {
ID3D11VideoDecoderOutputView *other_view;
- other_view =
- gst_d3d11_h265_dec_get_output_view_from_picture (self, other);
+ other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
+ other, &other_view_id);
- if (other_view) {
- id = gst_d3d11_h265_dec_get_ref_index (self,
- gst_d3d11_decoder_get_output_view_index (other_view));
- }
+ if (other_view)
+ id = gst_d3d11_h265_dec_get_ref_index (self, other_view_id);
}
self->ref_pic_set_st_curr_before[i] = id;
for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_st_curr_after); i++) {
GstH265Picture *other = NULL;
- gint id = 0xff;
+ guint8 other_view_id = 0xff;
+ guint8 id = 0xff;
while (other == NULL && j < decoder->NumPocStCurrAfter)
other = decoder->RefPicSetStCurrAfter[j++];
if (other) {
ID3D11VideoDecoderOutputView *other_view;
- other_view =
- gst_d3d11_h265_dec_get_output_view_from_picture (self, other);
+ other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
+ other, &other_view_id);
- if (other_view) {
- id = gst_d3d11_h265_dec_get_ref_index (self,
- gst_d3d11_decoder_get_output_view_index (other_view));
- }
+ if (other_view)
+ id = gst_d3d11_h265_dec_get_ref_index (self, other_view_id);
}
self->ref_pic_set_st_curr_after[i] = id;
for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_lt_curr); i++) {
GstH265Picture *other = NULL;
- gint id = 0xff;
+ guint8 other_view_id = 0xff;
+ guint8 id = 0xff;
while (other == NULL && j < decoder->NumPocLtCurr)
other = decoder->RefPicSetLtCurr[j++];
if (other) {
ID3D11VideoDecoderOutputView *other_view;
- other_view =
- gst_d3d11_h265_dec_get_output_view_from_picture (self, other);
+ other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
+ other, &other_view_id);
- if (other_view) {
- id = gst_d3d11_h265_dec_get_ref_index (self,
- gst_d3d11_decoder_get_output_view_index (other_view));
- }
+ if (other_view)
+ id = gst_d3d11_h265_dec_get_ref_index (self, other_view_id);
}
self->ref_pic_set_lt_curr[i] = id;
}
- view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture);
-
- if (!view) {
- GST_ERROR_OBJECT (self, "current picture does not have output view");
- return FALSE;
- }
-
gst_d3d11_h265_dec_fill_picture_params (self, &slice->header, &pic_params);
- pic_params.CurrPic.Index7Bits =
- gst_d3d11_decoder_get_output_view_index (view);
+ pic_params.CurrPic.Index7Bits = view_id;
pic_params.IrapPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);
pic_params.IdrPicFlag = GST_H265_IS_NAL_TYPE_IDR (slice->nalu.type);
pic_params.IntraPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);
static ID3D11VideoDecoderOutputView *
gst_d3d11_mpeg2_dec_get_output_view_from_picture (GstD3D11Mpeg2Dec * self,
- GstMpeg2Picture * picture)
+ GstMpeg2Picture * picture, guint8 * view_id)
{
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
view =
gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
- view_buffer);
+ view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
ID3D11VideoDecoderOutputView *view;
ID3D11VideoDecoderOutputView *other_view;
+ guint8 view_id = 0xff;
+ guint8 other_view_id = 0xff;
DXVA_PictureParameters pic_params = { 0, };
DXVA_QmatrixData iq_matrix = { 0, };
guint d3d11_buffer_size = 0;
gboolean is_field =
picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
- view = gst_d3d11_mpeg2_dec_get_output_view_from_picture (self, picture);
+ view = gst_d3d11_mpeg2_dec_get_output_view_from_picture (self, picture,
+ &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
}
/* Fill DXVA_PictureParameters */
- pic_params.wDecodedPictureIndex =
- gst_d3d11_decoder_get_output_view_index (view);
+ pic_params.wDecodedPictureIndex = view_id;
pic_params.wForwardRefPictureIndex = 0xffff;
pic_params.wBackwardRefPictureIndex = 0xffff;
if (next_picture) {
other_view =
gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
- next_picture);
+ next_picture, &other_view_id);
if (other_view)
- pic_params.wBackwardRefPictureIndex =
- gst_d3d11_decoder_get_output_view_index (other_view);
+ pic_params.wBackwardRefPictureIndex = other_view_id;
}
}
/* fall-through */
if (prev_picture) {
other_view =
gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
- prev_picture);
+ prev_picture, &other_view_id);
if (other_view)
- pic_params.wForwardRefPictureIndex =
- gst_d3d11_decoder_get_output_view_index (other_view);
+ pic_params.wForwardRefPictureIndex = other_view_id;
}
}
default:
static ID3D11VideoDecoderOutputView *
gst_d3d11_vp8_dec_get_output_view_from_picture (GstD3D11Vp8Dec * self,
- GstVp8Picture * picture)
+ GstVp8Picture * picture, guint8 * view_id)
{
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
view =
gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
- view_buffer);
+ view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
ID3D11VideoDecoderOutputView *view;
- view = gst_d3d11_vp8_dec_get_output_view_from_picture (self, picture);
+ view = gst_d3d11_vp8_dec_get_output_view_from_picture (self, picture, NULL);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
{
GstVp8Decoder *decoder = GST_VP8_DECODER (self);
ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
if (decoder->alt_ref_picture) {
view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
- decoder->alt_ref_picture);
+ decoder->alt_ref_picture, &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "picture does not have output view handle");
return;
}
- params->alt_fb_idx.Index7Bits =
- gst_d3d11_decoder_get_output_view_index (view);
+ params->alt_fb_idx.Index7Bits = view_id;
} else {
params->alt_fb_idx.bPicEntry = 0xff;
}
if (decoder->golden_ref_picture) {
view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
- decoder->golden_ref_picture);
+ decoder->golden_ref_picture, &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "picture does not have output view handle");
return;
}
- params->gld_fb_idx.Index7Bits =
- gst_d3d11_decoder_get_output_view_index (view);
+ params->gld_fb_idx.Index7Bits = view_id;
} else {
params->gld_fb_idx.bPicEntry = 0xff;
}
if (decoder->last_picture) {
view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
- decoder->last_picture);
+ decoder->last_picture, &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "picture does not have output view handle");
return;
}
- params->lst_fb_idx.Index7Bits =
- gst_d3d11_decoder_get_output_view_index (view);
+ params->lst_fb_idx.Index7Bits = view_id;
} else {
params->lst_fb_idx.bPicEntry = 0xff;
}
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
DXVA_PicParams_VP8 pic_params = { 0, };
ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
const GstVp8FrameHdr *frame_hdr = &picture->frame_hdr;
- view = gst_d3d11_vp8_dec_get_output_view_from_picture (self, picture);
+ view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
+ picture, &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
pic_params.first_part_size = frame_hdr->first_part_size;
pic_params.width = self->width;
pic_params.height = self->height;
- pic_params.CurrPic.Index7Bits =
- gst_d3d11_decoder_get_output_view_index (view);
+ pic_params.CurrPic.Index7Bits = view_id;
pic_params.StatusReportFeedbackNumber = 1;
gst_d3d11_vp8_dec_copy_frame_params (self, picture, parser, &pic_params);
static ID3D11VideoDecoderOutputView *
gst_d3d11_vp9_dec_get_output_view_from_picture (GstD3D11Vp9Dec * self,
- GstVp9Picture * picture)
+ GstVp9Picture * picture, guint8 * view_id)
{
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
view =
gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
- view_buffer);
+ view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
ID3D11VideoDecoderOutputView *view;
- view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture);
+ view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture, NULL);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
if (dpb->pic_list[i]) {
GstVp9Picture *other_pic = dpb->pic_list[i];
ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
- view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, other_pic);
+ view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, other_pic,
+ &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "picture does not have output view handle");
return;
}
- params->ref_frame_map[i].Index7Bits =
- gst_d3d11_decoder_get_output_view_index (view);
+ params->ref_frame_map[i].Index7Bits = view_id;
params->ref_frame_coded_width[i] = picture->frame_hdr.width;
params->ref_frame_coded_height[i] = picture->frame_hdr.height;
} else {
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
DXVA_PicParams_VP9 pic_params = { 0, };
ID3D11VideoDecoderOutputView *view;
+ guint8 view_id = 0xff;
- view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture);
+ view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture,
+ &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
}
- pic_params.CurrPic.Index7Bits =
- gst_d3d11_decoder_get_output_view_index (view);
+ pic_params.CurrPic.Index7Bits = view_id;
pic_params.uncompressed_header_size_byte_aligned =
picture->frame_hdr.frame_header_length_in_bytes;
pic_params.first_partition_size = picture->frame_hdr.first_partition_size;