2 * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
24 #include "gstqsvdecoder.h"
25 #include <mfxvideo++.h>
29 #include "gstqsvallocator_d3d11.h"
34 using namespace Microsoft::WRL;
37 #include "gstqsvallocator_va.h"
38 #endif /* G_OS_WIN32 */
40 GST_DEBUG_CATEGORY_STATIC (gst_qsv_decoder_debug);
41 #define GST_CAT_DEFAULT gst_qsv_decoder_debug
43 #define GST_QSV_DECODER_FLOW_NEW_SEQUENCE GST_FLOW_CUSTOM_SUCCESS_1
/* Pooled decoder surface: couples an mfxFrameSurface1 with its backing
 * GstQsvFrame (referenced through Data.MemId). Entries live in
 * GstQsvDecoderPrivate::surface_pool.
 * NOTE(review): additional members are elided in this excerpt. */
52 typedef struct _GstQsvDecoderSurface
54 mfxFrameSurface1 surface;
56 /* mfxFrameSurface1:Data:MemId */
59 } GstQsvDecoderSurface;
/* One in-flight asynchronous decode: the sync point returned by
 * DecodeFrameAsync() plus the surface it will resolve into. Entries
 * live in GstQsvDecoderPrivate::task_pool and are reused round-robin. */
61 typedef struct _GstQsvDecoderTask
63 mfxSyncPoint sync_point;
65 /* without ownership */
66 GstQsvDecoderSurface *surface;
/* Instance-private state shared by all concrete QSV decoder subclasses.
 * NOTE(review): some members (device, session, info, surface_pool,
 * task_pool, is_live, ...) are elided in this excerpt but referenced
 * elsewhere in the file. */
69 struct _GstQsvDecoderPrivate
73 GstVideoCodecState *input_state;
74 GstVideoCodecState *output_state;
75 GstQsvAllocator *allocator;
/* Pool backing the decoder's own output surfaces */
77 GstBufferPool *internal_pool;
/* Width/height padded to the decoder's alignment requirements */
80 GstVideoInfo aligned_info;
83 mfxVideoParam video_param;
85 /* holding allocated GstQsvFrame, should be cleared via
86 * mfxFrameAllocator::Free() */
87 mfxFrameAllocResponse response;
89 MFXVideoDECODE *decoder;
90 GstQsvMemoryType mem_type;
91 gboolean can_direct_render;
95 /* Array of GstQsvDecoderSurface */
98 /* Array of GstQsvDecoderTask */
/* Round-robin cursor into task_pool, see gst_qsv_decoder_get_next_task() */
100 guint next_task_index;
/* Register GstQsvDecoder as an abstract GstVideoDecoder subclass with
 * private instance data and its own "qsvdecoder" debug category. */
103 #define gst_qsv_decoder_parent_class parent_class
104 G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstQsvDecoder, gst_qsv_decoder,
105 GST_TYPE_VIDEO_DECODER, G_ADD_PRIVATE (GstQsvDecoder);
106 GST_DEBUG_CATEGORY_INIT (gst_qsv_decoder_debug,
107 "qsvdecoder", 0, "qsvdecoder"));
109 static void gst_qsv_decoder_get_property (GObject * object, guint prop_id,
110 GValue * value, GParamSpec * pspec);
111 static void gst_qsv_decoder_dispose (GObject * object);
112 static void gst_qsv_decoder_finalize (GObject * object);
114 static void gst_qsv_decoder_set_context (GstElement * element,
115 GstContext * context);
117 static gboolean gst_qsv_decoder_open (GstVideoDecoder * decoder);
118 static gboolean gst_qsv_decoder_stop (GstVideoDecoder * decoder);
119 static gboolean gst_qsv_decoder_close (GstVideoDecoder * decoder);
120 static gboolean gst_qsv_decoder_set_format (GstVideoDecoder * decoder,
121 GstVideoCodecState * state);
122 static gboolean gst_qsv_decoder_negotiate (GstVideoDecoder * decoder);
123 static gboolean gst_qsv_decoder_decide_allocation (GstVideoDecoder * decoder,
125 static gboolean gst_qsv_decoder_sink_query (GstVideoDecoder * decoder,
127 static gboolean gst_qsv_decoder_src_query (GstVideoDecoder * decoder,
129 static GstFlowReturn gst_qsv_decoder_handle_frame (GstVideoDecoder * decoder,
130 GstVideoCodecFrame * frame);
131 static gboolean gst_qsv_decoder_flush (GstVideoDecoder * decoder);
132 static GstFlowReturn gst_qsv_decoder_finish (GstVideoDecoder * decoder);
133 static GstFlowReturn gst_qsv_decoder_drain (GstVideoDecoder * decoder);
135 static void gst_qsv_decoder_surface_clear (GstQsvDecoderSurface * surface);
136 static void gst_qsv_decoder_task_clear (GstQsvDecoderTask * task);
/* Class init: install read-only identification properties and wire up
 * the GObject / GstElement / GstVideoDecoder virtual functions. */
139 gst_qsv_decoder_class_init (GstQsvDecoderClass * klass)
141 GObjectClass *object_class = G_OBJECT_CLASS (klass);
142 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
143 GstVideoDecoderClass *videodec_class = GST_VIDEO_DECODER_CLASS (klass);
145 object_class->get_property = gst_qsv_decoder_get_property;
146 object_class->dispose = gst_qsv_decoder_dispose;
147 object_class->finalize = gst_qsv_decoder_finalize;
/* DXGI adapter identification; conditionally available (Windows path) */
150 g_object_class_install_property (object_class, PROP_ADAPTER_LUID,
151 g_param_spec_int64 ("adapter-luid", "Adapter LUID",
152 "DXGI Adapter LUID (Locally Unique Identifier) of created device",
153 G_MININT64, G_MAXINT64, 0,
154 (GParamFlags) (GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_READABLE |
155 G_PARAM_STATIC_STRINGS)));
/* DRM device identification; conditionally available (VA/Linux path) */
157 g_object_class_install_property (object_class, PROP_DEVICE_PATH,
158 g_param_spec_string ("device-path", "Device Path",
159 "DRM device path", nullptr,
160 (GParamFlags) (GST_PARAM_CONDITIONALLY_AVAILABLE |
161 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
164 element_class->set_context = GST_DEBUG_FUNCPTR (gst_qsv_decoder_set_context);
166 videodec_class->open = GST_DEBUG_FUNCPTR (gst_qsv_decoder_open);
167 videodec_class->stop = GST_DEBUG_FUNCPTR (gst_qsv_decoder_stop);
168 videodec_class->close = GST_DEBUG_FUNCPTR (gst_qsv_decoder_close);
169 videodec_class->negotiate = GST_DEBUG_FUNCPTR (gst_qsv_decoder_negotiate);
170 videodec_class->decide_allocation =
171 GST_DEBUG_FUNCPTR (gst_qsv_decoder_decide_allocation);
172 videodec_class->sink_query = GST_DEBUG_FUNCPTR (gst_qsv_decoder_sink_query);
173 videodec_class->src_query = GST_DEBUG_FUNCPTR (gst_qsv_decoder_src_query);
174 videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_qsv_decoder_set_format);
175 videodec_class->handle_frame =
176 GST_DEBUG_FUNCPTR (gst_qsv_decoder_handle_frame);
177 videodec_class->drain = GST_DEBUG_FUNCPTR (gst_qsv_decoder_drain);
178 videodec_class->finish = GST_DEBUG_FUNCPTR (gst_qsv_decoder_finish);
179 videodec_class->flush = GST_DEBUG_FUNCPTR (gst_qsv_decoder_flush);
/* Instance init: create the surface/task bookkeeping arrays (zero-filled,
 * with clear funcs so shrinking releases held resources) and mark the
 * decoder packetized — upstream delivers whole encoded frames. */
183 gst_qsv_decoder_init (GstQsvDecoder * self)
185 GstQsvDecoderPrivate *priv;
188 (GstQsvDecoderPrivate *) gst_qsv_decoder_get_instance_private (self);
190 priv->surface_pool = g_array_new (FALSE, TRUE, sizeof (GstQsvDecoderSurface));
191 g_array_set_clear_func (priv->surface_pool,
192 (GDestroyNotify) gst_qsv_decoder_surface_clear);
194 priv->task_pool = g_array_new (FALSE, TRUE, sizeof (GstQsvDecoderTask));
195 g_array_set_clear_func (priv->task_pool,
196 (GDestroyNotify) gst_qsv_decoder_task_clear);
198 gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
/* Property getter; both properties are read-only and come from class
 * data filled in at subclass registration time. */
202 gst_qsv_decoder_get_property (GObject * object, guint prop_id, GValue * value,
205 GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (object);
208 case PROP_ADAPTER_LUID:
209 g_value_set_int64 (value, klass->adapter_luid);
211 case PROP_DEVICE_PATH:
212 g_value_set_string (value, klass->display_path);
215 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject::dispose — drop the platform device reference; may run more
 * than once, gst_clear_object() is safe for that. */
221 gst_qsv_decoder_dispose (GObject * object)
223 GstQsvDecoder *self = GST_QSV_DECODER (object);
224 GstQsvDecoderPrivate *priv = self->priv;
226 gst_clear_object (&priv->device);
228 G_OBJECT_CLASS (parent_class)->dispose (object);
/* GObject::finalize — release the arrays created in instance init. */
232 gst_qsv_decoder_finalize (GObject * object)
234 GstQsvDecoder *self = GST_QSV_DECODER (object);
235 GstQsvDecoderPrivate *priv = self->priv;
237 g_array_unref (priv->task_pool);
238 g_array_unref (priv->surface_pool);
240 G_OBJECT_CLASS (parent_class)->finalize (object);
/* GstElement::set_context — accept a shared D3D11 device (Windows) or
 * VA display (Linux) supplied via pipeline context, then chain up. */
244 gst_qsv_decoder_set_context (GstElement * element, GstContext * context)
246 GstQsvDecoder *self = GST_QSV_DECODER (element);
247 GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (element);
248 GstQsvDecoderPrivate *priv = self->priv;
251 gst_d3d11_handle_set_context_for_adapter_luid (element,
252 context, klass->adapter_luid, (GstD3D11Device **) & priv->device)
254 gst_va_handle_set_context (element, context, klass->display_path,
255 (GstVaDisplay **) & priv->device);
258 GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
/* Windows/D3D11 variant: acquire the D3D11 device for this adapter LUID,
 * enable multithread protection on it, then hand the device handle and a
 * frame allocator to the MFX session. */
263 gst_qsv_decoder_open_platform_device (GstQsvDecoder * self)
265 GstQsvDecoderPrivate *priv = self->priv;
266 GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
267 ComPtr < ID3D10Multithread > multi_thread;
269 ID3D11Device *device_handle;
271 GstD3D11Device *device;
273 if (!gst_d3d11_ensure_element_data_for_adapter_luid (GST_ELEMENT (self),
274 klass->adapter_luid, (GstD3D11Device **) & priv->device)) {
275 GST_ERROR_OBJECT (self, "d3d11 device is unavailable");
279 device = GST_D3D11_DEVICE_CAST (priv->device);
280 priv->allocator = gst_qsv_d3d11_allocator_new (device);
282 /* For D3D11 device handle to be used by QSV, multithread protection layer
283 * must be enabled before the MFXVideoCORE_SetHandle() call.
285 * TODO: Need to check performance impact by this mutithread protection layer,
286 * since it may have a negative impact on overall pipeline performance.
287 * If so, we should create decoding session dedicated d3d11 device and
288 * make use of shared resource */
289 device_handle = gst_d3d11_device_get_device_handle (device);
290 hr = device_handle->QueryInterface (IID_PPV_ARGS (&multi_thread));
291 if (!gst_d3d11_result (hr, device)) {
292 GST_ERROR_OBJECT (self, "ID3D10Multithread interface is unavailable");
296 multi_thread->SetMultithreadProtected (TRUE);
297 status = MFXVideoCORE_SetHandle (priv->session, MFX_HANDLE_D3D11_DEVICE,
299 if (status != MFX_ERR_NONE) {
300 GST_ERROR_OBJECT (self, "Failed to set d3d11 device handle");
304 /* Similar to the QSV encoder, we don't use this allocator for actual
305 * D3D11 texture allocation. But still required because of QSV API design.
307 status = MFXVideoCORE_SetFrameAllocator (priv->session,
308 gst_qsv_allocator_get_allocator_handle (priv->allocator));
309 if (status != MFX_ERR_NONE) {
310 GST_ERROR_OBJECT (self, "Failed to set frame allocator %d", status);
/* Linux/VA variant: acquire the VA display for the configured DRM path,
 * then hand the VADisplay handle and a frame allocator to the session. */
318 gst_qsv_decoder_open_platform_device (GstQsvDecoder * self)
320 GstQsvDecoderPrivate *priv = self->priv;
321 GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
323 GstVaDisplay *display;
325 if (!gst_va_ensure_element_data (GST_ELEMENT (self), klass->display_path,
326 (GstVaDisplay **) & priv->device)) {
327 GST_ERROR_OBJECT (self, "VA display is unavailable");
331 display = GST_VA_DISPLAY (priv->device);
333 priv->allocator = gst_qsv_va_allocator_new (display);
335 status = MFXVideoCORE_SetHandle (priv->session, MFX_HANDLE_VA_DISPLAY,
336 gst_va_display_get_va_dpy (display));
337 if (status != MFX_ERR_NONE) {
338 GST_ERROR_OBJECT (self, "Failed to set VA display handle");
342 status = MFXVideoCORE_SetFrameAllocator (priv->session,
343 gst_qsv_allocator_get_allocator_handle (priv->allocator));
344 if (status != MFX_ERR_NONE) {
345 GST_ERROR_OBJECT (self, "Failed to set frame allocator %d", status);
/* GstVideoDecoder::open — create the MFX session for the subclass's
 * implementation index and bind the platform device; on failure, roll
 * back session, allocator and device so close() stays safe. */
354 gst_qsv_decoder_open (GstVideoDecoder * decoder)
356 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
357 GstQsvDecoderPrivate *priv = self->priv;
358 GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
361 status = MFXCreateSession (gst_qsv_get_loader (), klass->impl_index,
363 if (status != MFX_ERR_NONE) {
364 GST_ERROR_OBJECT (self, "Failed to create session");
368 if (!gst_qsv_decoder_open_platform_device (self)) {
369 g_clear_pointer (&priv->session, MFXClose);
370 gst_clear_object (&priv->allocator);
371 gst_clear_object (&priv->device);
/* Tear down per-stream decoding state: delete the MFXVideoDECODE object,
 * deactivate/drop the internal pool, free the allocator response holding
 * the GstQsvFrame objects, and empty both bookkeeping arrays. The session
 * and device survive so decoding can be re-initialized. */
380 gst_qsv_decoder_reset (GstQsvDecoder * self)
382 GstQsvDecoderPrivate *priv = self->priv;
384 GST_DEBUG_OBJECT (self, "Reset");
387 delete priv->decoder;
388 priv->decoder = nullptr;
391 if (priv->internal_pool) {
392 gst_buffer_pool_set_active (priv->internal_pool, FALSE);
393 gst_clear_object (&priv->internal_pool);
396 if (priv->allocator) {
397 mfxFrameAllocator *alloc =
398 gst_qsv_allocator_get_allocator_handle (priv->allocator);
/* Releases the GstQsvFrames cached in priv->response */
399 alloc->Free ((mfxHDL) priv->allocator, &priv->response);
401 memset (&priv->response, 0, sizeof (mfxFrameAllocResponse));
403 g_array_set_size (priv->surface_pool, 0);
404 g_array_set_size (priv->task_pool, 0);
/* GstVideoDecoder::stop — drop codec states and reset decoding state. */
410 gst_qsv_decoder_stop (GstVideoDecoder * decoder)
412 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
413 GstQsvDecoderPrivate *priv = self->priv;
415 g_clear_pointer (&priv->input_state, gst_video_codec_state_unref);
416 g_clear_pointer (&priv->output_state, gst_video_codec_state_unref);
418 return gst_qsv_decoder_reset (self);
/* GstVideoDecoder::close — counterpart of open(): close the MFX session
 * and release allocator and platform device. */
422 gst_qsv_decoder_close (GstVideoDecoder * decoder)
424 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
425 GstQsvDecoderPrivate *priv = self->priv;
427 g_clear_pointer (&priv->session, MFXClose);
428 gst_clear_object (&priv->allocator);
429 gst_clear_object (&priv->device);
/* GArray clear func for surface_pool entries — zero the whole struct.
 * NOTE(review): frame release logic, if any, is elided in this excerpt. */
435 gst_qsv_decoder_surface_clear (GstQsvDecoderSurface * surface)
440 memset (surface, 0, sizeof (GstQsvDecoderSurface));
/* GArray clear func for task_pool entries. Clears the associated
 * surface's need-output mark, detaches its GstBuffer when MFX no longer
 * holds a lock on it, and resets the task's non-owning pointers. */
444 gst_qsv_decoder_task_clear (GstQsvDecoderTask * task)
450 task->surface->need_output = FALSE;
451 if (task->surface->frame && task->surface->surface.Data.Locked == 0)
452 gst_qsv_frame_set_buffer (task->surface->frame, nullptr);
455 task->surface = nullptr;
456 task->sync_point = nullptr;
/* Find a surface not locked by MFX and not awaiting output, opportunistically
 * releasing buffers of other unlocked surfaces, then attach a fresh buffer
 * acquired from the internal pool. Returns nullptr-equivalent failure path
 * when no surface or buffer is available. */
459 static GstQsvDecoderSurface *
460 gst_qsv_decoder_get_next_surface (GstQsvDecoder * self)
462 GstQsvDecoderPrivate *priv = self->priv;
463 GstQsvDecoderSurface *surface = nullptr;
467 /* Clear unlocked frames as well */
468 for (guint i = 0; i < priv->surface_pool->len; i++) {
469 GstQsvDecoderSurface *iter =
470 &g_array_index (priv->surface_pool, GstQsvDecoderSurface, i);
/* Skip surfaces still owned by the decoder or pending downstream output */
472 if (iter->surface.Data.Locked > 0 || iter->need_output)
475 gst_qsv_frame_set_buffer (iter->frame, nullptr);
482 GST_ERROR_OBJECT (self, "Failed to find unlocked surface");
486 ret = gst_buffer_pool_acquire_buffer (priv->internal_pool, &buffer, nullptr);
487 if (ret != GST_FLOW_OK) {
488 GST_ERROR_OBJECT (self, "Failed to allocate buffer");
492 gst_qsv_frame_set_buffer (surface->frame, buffer);
/* Round-robin task selection: return the task at next_task_index and
 * advance the cursor modulo the pool size. */
497 static GstQsvDecoderTask *
498 gst_qsv_decoder_get_next_task (GstQsvDecoder * self)
500 GstQsvDecoderPrivate *priv = self->priv;
501 GstQsvDecoderTask *task;
503 task = &g_array_index (priv->task_pool,
504 GstQsvDecoderTask, priv->next_task_index);
505 priv->next_task_index++;
506 priv->next_task_index %= priv->task_pool->len;
/* Match a decoded surface timestamp back to a pending GstVideoCodecFrame.
 * Exact PTS match wins; otherwise the frame with the smallest absolute
 * PTS difference is chosen, and frames older than the chosen one are
 * released (possible with single-field H.264 input). Falls back to the
 * oldest pending frame when pts is invalid or no candidate matched.
 * Returns a new reference. */
511 static GstVideoCodecFrame *
512 gst_qsv_decoder_find_output_frame (GstQsvDecoder * self, GstClockTime pts)
514 GList *frames, *iter;
515 GstVideoCodecFrame *ret = nullptr;
516 GstVideoCodecFrame *closest = nullptr;
517 guint64 min_pts_abs_diff = 0;
519 /* give up, just returns the oldest frame */
520 if (!GST_CLOCK_TIME_IS_VALID (pts))
521 return gst_video_decoder_get_oldest_frame (GST_VIDEO_DECODER (self));
523 frames = gst_video_decoder_get_frames (GST_VIDEO_DECODER (self));
525 for (iter = frames; iter; iter = g_list_next (iter)) {
526 GstVideoCodecFrame *frame = (GstVideoCodecFrame *) iter->data;
529 if (!GST_CLOCK_TIME_IS_VALID (frame->pts))
532 if (pts == frame->pts) {
/* Unsigned-safe |pts - frame->pts| */
537 if (pts >= frame->pts)
538 abs_diff = pts - frame->pts;
540 abs_diff = frame->pts - pts;
542 if (!closest || abs_diff < min_pts_abs_diff) {
544 min_pts_abs_diff = abs_diff;
552 gst_video_codec_frame_ref (ret);
554 /* Release older frames, it can happen if input buffer holds only single
555 * field in case of H264 */
556 for (iter = frames; iter; iter = g_list_next (iter)) {
557 GstVideoCodecFrame *frame = (GstVideoCodecFrame *) iter->data;
562 if (!GST_CLOCK_TIME_IS_VALID (frame->pts))
565 if (frame->pts < ret->pts) {
566 gst_video_decoder_release_frame (GST_VIDEO_DECODER (self),
567 gst_video_codec_frame_ref (frame));
571 ret = gst_video_decoder_get_oldest_frame (GST_VIDEO_DECODER (self));
575 g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref);
/* Wait on the task's sync point, download/wrap the decoded surface into a
 * GstBuffer, tag interlacing flags, match it to a pending codec frame and
 * push it downstream. The task is always cleared before returning. */
581 gst_qsv_decoder_finish_frame (GstQsvDecoder * self, GstQsvDecoderTask * task,
584 GstVideoDecoder *vdec = GST_VIDEO_DECODER (self);
585 GstQsvDecoderPrivate *priv = self->priv;
587 GstVideoCodecFrame *frame;
588 GstClockTime pts = GST_CLOCK_TIME_NONE;
590 guint retry_count = 0;
592 const guint retry_threshold = 100;
593 GstQsvDecoderSurface *surface = task->surface;
595 gboolean force_copy = FALSE;
597 g_assert (surface != nullptr);
598 g_assert (task->sync_point != nullptr);
600 status = MFX_ERR_NONE;
602 /* magic number 100 ms */
603 status = MFXVideoCORE_SyncOperation (priv->session, task->sync_point, 100);
605 /* Retry up to 10 sec (100 ms x 100 times), that should be enough time for
606 * decoding a frame using hardware */
607 if (status == MFX_WRN_IN_EXECUTION && retry_count < retry_threshold) {
608 GST_DEBUG_OBJECT (self,
609 "Operation is still in execution, retry count (%d/%d)",
610 retry_count, retry_threshold);
619 gst_qsv_decoder_task_clear (task);
623 if (status != MFX_ERR_NONE) {
624 gst_qsv_decoder_task_clear (task);
/* Aborted sync maps to FLUSHING, any other failure to ERROR */
626 if (status == MFX_ERR_ABORTED) {
627 GST_INFO_OBJECT (self, "Operation was aborted");
628 return GST_FLOW_FLUSHING;
631 GST_WARNING_OBJECT (self, "SyncOperation returned %d (%s)",
632 QSV_STATUS_ARGS (status));
634 return GST_FLOW_ERROR;
637 pts = gst_qsv_timestamp_to_gst (surface->surface.Data.TimeStamp);
638 pool = gst_video_decoder_get_buffer_pool (vdec);
640 GST_ERROR_OBJECT (self, "Decoder doesn't hold buffer pool");
641 gst_qsv_decoder_task_clear (task);
642 return GST_FLOW_ERROR;
645 /* Copy decoded frame in case of reverse playback, too many bound frame to
646 * decoder may cause driver unhappy */
647 if (!priv->can_direct_render || vdec->input_segment.rate < 0.0)
650 /* TODO: Handle non-zero crop-{x,y} position via crop meta or similar */
651 buffer = gst_qsv_allocator_download_frame (priv->allocator, force_copy,
652 surface->frame, pool);
653 gst_object_unref (pool);
654 gst_qsv_decoder_task_clear (task);
657 GST_ERROR_OBJECT (self, "No output buffer");
658 return GST_FLOW_ERROR;
/* For mixed interlace streams, translate MFX PicStruct into buffer flags */
661 if (priv->aligned_info.interlace_mode == GST_VIDEO_INTERLACE_MODE_MIXED) {
662 if ((surface->surface.Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF) != 0) {
663 GST_BUFFER_FLAG_SET (buffer,
664 GST_VIDEO_BUFFER_FLAG_TFF | GST_VIDEO_BUFFER_FLAG_INTERLACED);
665 } else if ((surface->surface.Info.PicStruct & MFX_PICSTRUCT_FIELD_BFF) != 0) {
666 GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
667 GST_BUFFER_FLAG_UNSET (buffer, GST_VIDEO_BUFFER_FLAG_TFF);
671 frame = gst_qsv_decoder_find_output_frame (self, pts);
674 frame->output_buffer = buffer;
676 return gst_video_decoder_finish_frame (vdec, frame);
679 /* Empty available frame, something went wrong but we can just push this
681 GST_WARNING_OBJECT (self, "Failed to find corresponding frame");
682 GST_BUFFER_PTS (buffer) = pts;
684 return gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), buffer);
/* Submit one bitstream chunk (nullptr = drain) to DecodeFrameAsync().
 * Completes the task at the round-robin cursor first if it is pending,
 * then retries submission handling the various MFX status codes:
 * param change, new sequence, need-more-data, device-busy backoff. */
688 gst_qsv_decoder_decode_frame (GstQsvDecoder * self, mfxBitstream * bitstream,
691 GstQsvDecoderPrivate *priv = self->priv;
693 guint retry_count = 0;
695 const guint retry_threshold = 1000;
696 GstQsvDecoderSurface *surface = nullptr;
700 mfxFrameSurface1 *out_surface = nullptr;
701 GstQsvDecoderTask *task = gst_qsv_decoder_get_next_task (self);
/* Reusing a task slot: finish its previous async operation first */
702 if (task->sync_point) {
703 ret = gst_qsv_decoder_finish_frame (self, task, flushing);
705 if (ret != GST_FLOW_OK)
710 surface = gst_qsv_decoder_get_next_surface (self);
713 GST_ERROR_OBJECT (self, "No available surface");
714 return GST_FLOW_ERROR;
717 status = priv->decoder->DecodeFrameAsync (bitstream, &surface->surface,
718 &out_surface, &task->sync_point);
720 if (status != MFX_ERR_NONE) {
721 GST_LOG_OBJECT (self, "DecodeFrameAsync returned %d (%s)",
722 QSV_STATUS_ARGS (status));
726 g_assert (task->sync_point != nullptr);
/* Map the surface MFX picked for output back to our pool entry via MemId */
728 for (guint i = 0; i < priv->surface_pool->len; i++) {
729 GstQsvDecoderSurface *iter =
730 &g_array_index (priv->surface_pool, GstQsvDecoderSurface, i);
732 if (iter->surface.Data.MemId == out_surface->Data.MemId) {
733 task->surface = iter;
738 if (!task->surface) {
739 GST_ERROR_OBJECT (self, "Failed to find surface");
740 gst_qsv_decoder_task_clear (task);
741 return GST_FLOW_ERROR;
744 /* Make need-output to hold underlying GstBuffer until output happens */
745 task->surface->need_output = TRUE;
750 case MFX_WRN_VIDEO_PARAM_CHANGED:{
751 if (surface->surface.Data.Locked > 0)
754 if (bitstream && bitstream->DataLength == 0)
759 case MFX_ERR_MORE_SURFACE:
761 case MFX_ERR_INCOMPATIBLE_VIDEO_PARAM:
762 GST_DEBUG_OBJECT (self, "Found new sequence");
763 return GST_QSV_DECODER_FLOW_NEW_SEQUENCE;
764 case MFX_ERR_MORE_DATA:
765 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
766 case MFX_WRN_DEVICE_BUSY:
767 GST_LOG_OBJECT (self, "GPU is busy, retry count (%d/%d)",
768 retry_count, retry_threshold);
770 if (retry_count > retry_threshold) {
771 GST_ERROR_OBJECT (self, "Give up");
772 return GST_FLOW_ERROR;
777 /* Magic number 1ms */
781 if (status < MFX_ERR_NONE) {
782 GST_ERROR_OBJECT (self, "Got error %d (%s)",
783 QSV_STATUS_ARGS (status));
784 return GST_FLOW_ERROR;
790 return GST_FLOW_ERROR;
/* Drain the decoder: keep submitting nullptr bitstreams until MFX reports
 * need-more-data or an error, then sweep the whole task pool to complete
 * any remaining pending sync points. No-op without a session/decoder. */
794 gst_qsv_decoder_drain_internal (GstQsvDecoder * self, gboolean flushing)
796 GstQsvDecoderPrivate *priv = self->priv;
797 GstFlowReturn ret = GST_FLOW_OK;
799 if (!priv->session || !priv->decoder)
803 ret = gst_qsv_decoder_decode_frame (self, nullptr, flushing);
804 } while (ret != GST_VIDEO_DECODER_FLOW_NEED_DATA && ret >= GST_FLOW_OK);
806 for (guint i = 0; i < priv->task_pool->len; i++) {
807 GstQsvDecoderTask *task = gst_qsv_decoder_get_next_task (self);
809 if (!task->sync_point)
812 ret = gst_qsv_decoder_finish_frame (self, task, flushing);
816 case GST_VIDEO_DECODER_FLOW_NEED_DATA:
817 case GST_QSV_DECODER_FLOW_NEW_SEQUENCE:
/* GstVideoDecoder::set_format — drain in-flight frames, store the new
 * input state, reset video_param with the subclass's codec id, probe
 * upstream liveness (single async-depth for low latency when live), and
 * delegate format specifics to the subclass. Actual decoder session is
 * opened later, once the sequence header has been parsed. */
827 gst_qsv_decoder_set_format (GstVideoDecoder * decoder,
828 GstVideoCodecState * state)
830 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
831 GstQsvDecoderPrivate *priv = self->priv;
832 GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
835 GST_DEBUG_OBJECT (self, "Set format with %" GST_PTR_FORMAT, state->caps);
837 gst_qsv_decoder_drain_internal (self, FALSE);
839 g_clear_pointer (&priv->input_state, gst_video_codec_state_unref);
841 priv->input_state = gst_video_codec_state_ref (state);
843 memset (&priv->video_param, 0, sizeof (mfxVideoParam));
844 priv->video_param.mfx.CodecId = klass->codec_id;
846 /* If upstream is live, we will use single async-depth for low-latency
848 query = gst_query_new_latency ();
849 if (gst_pad_peer_query (GST_VIDEO_DECODER_SINK_PAD (self), query))
850 gst_query_parse_latency (query, &priv->is_live, nullptr, nullptr);
851 gst_query_unref (query);
853 /* We will open decoder later once sequence header is parsed */
854 if (klass->set_format)
855 return klass->set_format (self, state);
/* Build the internal D3D11 buffer pool used for decoder output textures,
 * with decoder + shader-resource bind flags and the requested alignment,
 * then activate it. */
862 gst_qsv_decoder_prepare_d3d11_pool (GstQsvDecoder * self,
863 GstCaps * caps, GstVideoInfo * info, GstVideoAlignment * align)
865 GstQsvDecoderPrivate *priv = self->priv;
866 GstStructure *config;
867 GstD3D11AllocationParams *params;
868 GstD3D11Device *device = GST_D3D11_DEVICE_CAST (priv->device);
870 GST_DEBUG_OBJECT (self, "Use d3d11 memory pool");
872 priv->internal_pool = gst_d3d11_buffer_pool_new (device);
873 config = gst_buffer_pool_get_config (priv->internal_pool);
874 /* Bind to shader resource as well for this texture can be used
875 * in generic pixel shader */
876 params = gst_d3d11_allocation_params_new (device, info,
877 (GstD3D11AllocationFlags) 0,
878 D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE);
879 gst_d3d11_allocation_params_alignment (params, align);
880 gst_buffer_pool_config_set_d3d11_allocation_params (config, params);
881 gst_d3d11_allocation_params_free (params);
882 gst_buffer_pool_config_set_params (config, caps,
883 GST_VIDEO_INFO_SIZE (info), 0, 0);
884 gst_buffer_pool_set_config (priv->internal_pool, config);
885 gst_buffer_pool_set_active (priv->internal_pool, TRUE);
/* Build the internal system-memory pool (plain GstVideoBufferPool) with
 * video meta and alignment options, then activate it. */
892 gst_qsv_decoder_prepare_system_pool (GstQsvDecoder * self,
893 GstCaps * caps, GstVideoInfo * info, GstVideoAlignment * align)
895 GstQsvDecoderPrivate *priv = self->priv;
896 GstStructure *config;
898 GST_DEBUG_OBJECT (self, "Use system memory pool");
900 priv->internal_pool = gst_video_buffer_pool_new ();
901 config = gst_buffer_pool_get_config (priv->internal_pool);
902 gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
903 gst_buffer_pool_config_add_option (config,
904 GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
905 gst_buffer_pool_config_set_video_alignment (config, align);
906 gst_buffer_pool_config_set_params (config,
907 caps, GST_VIDEO_INFO_SIZE (info), 0, 0);
909 gst_buffer_pool_set_config (priv->internal_pool, config);
910 gst_buffer_pool_set_active (priv->internal_pool, TRUE);
/* (Re)create the internal output pool. Computes padding from the
 * difference between aligned and display info, picks video memory
 * (D3D11) or system memory, and reports the matching MFX IOPattern
 * through io_pattern. */
916 gst_qsv_decoder_prepare_pool (GstQsvDecoder * self, mfxU16 * io_pattern)
918 GstQsvDecoderPrivate *priv = self->priv;
919 gboolean ret = FALSE;
921 GstVideoAlignment align;
923 if (priv->internal_pool) {
924 gst_buffer_pool_set_active (priv->internal_pool, FALSE);
925 gst_clear_object (&priv->internal_pool);
928 caps = gst_video_info_to_caps (&priv->info);
930 GST_ERROR_OBJECT (self, "Failed to convet video-info to caps");
934 gst_video_alignment_reset (&align);
/* Pad display size out to the decoder-aligned size */
935 align.padding_left = priv->aligned_info.width - priv->info.width;
936 align.padding_bottom = priv->aligned_info.height - priv->info.height;
938 /* TODO: Add Linux video memory (VA/DMABuf) support */
940 priv->mem_type = GST_QSV_VIDEO_MEMORY | GST_QSV_DECODER_OUT_MEMORY;
941 *io_pattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
943 ret = gst_qsv_decoder_prepare_d3d11_pool (self, caps, &priv->info, &align);
947 priv->mem_type = GST_QSV_SYSTEM_MEMORY | GST_QSV_DECODER_OUT_MEMORY;
948 *io_pattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
950 ret = gst_qsv_decoder_prepare_system_pool (self, caps, &priv->info, &align);
952 gst_caps_unref (caps);
/* Create and initialize the MFXVideoDECODE object for the current
 * video_param: prepare the pool/IOPattern, Query/QueryIOSurf/Init the
 * decoder, then populate surface_pool from the allocator response and
 * size task_pool to AsyncDepth. On any failure the decoder handle is
 * deleted and state is reset. */
958 gst_qsv_decoder_init_session (GstQsvDecoder * self)
960 GstQsvDecoderPrivate *priv = self->priv;
961 mfxVideoParam *param = &priv->video_param;
/* NOTE(review): "¶m" below is mojibake for "&param" — confirm
 * against upstream before building */
962 mfxFrameInfo *frame_info = ¶m->mfx.FrameInfo;
963 MFXVideoDECODE *decoder_handle = nullptr;
964 mfxFrameAllocRequest request;
967 GST_DEBUG_OBJECT (self, "Init session");
969 memset (&request, 0, sizeof (mfxFrameAllocRequest));
971 gst_qsv_decoder_reset (self);
973 if (!gst_qsv_decoder_prepare_pool (self, ¶m->IOPattern)) {
974 GST_ERROR_OBJECT (self, "Failed to prepare pool");
/* Live sources get single async depth for low latency */
978 param->AsyncDepth = priv->is_live ? 1 : 4;
980 decoder_handle = new MFXVideoDECODE (priv->session);
982 /* Additional 4 frames for margin. Actually large pool size would be fine
983 * because we don't pre-allocate underlying output memory objects */
984 gst_qsv_allocator_set_options (priv->allocator, param->AsyncDepth + 4, TRUE);
986 status = decoder_handle->Query (param, param);
987 QSV_CHECK_STATUS (self, status, MFXVideoDECODE::Query);
989 status = decoder_handle->QueryIOSurf (param, &request);
990 QSV_CHECK_STATUS (self, status, MFXVideoDECODE::QueryIOSurf);
992 status = decoder_handle->Init (param);
993 QSV_CHECK_STATUS (self, status, MFXVideoDECODE::Init);
995 status = decoder_handle->GetVideoParam (param);
996 QSV_CHECK_STATUS (self, status, MFXVideoDECODE::GetVideoParam);
998 /* In case that we use video memory, MFXVideoDECODE::Init() will invoke
999 * mfxFrameAllocator::Alloc(). Pull the pre-allocated dummy GstQsvFrame
1000 * objects here and fill with GstBuffer later when needed */
1001 if (GST_QSV_MEM_TYPE_IS_SYSTEM (priv->mem_type)) {
1002 mfxFrameAllocator *alloc_handle =
1003 gst_qsv_allocator_get_allocator_handle (priv->allocator);
1005 request.Type |= MFX_MEMTYPE_EXTERNAL_FRAME;
1006 status = alloc_handle->Alloc ((mfxHDL) priv->allocator, &request,
1008 if (status != MFX_ERR_NONE) {
1009 GST_ERROR_OBJECT (self, "Failed to allocate system memory frames");
1012 } else if (!gst_qsv_allocator_get_cached_response (priv->allocator,
1014 GST_ERROR_OBJECT (self, "Failed to get cached response");
1018 g_array_set_size (priv->surface_pool, priv->response.NumFrameActual);
1019 for (guint i = 0; i < priv->surface_pool->len; i++) {
1020 GstQsvDecoderSurface *surface = &g_array_index (priv->surface_pool,
1021 GstQsvDecoderSurface, i);
1024 gst_qsv_decoder_surface_clear (surface);
1025 surface->surface.Info = *frame_info;
1026 surface->surface.Data.MemId = priv->response.mids[i];
1028 /* holds casted object without ref, to make code cleaner */
1029 surface->frame = (GstQsvFrame *) surface->surface.Data.MemId;
1031 /* This frame must not hold buffer at this moment */
1032 buf = gst_qsv_frame_peek_buffer (surface->frame);
1033 g_assert (buf == nullptr);
1036 g_array_set_size (priv->task_pool, param->AsyncDepth);
1037 for (guint i = 0; i < priv->task_pool->len; i++) {
1038 GstQsvDecoderTask *task = &g_array_index (priv->task_pool,
1039 GstQsvDecoderTask, i);
1041 gst_qsv_decoder_task_clear (task);
1043 priv->next_task_index = 0;
1045 priv->decoder = decoder_handle;
/* Error path: discard the half-initialized decoder and reset state */
1051 delete decoder_handle;
1053 gst_qsv_decoder_reset (self);
/* GstVideoDecoder::negotiate — derive display and aligned sizes from the
 * MFX frame info (crop rect if present; 16/32 alignment depending on
 * codec and interlacing), map FourCC to a GstVideoFormat, set the output
 * state, and decide whether downstream can take D3D11 memory directly.
 * Chains up to the base class negotiate. */
1059 gst_qsv_decoder_negotiate (GstVideoDecoder * decoder)
1061 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
1062 GstQsvDecoderPrivate *priv = self->priv;
1063 GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
1064 guint width, height;
1065 guint coded_width, coded_height;
1066 guint aligned_width, aligned_height;
1067 mfxVideoParam *param = &priv->video_param;
/* NOTE(review): "¶m" below is mojibake for "&param" — confirm
 * against upstream before building */
1068 mfxFrameInfo *frame_info = ¶m->mfx.FrameInfo;
1069 GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
1070 GstVideoInterlaceMode interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
1072 width = coded_width = frame_info->Width;
1073 height = coded_height = frame_info->Height;
/* Display size is the crop rect when the stream specifies one */
1075 if (frame_info->CropW > 0 && frame_info->CropH > 0) {
1076 width = frame_info->CropW;
1077 height = frame_info->CropH;
1080 switch (frame_info->FourCC) {
1081 case MFX_FOURCC_NV12:
1082 format = GST_VIDEO_FORMAT_NV12;
1084 case MFX_FOURCC_P010:
1085 format = GST_VIDEO_FORMAT_P010_10LE;
1091 if (format == GST_VIDEO_FORMAT_UNKNOWN) {
1092 GST_ERROR_OBJECT (self, "Unknown video format");
1096 aligned_width = GST_ROUND_UP_16 (coded_width);
/* H.264 interlaced content needs 32-pixel height alignment */
1097 if (klass->codec_id == MFX_CODEC_AVC) {
1098 if (frame_info->PicStruct == MFX_PICSTRUCT_PROGRESSIVE) {
1099 aligned_height = GST_ROUND_UP_16 (coded_height);
1101 aligned_height = GST_ROUND_UP_32 (coded_height);
1102 /* In theory, tff/bff can be altered in a sequence */
1103 interlace_mode = GST_VIDEO_INTERLACE_MODE_MIXED;
1106 aligned_height = GST_ROUND_UP_16 (coded_height);
1109 frame_info->Width = aligned_width;
1110 frame_info->Height = aligned_height;
1112 gst_video_info_set_interlaced_format (&priv->info, format,
1113 interlace_mode, width, height);
1114 gst_video_info_set_interlaced_format (&priv->aligned_info, format,
1115 interlace_mode, aligned_width, aligned_height);
1117 g_clear_pointer (&priv->output_state, gst_video_codec_state_unref);
1118 priv->output_state =
1119 gst_video_decoder_set_interlaced_output_state (GST_VIDEO_DECODER (self),
1120 format, interlace_mode, width, height, priv->input_state);
1122 priv->output_state->caps = gst_video_info_to_caps (&priv->output_state->info);
1123 priv->can_direct_render = FALSE;
/* Probe downstream caps for D3D11 memory support */
1126 GstCaps *peer_caps =
1127 gst_pad_get_allowed_caps (GST_VIDEO_DECODER_SRC_PAD (self));
1128 GST_DEBUG_OBJECT (self, "Allowed caps %" GST_PTR_FORMAT, peer_caps);
1130 if (!peer_caps || gst_caps_is_any (peer_caps)) {
1131 GST_DEBUG_OBJECT (self,
1132 "cannot determine output format, use system memory");
1134 GstCapsFeatures *features;
1135 guint size = gst_caps_get_size (peer_caps);
1137 for (guint i = 0; i < size; i++) {
1138 features = gst_caps_get_features (peer_caps, i);
1143 if (gst_caps_features_contains (features,
1144 GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY)) {
1145 priv->can_direct_render = TRUE;
1150 gst_clear_caps (&peer_caps);
1152 if (priv->can_direct_render) {
1153 GST_DEBUG_OBJECT (self, "Downstream supports D3D11 memory");
1154 gst_caps_set_features (priv->output_state->caps, 0,
1155 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, nullptr));
1159 GST_DEBUG_OBJECT (self,
1160 "Negotiating with %" GST_PTR_FORMAT, priv->output_state->caps);
1162 return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
1167 gst_qsv_decoder_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
1169 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
1170 GstQsvDecoderPrivate *priv = self->priv;
1172 GstBufferPool *pool = nullptr;
1173 guint n, size, min = 0, max = 0;
1175 GstStructure *config;
1176 GstD3D11AllocationParams *d3d11_params;
1177 gboolean use_d3d11_pool;
1178 gboolean has_videometa;
1179 GstD3D11Device *device = GST_D3D11_DEVICE (priv->device);
1181 gst_query_parse_allocation (query, &outcaps, nullptr);
1184 GST_DEBUG_OBJECT (decoder, "No output caps");
1188 has_videometa = gst_query_find_allocation_meta (query,
1189 GST_VIDEO_META_API_TYPE, nullptr);
1190 use_d3d11_pool = priv->can_direct_render;
1192 gst_video_info_from_caps (&vinfo, outcaps);
1193 n = gst_query_get_n_allocation_pools (query);
1195 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
1198 if (use_d3d11_pool) {
1199 if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
1200 GST_DEBUG_OBJECT (self,
1201 "Downstream pool is not d3d11, will create new one");
1202 gst_clear_object (&pool);
1204 GstD3D11BufferPool *dpool = GST_D3D11_BUFFER_POOL (pool);
1205 if (dpool->device != device) {
1206 GST_DEBUG_OBJECT (self, "Different device, will create new one");
1207 gst_clear_object (&pool);
1210 } else if (has_videometa) {
1211 /* We will use d3d11 staging buffer pool */
1212 gst_clear_object (&pool);
1218 pool = gst_d3d11_buffer_pool_new (device);
1219 else if (has_videometa)
1220 pool = gst_d3d11_staging_buffer_pool_new (device);
1222 pool = gst_video_buffer_pool_new ();
1224 size = (guint) vinfo.size;
1227 config = gst_buffer_pool_get_config (pool);
1228 gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
1229 gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
1231 /* Decoder will use internal pool to output but this pool is required for
1232 * copying in case of reverse playback */
1233 if (use_d3d11_pool) {
1234 d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);
1236 d3d11_params = gst_d3d11_allocation_params_new (device, &vinfo,
1237 (GstD3D11AllocationFlags) 0, 0);
1238 /* Use both render target (for videoprocessor) and shader resource
1239 * for (pixel shader) bind flags for downstream to be able to use consistent
1240 * conversion path even when we copy textures */
1241 d3d11_params->desc[0].BindFlags |=
1242 (D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE);
1244 gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
1245 gst_d3d11_allocation_params_free (d3d11_params);
1248 gst_buffer_pool_set_config (pool, config);
1249 /* d3d11 buffer pool will update buffer size based on allocated texture,
1250 * get size from config again */
1251 config = gst_buffer_pool_get_config (pool);
1252 gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
1253 gst_structure_free (config);
1256 gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
1258 gst_query_add_allocation_pool (query, pool, size, min, max);
1259 gst_object_unref (pool);
1261 return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
1266 gst_qsv_decoder_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
1268 /* TODO: add VA support */
1269 return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
1272 #endif /* G_OS_WIN32 */
1275 gst_qsv_decoder_handle_context_query (GstQsvDecoder * self, GstQuery * query)
1277 GstQsvDecoderPrivate *priv = self->priv;
1280 return gst_d3d11_handle_context_query (GST_ELEMENT (self), query,
1281 (GstD3D11Device *) priv->device);
1283 return gst_va_handle_context_query (GST_ELEMENT (self), query,
1284 (GstVaDisplay *) priv->device);
1289 gst_qsv_decoder_sink_query (GstVideoDecoder * decoder, GstQuery * query)
1291 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
1293 switch (GST_QUERY_TYPE (query)) {
1294 case GST_QUERY_CONTEXT:
1295 if (gst_qsv_decoder_handle_context_query (self, query))
1302 return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
1306 gst_qsv_decoder_src_query (GstVideoDecoder * decoder, GstQuery * query)
1308 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
1310 switch (GST_QUERY_TYPE (query)) {
1311 case GST_QUERY_CONTEXT:
1312 if (gst_qsv_decoder_handle_context_query (self, query))
1319 return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
1322 static GstFlowReturn
1323 gst_qsv_decoder_handle_frame (GstVideoDecoder * decoder,
1324 GstVideoCodecFrame * frame)
1326 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
1327 GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
1328 GstQsvDecoderPrivate *priv = self->priv;
1332 GstFlowReturn ret = GST_FLOW_ERROR;
1333 gboolean was_reconfigured = FALSE;
1334 GstBuffer *input_buf = nullptr;
1336 if (klass->process_input) {
1337 input_buf = klass->process_input (self, priv->decoder ? FALSE : TRUE,
1338 frame->input_buffer);
1340 input_buf = gst_buffer_ref (frame->input_buffer);
1343 if (!input_buf || !gst_buffer_map (input_buf, &info, GST_MAP_READ)) {
1344 GST_ERROR_OBJECT (self, "Failed to map input buffer");
1345 gst_clear_buffer (&input_buf);
1346 gst_video_decoder_release_frame (decoder, frame);
1347 return GST_FLOW_ERROR;
1350 memset (&bs, 0, sizeof (mfxBitstream));
1352 bs.Data = (mfxU8 *) info.data;
1353 bs.DataLength = bs.MaxLength = (mfxU32) info.size;
1354 bs.TimeStamp = gst_qsv_timestamp_from_gst (frame->pts);
1357 if (!priv->decoder) {
1358 status = MFXVideoDECODE_DecodeHeader (priv->session,
1359 &bs, &priv->video_param);
1361 if (status != MFX_ERR_NONE) {
1362 if (status == MFX_ERR_MORE_DATA) {
1363 GST_WARNING_OBJECT (self, "Need more date to parse header");
1366 GST_ERROR_OBJECT (self, "Failed to parse header %d (%s)",
1367 QSV_STATUS_ARGS (status));
1370 goto unmap_and_error;
1373 if (!gst_video_decoder_negotiate (decoder)) {
1374 GST_ERROR_OBJECT (self, "Failed to negotiate");
1375 ret = GST_FLOW_NOT_NEGOTIATED;
1376 goto unmap_and_error;
1379 if (!gst_qsv_decoder_init_session (self)) {
1380 GST_ERROR_OBJECT (self, "Failed to init session");
1381 return GST_FLOW_ERROR;
1385 if (!priv->decoder) {
1386 GST_ERROR_OBJECT (self, "Decoder object was not configured");
1387 ret = GST_FLOW_NOT_NEGOTIATED;
1388 goto unmap_and_error;
1391 ret = gst_qsv_decoder_decode_frame (self, &bs, FALSE);
1394 case GST_QSV_DECODER_FLOW_NEW_SEQUENCE:
1395 if (!was_reconfigured) {
1396 gst_qsv_decoder_drain_internal (self, FALSE);
1397 gst_qsv_decoder_reset (self);
1398 was_reconfigured = TRUE;
1403 ret = GST_FLOW_ERROR;
1405 case GST_VIDEO_DECODER_FLOW_NEED_DATA:
1412 gst_buffer_unmap (input_buf, &info);
1413 gst_buffer_unref (input_buf);
1414 gst_video_codec_frame_unref (frame);
1419 gst_buffer_unmap (input_buf, &info);
1420 gst_buffer_unref (input_buf);
1421 gst_video_decoder_release_frame (decoder, frame);
1427 gst_qsv_decoder_flush (GstVideoDecoder * decoder)
1429 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
1431 GST_DEBUG_OBJECT (self, "Flush");
1433 gst_qsv_decoder_drain_internal (self, TRUE);
1438 static GstFlowReturn
1439 gst_qsv_decoder_finish (GstVideoDecoder * decoder)
1441 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
1443 GST_DEBUG_OBJECT (self, "Finish");
1445 return gst_qsv_decoder_drain_internal (self, FALSE);
1448 static GstFlowReturn
1449 gst_qsv_decoder_drain (GstVideoDecoder * decoder)
1451 GstQsvDecoder *self = GST_QSV_DECODER (decoder);
1453 GST_DEBUG_OBJECT (self, "Drain");
1455 return gst_qsv_decoder_drain_internal (self, FALSE);