2 * Copyright (C) 2021 Seungha Yang <seungha@centricular.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
21 * SECTION:element-d3d11deinterlaceelement
22 * @title: d3d11deinterlaceelement
24 * Deinterlacing interlaced video frames to progressive video frames by using
25 * ID3D11VideoProcessor API. Depending on the hardware it runs on,
26 * this element will only support a very limited set of video formats.
27 * Use #d3d11deinterlace instead, which will take care of conversion.
37 #include <gst/video/video.h>
38 #include <gst/base/gstbasetransform.h>
40 #include "gstd3d11deinterlace.h"
41 #include "gstd3d11pluginutils.h"
46 using namespace Microsoft::WRL;
50 GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_deinterlace_debug);
51 #define GST_CAT_DEFAULT gst_d3d11_deinterlace_debug
56 /* Deinterlacing Methods:
57 * Direct3D11 provides Blend, Bob, Adaptive, Motion Compensation, and
58 * Inverse Telecine methods. But depending on video processor device,
59 * some of method might not be supported.
 * - Blend: the two fields of an interlaced frame are blended into a single
61 * progressive frame. Output rate will be half of input (e.g., 60i -> 30p)
62 * but due to the way of framerate signalling of GStreamer, that is, it uses
63 * frame rate, not field rate for interlaced stream, in/output framerate
64 * of caps will be identical.
65 * - Bob: missing field lines are interpolated from the lines above and below.
66 * Output rate will be the same as that of input (e.g., 60i -> 60p).
 *   In other words, the video processor will generate two frames from the two
 *   fields of an interlaced frame.
69 * - Adaptive, Motion Compensation: future and past frames are used for
 *   reference frames for the deinterlacing process. User should provide a sufficient
71 * number of reference frames, otherwise processor device will fallback to
74 * Direct3D11 doesn't provide a method for explicit deinterlacing method
75 * selection. Instead, it could be done indirectly.
76 * - Blend: sets output rate as half via VideoProcessorSetStreamOutputRate().
77 * - Bob: sets output rate as normal. And performs VideoProcessorBlt() twice per
78 * a interlaced frame. D3D11_VIDEO_PROCESSOR_STREAM::OutputIndex needs to be
79 * incremented per field (e.g., OutputIndex = 0 for the first field,
80 * and 1 for the second field).
81 * - Adaptive, Motion Compensation: in addition to the requirement of Bob,
82 * user should provide reference frames via
83 * D3D11_VIDEO_PROCESSOR_STREAM::ppPastSurfaces and
84 * D3D11_VIDEO_PROCESSOR_STREAM::ppFutureSurfaces
/* g_queue_clear_full is available since 2.60 */
#if !GLIB_CHECK_VERSION(2,60,0)
/* Compat shim with the same semantics as GLib's g_queue_clear_full():
 * run free_func on every queued element, then empty the queue */
#define g_queue_clear_full gst_d3d11_deinterlace_g_queue_clear_full
gst_d3d11_deinterlace_g_queue_clear_full (GQueue * queue,
    GDestroyNotify free_func)
  g_return_if_fail (queue != NULL);

  /* free_func may be NULL, in which case elements are merely dropped */
  if (free_func != NULL)
    g_queue_foreach (queue, (GFunc) free_func, NULL);

  g_queue_clear (queue);
  /* Each flag value aliases the matching D3D11 video processor capability
   * bit, so support checks reduce to a bitwise AND against
   * D3D11_VIDEO_PROCESSOR_CAPS.ProcessorCaps */
  GST_D3D11_DEINTERLACE_METHOD_BLEND =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BLEND,
  GST_D3D11_DEINTERLACE_METHOD_BOB =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BOB,
  /* NOTE(review): "ADAPTVIE" is a long-standing misspelling of "ADAPTIVE";
   * kept as-is because the identifier is referenced throughout this file */
  GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_ADAPTIVE,
  GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_MOTION_COMPENSATION,

  /* TODO: INVERSE_TELECINE */
} GstD3D11DeinterlaceMethod;

/**
 * GstD3D11DeinterlaceMethod:
 *
 * Deinterlacing method
 */
#define GST_TYPE_D3D11_DEINTERLACE_METHOD (gst_d3d11_deinterlace_method_type())
gst_d3d11_deinterlace_method_type (void)
  /* GType for the method flags; registered once, cached for process life */
  static gsize method_type = 0;

  if (g_once_init_enter (&method_type)) {
    static const GFlagsValue method_types[] = {
      {GST_D3D11_DEINTERLACE_METHOD_BLEND,
          "Blend: Blending top/bottom field pictures into one frame. "
          "Framerate will be preserved (e.g., 60i -> 30p)", "blend"},
      {GST_D3D11_DEINTERLACE_METHOD_BOB,
          "Bob: Interpolating missing lines by using the adjacent lines. "
          "Framerate will be doubled (e,g, 60i -> 60p)", "bob"},
      {GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE,
          "Adaptive: Interpolating missing lines by using spatial/temporal references. "
          "Framerate will be doubled (e,g, 60i -> 60p)",
      {GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION,
          "Motion Compensation: Recreating missing lines by using motion vector. "
          "Framerate will be doubled (e,g, 60i -> 60p)", "mocomp"},

    GType tmp = g_flags_register_static ("GstD3D11DeinterlaceMethod",

    g_once_init_leave (&method_type, tmp);

  return (GType) method_type;
  /* Bitmask of methods the video processor reported as usable */
  GstD3D11DeinterlaceMethod supported_methods;
  /* Method to fall back to when the requested one is unsupported */
  GstD3D11DeinterlaceMethod default_method;

  /* Reference-frame limits reported by the rate conversion caps */
  guint max_past_frames;
  guint max_future_frames;
} GstD3D11DeinterlaceDeviceCaps;

  /* Registered GType for the per-device subclass */
  GType deinterlace_type;

  GstD3D11DeinterlaceDeviceCaps device_caps;
} GstD3D11DeinterlaceClassData;
/* Allocate a zero-initialized class data record; caller owns the
 * initial reference */
static GstD3D11DeinterlaceClassData *
gst_d3d11_deinterlace_class_data_new (void)
  GstD3D11DeinterlaceClassData *self = g_new0 (GstD3D11DeinterlaceClassData, 1);

/* Atomically take an additional reference */
static GstD3D11DeinterlaceClassData *
gst_d3d11_deinterlace_class_data_ref (GstD3D11DeinterlaceClassData * data)
  g_assert (data != NULL);

  g_atomic_int_add (&data->ref_count, 1);

/* Drop a reference; frees caps and description with the last one */
gst_d3d11_deinterlace_class_data_unref (GstD3D11DeinterlaceClassData * data)
  g_assert (data != NULL);

  if (g_atomic_int_dec_and_test (&data->ref_count)) {
    gst_clear_caps (&data->sink_caps);
    gst_clear_caps (&data->src_caps);
    g_free (data->description);

  PROP_SUPPORTED_METHODS,

/* hardcoded maximum queue size for each past/future frame queue */
#define MAX_NUM_REFERENCES 2
typedef struct _GstD3D11Deinterlace
  GstBaseTransform parent;

  /* Negotiated video info for the sink and src pads */
  GstVideoInfo in_info;
  GstVideoInfo out_info;
  /* Calculated buffer duration by using upstream framerate */
  GstClockTime default_buffer_duration;

  GstD3D11Device *device;

  /* D3D11 video API handles; ref'd in open(), released in close() */
  ID3D11VideoDevice *video_device;
  ID3D11VideoContext *video_context;
  ID3D11VideoProcessorEnumerator *video_enum;
  ID3D11VideoProcessor *video_proc;

  GstD3D11DeinterlaceMethod method;

  /* Reference-frame queues feeding ppPastSurfaces/ppFutureSurfaces */
  GQueue past_frame_queue;
  GQueue future_frame_queue;
  GstBuffer *to_process;

  /* Queue-depth limits derived from the selected method */
  guint max_past_frames;
  guint max_future_frames;

  /* D3D11_VIDEO_PROCESSOR_STREAM::InputFrameOrField */

  /* Clear/Update per submit_input_buffer() */
  guint num_output_per_input;
  guint num_transformed;
  gboolean first_output;

  /* Staging pools used when in/out buffers are not device-compatible */
  GstBufferPool *fallback_in_pool;
  GstBufferPool *fallback_out_pool;
} GstD3D11Deinterlace;

typedef struct _GstD3D11DeinterlaceClass
  GstBaseTransformClass parent_class;

  GstD3D11DeinterlaceDeviceCaps device_caps;
} GstD3D11DeinterlaceClass;

static GstElementClass *parent_class = NULL;

#define GST_D3D11_DEINTERLACE(object) ((GstD3D11Deinterlace *) (object))
#define GST_D3D11_DEINTERLACE_GET_CLASS(object) \
    (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object), \
    GstD3D11DeinterlaceClass))
/* Recursive lock guarding queue/processor state across streaming threads */
#define GST_D3D11_DEINTERLACE_LOCK(self) \
    g_rec_mutex_lock (&GST_D3D11_DEINTERLACE (self)->lock);
#define GST_D3D11_DEINTERLACE_UNLOCK(self) \
    g_rec_mutex_unlock (&GST_D3D11_DEINTERLACE (self)->lock);
/* Internal helpers */
gst_d3d11_deinterlace_update_method (GstD3D11Deinterlace * self);
static void gst_d3d11_deinterlace_reset (GstD3D11Deinterlace * self);
static GstFlowReturn gst_d3d11_deinterlace_drain (GstD3D11Deinterlace * self);

/* GObjectClass vfunc */
static void gst_d3d11_deinterlace_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_d3d11_deinterlace_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_d3d11_deinterlace_finalize (GObject * object);

/* GstElementClass vfunc */
static void gst_d3d11_deinterlace_set_context (GstElement * element,
    GstContext * context);

/* GstBaseTransformClass vfunc */
static gboolean gst_d3d11_deinterlace_start (GstBaseTransform * trans);
static gboolean gst_d3d11_deinterlace_stop (GstBaseTransform * trans);
static gboolean gst_d3d11_deinterlace_query (GstBaseTransform * trans,
    GstPadDirection direction, GstQuery * query);
static GstCaps *gst_d3d11_deinterlace_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static GstCaps *gst_d3d11_deinterlace_fixate_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
gst_d3d11_deinterlace_propose_allocation (GstBaseTransform * trans,
    GstQuery * decide_query, GstQuery * query);
gst_d3d11_deinterlace_decide_allocation (GstBaseTransform * trans,
static gboolean gst_d3d11_deinterlace_set_caps (GstBaseTransform * trans,
    GstCaps * incaps, GstCaps * outcaps);
gst_d3d11_deinterlace_submit_input_buffer (GstBaseTransform * trans,
    gboolean is_discont, GstBuffer * input);
gst_d3d11_deinterlace_generate_output (GstBaseTransform * trans,
    GstBuffer ** outbuf);
gst_d3d11_deinterlace_transform (GstBaseTransform * trans, GstBuffer * inbuf,
static gboolean gst_d3d11_deinterlace_sink_event (GstBaseTransform * trans,
333 gst_d3d11_deinterlace_class_init (GstD3D11DeinterlaceClass * klass,
336 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
337 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
338 GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
339 GstD3D11DeinterlaceClassData *cdata = (GstD3D11DeinterlaceClassData *) data;
342 parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
344 gobject_class->get_property = gst_d3d11_deinterlace_get_property;
345 gobject_class->set_property = gst_d3d11_deinterlace_set_property;
346 gobject_class->finalize = gst_d3d11_deinterlace_finalize;
348 g_object_class_install_property (gobject_class, PROP_ADAPTER,
349 g_param_spec_uint ("adapter", "Adapter",
350 "DXGI Adapter index for creating device",
351 0, G_MAXUINT32, cdata->adapter,
352 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
353 g_object_class_install_property (gobject_class, PROP_DEVICE_ID,
354 g_param_spec_uint ("device-id", "Device Id",
355 "DXGI Device ID", 0, G_MAXUINT32, 0,
356 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
357 g_object_class_install_property (gobject_class, PROP_VENDOR_ID,
358 g_param_spec_uint ("vendor-id", "Vendor Id",
359 "DXGI Vendor ID", 0, G_MAXUINT32, 0,
360 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
361 g_object_class_install_property (gobject_class, PROP_METHOD,
362 g_param_spec_flags ("method", "Method",
363 "Deinterlace Method. Use can set multiple methods as a flagset "
364 "and element will select one of method automatically. "
365 "If deinterlacing device failed to deinterlace with given mode, "
366 "fallback might happen by the device",
367 GST_TYPE_D3D11_DEINTERLACE_METHOD, cdata->device_caps.default_method,
368 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
369 GST_PARAM_MUTABLE_READY)));
370 g_object_class_install_property (gobject_class, PROP_SUPPORTED_METHODS,
371 g_param_spec_flags ("supported-methods", "Supported Methods",
372 "Set of supported deinterlace methods by device",
373 GST_TYPE_D3D11_DEINTERLACE_METHOD,
374 cdata->device_caps.supported_methods,
375 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
377 element_class->set_context =
378 GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_set_context);
380 long_name = g_strdup_printf ("Direct3D11 %s Deinterlacer",
382 gst_element_class_set_metadata (element_class, long_name,
383 "Filter/Effect/Video/Deinterlace/Hardware",
384 "A Direct3D11 based deinterlacer",
385 "Seungha Yang <seungha@centricular.com>");
388 gst_element_class_add_pad_template (element_class,
389 gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
391 gst_element_class_add_pad_template (element_class,
392 gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
395 trans_class->passthrough_on_same_caps = TRUE;
397 trans_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_start);
398 trans_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_stop);
399 trans_class->query = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_query);
400 trans_class->transform_caps =
401 GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_transform_caps);
402 trans_class->fixate_caps =
403 GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_fixate_caps);
404 trans_class->propose_allocation =
405 GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_propose_allocation);
406 trans_class->decide_allocation =
407 GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_decide_allocation);
408 trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_set_caps);
409 trans_class->submit_input_buffer =
410 GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_submit_input_buffer);
411 trans_class->generate_output =
412 GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_generate_output);
413 trans_class->transform = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_transform);
414 trans_class->sink_event =
415 GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_sink_event);
417 klass->adapter = cdata->adapter;
418 klass->device_id = cdata->device_id;
419 klass->vendor_id = cdata->vendor_id;
420 klass->device_caps = cdata->device_caps;
422 gst_d3d11_deinterlace_class_data_unref (cdata);
424 gst_type_mark_as_plugin_api (GST_TYPE_D3D11_DEINTERLACE_METHOD,
425 (GstPluginAPIFlags) 0);
gst_d3d11_deinterlace_init (GstD3D11Deinterlace * self)
  GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);

  /* Start from the device's default method, then normalize queue limits
   * via update_method() */
  self->method = klass->device_caps.default_method;
  self->default_buffer_duration = GST_CLOCK_TIME_NONE;
  gst_d3d11_deinterlace_update_method (self);

  g_queue_init (&self->past_frame_queue);
  g_queue_init (&self->future_frame_queue);
  g_rec_mutex_init (&self->lock);
gst_d3d11_deinterlace_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (object);
  GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (object);

      /* adapter/device-id/vendor-id are fixed per registered subclass */
      g_value_set_uint (value, klass->adapter);
      g_value_set_uint (value, klass->device_id);
      g_value_set_uint (value, klass->vendor_id);
      /* currently selected method (flag set) */
      g_value_set_flags (value, self->method);
    case PROP_SUPPORTED_METHODS:
      g_value_set_flags (value, klass->device_caps.supported_methods);
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
472 gst_d3d11_deinterlace_update_method (GstD3D11Deinterlace * self)
474 GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);
475 GstD3D11DeinterlaceMethod requested_method = self->method;
476 gboolean updated = TRUE;
478 /* Verify whether requested method is supported */
479 if ((self->method & klass->device_caps.supported_methods) == 0) {
480 #ifndef GST_DISABLE_GST_DEBUG
481 gchar *supported, *requested;
483 supported = g_flags_to_string (GST_TYPE_D3D11_DEINTERLACE_METHOD,
484 klass->device_caps.supported_methods);
485 requested = g_flags_to_string (GST_TYPE_D3D11_DEINTERLACE_METHOD,
486 klass->device_caps.supported_methods);
488 GST_WARNING_OBJECT (self,
489 "Requested method %s is not supported (supported: %s)",
490 requested, supported);
496 self->method = klass->device_caps.default_method;
501 /* Drop not supported methods */
502 self->method = (GstD3D11DeinterlaceMethod)
503 (klass->device_caps.supported_methods & self->method);
505 /* Single method was requested? */
506 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND ||
507 self->method == GST_D3D11_DEINTERLACE_METHOD_BOB ||
508 self->method == GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE ||
509 self->method == GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION) {
510 if (self->method == requested_method)
513 /* Pick single method from requested */
514 if ((self->method & GST_D3D11_DEINTERLACE_METHOD_BOB) ==
515 GST_D3D11_DEINTERLACE_METHOD_BOB) {
516 self->method = GST_D3D11_DEINTERLACE_METHOD_BOB;
517 } else if ((self->method & GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE) ==
518 GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE) {
519 self->method = GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE;
520 } else if ((self->method & GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION)
521 == GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION) {
522 self->method = GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION;
523 } else if ((self->method & GST_D3D11_DEINTERLACE_METHOD_BLEND) ==
524 GST_D3D11_DEINTERLACE_METHOD_BLEND) {
525 self->method = GST_D3D11_DEINTERLACE_METHOD_BLEND;
527 self->method = klass->device_caps.default_method;
528 g_assert_not_reached ();
533 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND) {
534 /* Both methods don't use reference frame for deinterlacing */
535 self->max_past_frames = self->max_future_frames = 0;
536 } else if (self->method == GST_D3D11_DEINTERLACE_METHOD_BOB) {
537 /* To calculate timestamp and duration of output fraems, we will hold one
538 * future frame even though processor device will not use reference */
539 self->max_past_frames = 0;
540 self->max_future_frames = 1;
542 /* FIXME: how many frames should be allowed? also, this needs to be
544 self->max_past_frames = MIN (klass->device_caps.max_past_frames,
547 /* Likewise Bob, we need at least one future frame for timestamp/duration
549 self->max_future_frames =
550 MAX (MIN (klass->device_caps.max_future_frames, MAX_NUM_REFERENCES), 1);
gst_d3d11_deinterlace_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (object);

      gboolean notify_update = FALSE;

      /* update_method() may replace the requested method with a supported
       * one; notify only when the effective value changed */
      GST_OBJECT_LOCK (self);
      self->method = (GstD3D11DeinterlaceMethod) g_value_get_flags (value);
      notify_update = gst_d3d11_deinterlace_update_method (self);
      GST_OBJECT_UNLOCK (self);

        g_object_notify (object, "method");

      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
gst_d3d11_deinterlace_finalize (GObject * object)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (object);

  /* All device resources were released in stop(); only the lock remains */
  g_rec_mutex_clear (&self->lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
gst_d3d11_deinterlace_set_context (GstElement * element, GstContext * context)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (element);
  GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);

  /* Accept a shared GstD3D11Device matching our fixed adapter index */
  gst_d3d11_handle_set_context (element, context, klass->adapter,

  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
604 gst_d3d11_deinterlace_open (GstD3D11Deinterlace * self)
606 ID3D11VideoDevice *video_device;
607 ID3D11VideoContext *video_context;
609 video_device = gst_d3d11_device_get_video_device_handle (self->device);
611 GST_ERROR_OBJECT (self, "ID3D11VideoDevice is not availale");
615 video_context = gst_d3d11_device_get_video_context_handle (self->device);
616 if (!video_context) {
617 GST_ERROR_OBJECT (self, "ID3D11VideoContext is not available");
621 self->video_device = video_device;
622 video_device->AddRef ();
624 self->video_context = video_context;
625 video_context->AddRef ();
/* Must be called with lock taken */
gst_d3d11_deinterlace_reset_history (GstD3D11Deinterlace * self)
  /* Restart the per-stream bookkeeping used by submit_input_buffer() /
   * generate_output() */
  self->input_index = 0;
  self->num_output_per_input = 1;
  self->num_transformed = 0;
  self->first_output = TRUE;

  /* Drop all queued reference frames and any frame pending processing */
  g_queue_clear_full (&self->past_frame_queue,
      (GDestroyNotify) gst_buffer_unref);
  g_queue_clear_full (&self->future_frame_queue,
      (GDestroyNotify) gst_buffer_unref);
  gst_clear_buffer (&self->to_process);
gst_d3d11_deinterlace_reset (GstD3D11Deinterlace * self)
  GST_D3D11_DEINTERLACE_LOCK (self);
  /* Deactivate and drop both fallback staging pools */
  if (self->fallback_in_pool) {
    gst_buffer_pool_set_active (self->fallback_in_pool, FALSE);
    gst_object_unref (self->fallback_in_pool);
    self->fallback_in_pool = NULL;

  if (self->fallback_out_pool) {
    gst_buffer_pool_set_active (self->fallback_out_pool, FALSE);
    gst_object_unref (self->fallback_out_pool);
    self->fallback_out_pool = NULL;

  /* Release processor objects; they are rebuilt on the next set_caps() */
  GST_D3D11_CLEAR_COM (self->video_enum);
  GST_D3D11_CLEAR_COM (self->video_proc);

  gst_d3d11_deinterlace_reset_history (self);
  self->default_buffer_duration = GST_CLOCK_TIME_NONE;

  GST_D3D11_DEINTERLACE_UNLOCK (self);
gst_d3d11_deinterlace_close (GstD3D11Deinterlace * self)
  gst_d3d11_deinterlace_reset (self);

  /* Release the video API refs taken in open(), then the device itself */
  GST_D3D11_CLEAR_COM (self->video_device);
  GST_D3D11_CLEAR_COM (self->video_context);

  gst_clear_object (&self->device);
gst_d3d11_deinterlace_start (GstBaseTransform * trans)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);

  /* Obtain (or share) the GstD3D11Device for our adapter */
  if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), klass->adapter,
    GST_ERROR_OBJECT (self, "Couldn't create d3d11device");

  if (!gst_d3d11_deinterlace_open (self)) {
    GST_ERROR_OBJECT (self, "Couldn't open video device");
    /* roll back partially acquired state */
    gst_d3d11_deinterlace_close (self);

gst_d3d11_deinterlace_stop (GstBaseTransform * trans)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);

  gst_d3d11_deinterlace_close (self);
gst_d3d11_deinterlace_query (GstBaseTransform * trans,
    GstPadDirection direction, GstQuery * query)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONTEXT:
      /* Answer d3d11 context queries with our shared device */
      if (gst_d3d11_handle_context_query (GST_ELEMENT_CAST (self),
              query, self->device)) {

  /* Everything else goes to the base class */
  return GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
/* Build a copy of @caps with interlacing-related fields removed (and
 * optionally framerate, since non-blend methods double it), but only for
 * structures carrying D3D11 memory features we can actually process */
gst_d3d11_deinterlace_remove_interlace_info (GstCaps * caps,
    gboolean remove_framerate)
  GstCapsFeatures *feature =
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY);

  res = gst_caps_new_empty ();

  n = gst_caps_get_size (caps);
  for (i = 0; i < n; i++) {
    st = gst_caps_get_structure (caps, i);
    f = gst_caps_get_features (caps, i);

    /* If this is already expressed by the existing caps
     * skip this structure */
    if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))

    st = gst_structure_copy (st);
    /* Only remove format info for the cases when we can actually convert */
    if (!gst_caps_features_is_any (f)
        && gst_caps_features_is_equal (f, feature)) {
      if (remove_framerate) {
        gst_structure_remove_fields (st, "interlace-mode", "field-order",
        gst_structure_remove_fields (st, "interlace-mode", "field-order", NULL);

    gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));

  gst_caps_features_free (feature);
gst_d3d11_deinterlace_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);

  /* Get all possible caps that we can transform to */
  tmp = gst_d3d11_deinterlace_remove_interlace_info (caps,
      /* Non-blend mode will double framerate */
      self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND);

  /* Apply peer filter caps, if provided */
    tmp2 = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (tmp);

  GST_DEBUG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " into %"
      GST_PTR_FORMAT, caps, result);
gst_d3d11_deinterlace_fixate_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  const gchar *interlace_mode;

  othercaps = gst_caps_truncate (othercaps);
  othercaps = gst_caps_make_writable (othercaps);

  /* Upstream direction: nothing special to adjust, just fixate */
  if (direction == GST_PAD_SRC)
    return gst_caps_fixate (othercaps);

  /* Fixate a copy of the sink caps so we can inspect framerate and
   * interlace-mode of the actual input */
  tmp = gst_caps_copy (caps);
  tmp = gst_caps_fixate (tmp);

  if (!gst_video_info_from_caps (&info, tmp)) {
    GST_WARNING_OBJECT (self, "Invalid caps %" GST_PTR_FORMAT, caps);
    gst_caps_unref (tmp);

    return gst_caps_fixate (othercaps);

  s = gst_caps_get_structure (tmp, 0);
  if (gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d)) {
    /* for non-blend method, output framerate will be doubled */
    if (self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND &&
        GST_VIDEO_INFO_IS_INTERLACED (&info)) {

    gst_caps_set_simple (othercaps,
        "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);

  interlace_mode = gst_structure_get_string (s, "interlace-mode");
  if (g_strcmp0 ("progressive", interlace_mode) == 0) {
    /* Just forward interlace-mode=progressive.
     * By this way, basetransform will enable passthrough for non-interlaced
     */
    gst_caps_set_simple (othercaps,
        "interlace-mode", G_TYPE_STRING, "progressive", NULL);

  gst_caps_unref (tmp);

  return gst_caps_fixate (othercaps);
gst_d3d11_deinterlace_propose_allocation (GstBaseTransform * trans,
    GstQuery * decide_query, GstQuery * query)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  GstD3D11AllocationParams *d3d11_params;
  guint min_buffers = 0;

  if (!GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
          decide_query, query))

  /* passthrough, we're done */
  if (decide_query == NULL)

  gst_query_parse_allocation (query, &caps, NULL);

  if (!gst_video_info_from_caps (&info, caps))

  /* Reuse an upstream-proposed D3D11 pool if there is one */
  n_pools = gst_query_get_n_allocation_pools (query);
  for (i = 0; i < n_pools; i++) {
    gst_query_parse_nth_allocation_pool (query, i, &pool, NULL, NULL, NULL);
    if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
      gst_object_unref (pool);

    pool = gst_d3d11_buffer_pool_new (self->device);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);

    /* Render-target binding is required for video processor output */
    d3d11_params = gst_d3d11_allocation_params_new (self->device, &info,
        (GstD3D11AllocationFlags) 0, D3D11_BIND_RENDER_TARGET);

    d3d11_params->desc[0].BindFlags |= D3D11_BIND_RENDER_TARGET;

  gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
  gst_d3d11_allocation_params_free (d3d11_params);

  if (self->method == GST_D3D11_DEINTERLACE_METHOD_BOB) {
    /* For non-blend methods, we will produce two progressive frames from
     * a single interlaced frame. To determine timestamp and duration,
     * we might need to hold one past frame if buffer duration is unknown */
  } else if (self->method == GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE ||
      self->method == GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION) {
    /* For advanced deinterlacing methods, we will hold more frame so that
     * device can use them as reference frames */
    min_buffers += self->max_past_frames;
    min_buffers += self->max_future_frames;
    /* And one for current frame */

    /* we will hold at least one frame for timestamp/duration calculation */
    min_buffers = MAX (min_buffers, 2);

  /* size will be updated by d3d11 buffer pool */
  gst_buffer_pool_config_set_params (config, caps, 0, min_buffers, 0);

  if (!gst_buffer_pool_set_config (pool, config))

  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  gst_query_add_allocation_meta (query,
      GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL);

  size = GST_D3D11_BUFFER_POOL (pool)->buffer_size;
  gst_query_add_allocation_pool (query, pool, size, min_buffers, 0);

  gst_object_unref (pool);

  /* error path: pool config rejected */
  GST_ERROR_OBJECT (self, "failed to set config");
  gst_object_unref (pool);
gst_d3d11_deinterlace_decide_allocation (GstBaseTransform * trans,
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstCaps *outcaps = NULL;
  GstBufferPool *pool = NULL;
  guint size, min = 0, max = 0;
  GstStructure *config;
  GstD3D11AllocationParams *d3d11_params;
  gboolean update_pool = FALSE;

  gst_query_parse_allocation (query, &outcaps, NULL);

  if (!gst_video_info_from_caps (&info, outcaps))

  size = GST_VIDEO_INFO_SIZE (&info);

  /* Prefer a downstream-proposed pool, but only if it is a D3D11 pool */
  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    if (pool && !GST_IS_D3D11_BUFFER_POOL (pool)) {
      gst_object_unref (pool);

    pool = gst_d3d11_buffer_pool_new (self->device);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);

    /* Output buffers are video processor render targets */
    d3d11_params = gst_d3d11_allocation_params_new (self->device, &info,
        (GstD3D11AllocationFlags) 0, D3D11_BIND_RENDER_TARGET);

    d3d11_params->desc[0].BindFlags |= D3D11_BIND_RENDER_TARGET;

  gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
  gst_d3d11_allocation_params_free (d3d11_params);

  gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  /* pool may have adjusted the buffer size */
  size = GST_D3D11_BUFFER_POOL (pool)->buffer_size;

    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans,
/* (Re)create the staging pools used when in/out buffers are not directly
 * usable by the video processor. Drops any previously queued reference
 * frames since they would belong to the old pools/caps. */
gst_d3d11_deinterlace_prepare_fallback_pool (GstD3D11Deinterlace * self,
    GstCaps * in_caps, GstVideoInfo * in_info, GstCaps * out_caps,
    GstVideoInfo * out_info)
  GstD3D11AllocationParams *d3d11_params;

  /* Clearing potentially remaining resource here would be redundant.
   * Just to be safe enough */
  g_queue_clear_full (&self->past_frame_queue,
      (GDestroyNotify) gst_buffer_unref);
  g_queue_clear_full (&self->future_frame_queue,
      (GDestroyNotify) gst_buffer_unref);

  if (self->fallback_in_pool) {
    gst_buffer_pool_set_active (self->fallback_in_pool, FALSE);
    gst_object_unref (self->fallback_in_pool);
    self->fallback_in_pool = NULL;

  if (self->fallback_out_pool) {
    gst_buffer_pool_set_active (self->fallback_out_pool, FALSE);
    gst_object_unref (self->fallback_out_pool);
    self->fallback_out_pool = NULL;

  /* Empty bind flag is allowed for video processor input */
  d3d11_params = gst_d3d11_allocation_params_new (self->device, in_info,
      (GstD3D11AllocationFlags) 0, 0);
  self->fallback_in_pool = gst_d3d11_buffer_pool_new_with_options (self->device,
      in_caps, d3d11_params, 0, 0);
  gst_d3d11_allocation_params_free (d3d11_params);

  if (!self->fallback_in_pool) {
    GST_ERROR_OBJECT (self, "Failed to create input fallback buffer pool");

  /* For processor output, render target bind flag is required */
  d3d11_params = gst_d3d11_allocation_params_new (self->device, out_info,
      (GstD3D11AllocationFlags) 0, D3D11_BIND_RENDER_TARGET);
  self->fallback_out_pool =
      gst_d3d11_buffer_pool_new_with_options (self->device,
      out_caps, d3d11_params, 0, 0);
  gst_d3d11_allocation_params_free (d3d11_params);

  if (!self->fallback_out_pool) {
    GST_ERROR_OBJECT (self, "Failed to create output fallback buffer pool");
    gst_clear_object (&self->fallback_out_pool);
/* GstBaseTransform::set_caps vfunc.
 * Validates negotiated caps, decides passthrough vs. processing, creates the
 * D3D11 video processor enumerator + processor for the chosen deinterlace
 * method, prepares fallback pools, and configures stream rectangles and
 * output rate under the device lock.
 * NOTE(review): this listing has elided lines (missing braces / return
 * statements between the numbered lines); comments annotate only the
 * visible statements. */
1080 gst_d3d11_deinterlace_set_caps (GstBaseTransform * trans,
1081 GstCaps * incaps, GstCaps * outcaps)
1083 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1084 GstVideoInfo in_info, out_info;
1086 ComPtr<ID3D11VideoProcessorEnumerator> video_enum;
1087 ComPtr<ID3D11VideoProcessor> video_proc;
1089 D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc;
1090 D3D11_VIDEO_PROCESSOR_CAPS proc_caps;
1091 D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rate_conv_caps;
1092 D3D11_VIDEO_PROCESSOR_OUTPUT_RATE output_rate =
1093 D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL;
/* Nothing to (re)configure when running in passthrough mode */
1098 if (gst_base_transform_is_passthrough (trans))
1101 if (!gst_video_info_from_caps (&in_info, incaps)) {
1102 GST_ERROR_OBJECT (self, "Invalid input caps %" GST_PTR_FORMAT, incaps);
1106 if (!gst_video_info_from_caps (&out_info, outcaps)) {
1107 GST_ERROR_OBJECT (self, "Invalid output caps %" GST_PTR_FORMAT, outcaps);
1111 self->in_info = in_info;
1112 self->out_info = out_info;
1114 /* Calculate expected buffer duration. We might need to reference this value
1115 * when buffer duration is unknown */
1116 if (GST_VIDEO_INFO_FPS_N (&in_info) > 0 &&
1117 GST_VIDEO_INFO_FPS_D (&in_info) > 0) {
1118 self->default_buffer_duration =
1119 gst_util_uint64_scale_int (GST_SECOND, GST_VIDEO_INFO_FPS_D (&in_info),
1120 GST_VIDEO_INFO_FPS_N (&in_info));
1122 /* Assume 25 fps. We need this for reporting latency at least */
1123 self->default_buffer_duration =
1124 gst_util_uint64_scale_int (GST_SECOND, 1, 25);
/* Drop any previous processor/enumerator/history before reconfiguring */
1127 gst_d3d11_deinterlace_reset (self);
/* Progressive input needs no processing: switch to passthrough */
1130 if (!GST_VIDEO_INFO_IS_INTERLACED (&in_info)) {
1131 gst_base_transform_set_passthrough (trans, TRUE);
1136 /* TFF or BFF is not important here, this is just for enumerating
1137 * available deinterlace devices */
1138 memset (&desc, 0, sizeof (D3D11_VIDEO_PROCESSOR_CONTENT_DESC));
1140 desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
1141 if (GST_VIDEO_INFO_FIELD_ORDER (&in_info) ==
1142 GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST)
1143 desc.InputFrameFormat =
1144 D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
1145 desc.InputWidth = GST_VIDEO_INFO_WIDTH (&in_info);
1146 desc.InputHeight = GST_VIDEO_INFO_HEIGHT (&in_info);
1147 desc.OutputWidth = GST_VIDEO_INFO_WIDTH (&out_info);
1148 desc.OutputHeight = GST_VIDEO_INFO_HEIGHT (&out_info);
1149 desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
1151 hr = self->video_device->CreateVideoProcessorEnumerator (&desc, &video_enum);
1152 if (!gst_d3d11_result (hr, self->device)) {
1153 GST_ERROR_OBJECT (self, "Couldn't create VideoProcessorEnumerator");
1157 hr = video_enum->GetVideoProcessorCaps (&proc_caps);
1158 if (!gst_d3d11_result (hr, self->device)) {
1159 GST_ERROR_OBJECT (self, "Couldn't query processor caps");
1163 /* Shouldn't happen, we checked this already during plugin_init */
1164 if (proc_caps.RateConversionCapsCount == 0) {
1165 GST_ERROR_OBJECT (self, "Deinterlacing is not supported");
/* Find the rate-conversion caps group that supports the requested method */
1169 for (i = 0; i < proc_caps.RateConversionCapsCount; i++) {
1170 hr = video_enum->GetVideoProcessorRateConversionCaps (i, &rate_conv_caps);
1174 if ((rate_conv_caps.ProcessorCaps & self->method) == self->method)
1178 if (i >= proc_caps.RateConversionCapsCount) {
1179 GST_ERROR_OBJECT (self, "Deinterlacing method 0x%x is not supported",
1184 hr = self->video_device->CreateVideoProcessor (video_enum.Get (),
1186 if (!gst_d3d11_result (hr, self->device)) {
1187 GST_ERROR_OBJECT (self, "Couldn't create processor");
1191 if (!gst_d3d11_deinterlace_prepare_fallback_pool (self, incaps, &in_info,
1192 outcaps, &out_info)) {
1193 GST_ERROR_OBJECT (self, "Couldn't prepare fallback buffer pool");
/* Transfer ownership of the COM objects from the smart pointers to self */
1197 self->video_enum = video_enum.Detach ();
1198 self->video_proc = video_proc.Detach ();
1202 rect.right = GST_VIDEO_INFO_WIDTH (&self->in_info);
1203 rect.bottom = GST_VIDEO_INFO_HEIGHT (&self->in_info);
1205 /* Blending seems to be considered as half rate. See also
1206 * https://docs.microsoft.com/en-us/windows/win32/api/d3d12video/ns-d3d12video-d3d12_video_process_input_stream_rate */
1207 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND)
1208 output_rate = D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_HALF;
/* All ID3D11VideoContext calls must happen with the device lock held */
1210 gst_d3d11_device_lock (self->device);
1211 self->video_context->VideoProcessorSetStreamSourceRect (self->video_proc,
1213 self->video_context->VideoProcessorSetStreamDestRect (self->video_proc,
1215 self->video_context->VideoProcessorSetOutputTargetRect (self->video_proc,
1217 self->video_context->
1218 VideoProcessorSetStreamAutoProcessingMode (self->video_proc, 0, FALSE);
1219 self->video_context->VideoProcessorSetStreamOutputRate (self->video_proc, 0,
1220 output_rate, TRUE, NULL);
1221 gst_d3d11_device_unlock (self->device);
/* Returns the ID3D11VideoProcessorInputView backing @buffer, or warns (and,
 * presumably, returns NULL — the return lines are elided in this listing)
 * when the buffer is not a single-memory D3D11 buffer owned by our device.
 * NOTE(review): function name misspells "deinterlace" (missing 'l'); renaming
 * would require touching all call sites, so only flagged here. */
1226 static ID3D11VideoProcessorInputView *
1227 gst_d3d11_deinterace_get_piv_from_buffer (GstD3D11Deinterlace * self,
1231 GstD3D11Memory *dmem;
1232 ID3D11VideoProcessorInputView *piv;
/* The processor consumes exactly one texture per frame */
1234 if (gst_buffer_n_memory (buffer) != 1) {
1235 GST_WARNING_OBJECT (self, "Input buffer has more than one memory");
1239 mem = gst_buffer_peek_memory (buffer, 0);
1240 if (!gst_is_d3d11_memory (mem)) {
1241 GST_WARNING_OBJECT (self, "Input buffer is holding non-D3D11 memory");
/* Memory must come from the same device as our video processor */
1245 dmem = (GstD3D11Memory *) mem;
1246 if (dmem->device != self->device) {
1247 GST_WARNING_OBJECT (self,
1248 "Input D3D11 memory was allocated by other device");
1252 piv = gst_d3d11_memory_get_processor_input_view (dmem,
1253 self->video_device, self->video_enum);
1255 GST_WARNING_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
/* Ensures @input is usable as processor input. If a processor input view can
 * be obtained from @input directly it is used as-is; otherwise @input is
 * copied into a buffer acquired from the fallback input pool, metadata is
 * copied over, and the fallback buffer is returned instead.
 * Takes ownership of @input (unrefs it on both the copy and error paths). */
1263 gst_d3d11_deinterlace_ensure_input_buffer (GstD3D11Deinterlace * self,
1266 GstD3D11Memory *dmem;
1267 ID3D11VideoProcessorInputView *piv;
1268 GstBuffer *new_buf = NULL;
/* Fast path: the incoming buffer already yields an input view */
1273 piv = gst_d3d11_deinterace_get_piv_from_buffer (self, input);
1277 if (!self->fallback_in_pool ||
1278 !gst_buffer_pool_set_active (self->fallback_in_pool, TRUE) ||
1279 gst_buffer_pool_acquire_buffer (self->fallback_in_pool, &new_buf,
1280 NULL) != GST_FLOW_OK) {
1281 GST_ERROR_OBJECT (self, "Fallback input buffer is unavailable");
1282 gst_buffer_unref (input);
1287 if (!gst_d3d11_buffer_copy_into (new_buf, input, &self->in_info)) {
1288 GST_ERROR_OBJECT (self, "Couldn't copy input buffer to fallback buffer");
1289 gst_buffer_unref (new_buf);
1290 gst_buffer_unref (input);
/* Sanity-check that the fallback buffer itself yields an input view */
1295 dmem = (GstD3D11Memory *) gst_buffer_peek_memory (new_buf, 0);
1296 piv = gst_d3d11_memory_get_processor_input_view (dmem,
1297 self->video_device, self->video_enum);
1299 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
1300 gst_buffer_unref (new_buf);
1301 gst_buffer_unref (input);
1306 /* copy metadata, default implementation of baseclass will copy everything
1308 GST_BASE_TRANSFORM_CLASS (parent_class)->copy_metadata
1309 (GST_BASE_TRANSFORM_CAST (self), input, new_buf);
/* Original input is no longer needed once the fallback copy is complete */
1311 gst_buffer_unref (input);
/* Queues @buffer (NULL means drain) into the future-frame queue, and when the
 * queue is full enough — or when draining — pops the oldest frame into
 * self->to_process, fixing up its PTS/duration and deciding how many output
 * frames (1 or 2) it should produce.
 * NOTE(review): local variable "cur_timestmap" is a typo of "cur_timestamp";
 * harmless, but worth fixing when this file can be edited as a whole. */
1316 static GstFlowReturn
1317 gst_d3d11_deinterlace_submit_future_frame (GstD3D11Deinterlace * self,
1320 GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (self);
1323 /* push tail and pop head, so that head frame can be the nearest frame
1324 * of current frame */
1326 g_queue_push_tail (&self->future_frame_queue, buffer);
1328 len = g_queue_get_length (&self->future_frame_queue);
1330 g_assert (len <= self->max_future_frames + 1);
/* A leftover to_process buffer indicates a logic error upstream of here */
1332 if (self->to_process) {
1333 GST_WARNING_OBJECT (self, "Found uncleared processing buffer");
1334 gst_clear_buffer (&self->to_process);
1337 if (len > self->max_future_frames ||
1338 /* NULL means drain */
1339 (buffer == NULL && len > 0)) {
1340 GstClockTime cur_timestmap = GST_CLOCK_TIME_NONE;
1341 GstClockTime duration = GST_CLOCK_TIME_NONE;
1342 GstBuffer *next_buf;
1345 (GstBuffer *) g_queue_pop_head (&self->future_frame_queue);
1347 /* For non-blend methods, we will produce two frames from a single
1348 * interlaced frame. So, sufficiently correct buffer duration is required
1349 * to set timestamp for the second output frame */
1350 if (self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND) {
1351 if (GST_BUFFER_PTS_IS_VALID (self->to_process)) {
1352 cur_timestmap = GST_BUFFER_PTS (self->to_process);
1354 cur_timestmap = GST_BUFFER_DTS (self->to_process);
1357 /* Ensure buffer duration */
1358 next_buf = (GstBuffer *) g_queue_peek_head (&self->future_frame_queue);
1359 if (next_buf && GST_CLOCK_STIME_IS_VALID (cur_timestmap)) {
1360 GstClockTime next_timestamp;
1362 if (GST_BUFFER_PTS_IS_VALID (next_buf)) {
1363 next_timestamp = GST_BUFFER_PTS (next_buf);
1365 next_timestamp = GST_BUFFER_DTS (next_buf);
/* Derive duration from the gap to the next frame, direction-aware
 * for reverse playback (negative segment rate) */
1368 if (GST_CLOCK_STIME_IS_VALID (next_timestamp)) {
1369 if (trans->segment.rate >= 0.0 && next_timestamp > cur_timestmap) {
1370 duration = next_timestamp - cur_timestmap;
1371 } else if (trans->segment.rate < 0.0
1372 && next_timestamp < cur_timestmap) {
1373 duration = cur_timestmap - next_timestamp;
1378 /* Make sure that we can update buffer duration safely */
1379 self->to_process = gst_buffer_make_writable (self->to_process);
1380 if (GST_CLOCK_TIME_IS_VALID (duration)) {
1381 GST_BUFFER_DURATION (self->to_process) = duration;
1383 GST_BUFFER_DURATION (self->to_process) = self->default_buffer_duration;
1386 /* Bonus points, DTS doesn't make sense for raw video frame */
1387 GST_BUFFER_PTS (self->to_process) = cur_timestmap;
1388 GST_BUFFER_DTS (self->to_process) = GST_CLOCK_TIME_NONE;
1390 /* And mark the number of output frames for this input frame */
1391 self->num_output_per_input = 2;
1393 self->num_output_per_input = 1;
1396 self->first_output = TRUE;
/* GstBaseTransform::submit_input_buffer vfunc.
 * Lets the base class run QoS first, then — unless passthrough — takes
 * ownership of the queued buffer, normalizes it via
 * ensure_input_buffer() and hands it to the future-frame queue. */
1402 static GstFlowReturn
1403 gst_d3d11_deinterlace_submit_input_buffer (GstBaseTransform * trans,
1404 gboolean is_discont, GstBuffer * input)
1406 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1410 /* Let baseclass handle QoS first */
1411 ret = GST_BASE_TRANSFORM_CLASS (parent_class)->submit_input_buffer (trans,
1413 if (ret != GST_FLOW_OK)
1416 if (gst_base_transform_is_passthrough (trans))
1419 /* at this moment, baseclass must hold queued_buf */
1420 g_assert (trans->queued_buf != NULL);
1422 /* Check if we can use this buffer directly. If not, copy this into
1423 * our fallback buffer */
1424 buf = trans->queued_buf;
1425 trans->queued_buf = NULL;
1427 buf = gst_d3d11_deinterlace_ensure_input_buffer (self, buf);
1429 GST_ERROR_OBJECT (self, "Invalid input buffer");
1430 return GST_FLOW_ERROR;
1433 return gst_d3d11_deinterlace_submit_future_frame (self, buf);
/* Output-side twin of get_piv_from_buffer(): returns the
 * ID3D11VideoProcessorOutputView backing @buffer, warning (and, presumably,
 * returning NULL — the return lines are elided in this listing) when the
 * buffer is not a single-memory D3D11 buffer owned by our device.
 * NOTE(review): same "deinterace" name typo as the input-view helper. */
1436 static ID3D11VideoProcessorOutputView *
1437 gst_d3d11_deinterace_get_pov_from_buffer (GstD3D11Deinterlace * self,
1441 GstD3D11Memory *dmem;
1442 ID3D11VideoProcessorOutputView *pov;
1444 if (gst_buffer_n_memory (buffer) != 1) {
1445 GST_WARNING_OBJECT (self, "Output buffer has more than one memory");
1449 mem = gst_buffer_peek_memory (buffer, 0);
1450 if (!gst_is_d3d11_memory (mem)) {
1451 GST_WARNING_OBJECT (self, "Output buffer is holding non-D3D11 memory");
1455 dmem = (GstD3D11Memory *) mem;
1456 if (dmem->device != self->device) {
1457 GST_WARNING_OBJECT (self,
1458 "Output D3D11 memory was allocated by other device");
1462 pov = gst_d3d11_memory_get_processor_output_view (dmem,
1463 self->video_device, self->video_enum);
1465 GST_WARNING_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
/* Output-side twin of ensure_input_buffer(): if @output cannot provide a
 * processor output view, acquires a render-target-capable buffer from the
 * fallback output pool, copies metadata from @output, and returns the
 * fallback buffer. Takes ownership of @output. */
1473 gst_d3d11_deinterlace_ensure_output_buffer (GstD3D11Deinterlace * self,
1476 GstD3D11Memory *dmem;
1477 ID3D11VideoProcessorOutputView *pov;
1478 GstBuffer *new_buf = NULL;
/* Fast path: the downstream-provided buffer already yields an output view */
1480 pov = gst_d3d11_deinterace_get_pov_from_buffer (self, output);
1484 if (!self->fallback_out_pool ||
1485 !gst_buffer_pool_set_active (self->fallback_out_pool, TRUE) ||
1486 gst_buffer_pool_acquire_buffer (self->fallback_out_pool, &new_buf,
1487 NULL) != GST_FLOW_OK) {
1488 GST_ERROR_OBJECT (self, "Fallback output buffer is unavailable");
1489 gst_buffer_unref (output);
1494 dmem = (GstD3D11Memory *) gst_buffer_peek_memory (new_buf, 0);
1495 pov = gst_d3d11_memory_get_processor_output_view (dmem,
1496 self->video_device, self->video_enum);
1498 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
1499 gst_buffer_unref (new_buf);
1500 gst_buffer_unref (output);
1505 /* copy metadata, default implementation of baseclass will copy everything
1507 GST_BASE_TRANSFORM_CLASS (parent_class)->copy_metadata
1508 (GST_BASE_TRANSFORM_CAST (self), output, new_buf);
1510 gst_buffer_unref (output);
/* Pushes a processed frame into the past-frame reference queue (head = most
 * recent) and trims the queue down to max_past_frames, unreffing evicted
 * buffers. Takes ownership of @buffer. */
1515 static GstFlowReturn
1516 gst_d3d11_deinterlace_submit_past_frame (GstD3D11Deinterlace * self,
1519 /* push head and pop tail, so that head frame can be the nearest frame
1520 * of current frame */
1521 g_queue_push_head (&self->past_frame_queue, buffer);
1522 while (g_queue_get_length (&self->past_frame_queue) > self->max_past_frames) {
1523 GstBuffer *to_drop =
1524 (GstBuffer *) g_queue_pop_tail (&self->past_frame_queue);
1527 gst_buffer_unref (to_drop);
/* GstBaseTransform::generate_output vfunc.
 * Produces one output frame from self->to_process per call. For non-blend
 * methods a single interlaced input yields two calls/outputs (first and
 * second field); timestamps are split accordingly. When all outputs for the
 * current input were produced, the input moves to the past-frame queue. */
1533 static GstFlowReturn
1534 gst_d3d11_deinterlace_generate_output (GstBaseTransform * trans,
1535 GstBuffer ** outbuf)
1537 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1538 GstFlowReturn ret = GST_FLOW_OK;
1540 GstBuffer *buf = NULL;
1542 if (gst_base_transform_is_passthrough (trans)) {
1543 return GST_BASE_TRANSFORM_CLASS (parent_class)->generate_output (trans,
1548 inbuf = self->to_process;
/* Let the base class allocate/negotiate the output buffer */
1553 GST_BASE_TRANSFORM_CLASS (parent_class)->prepare_output_buffer (trans,
1556 if (ret != GST_FLOW_OK || !buf) {
1557 GST_WARNING_OBJECT (trans, "could not get buffer from pool: %s",
1558 gst_flow_get_name (ret));
1563 g_assert (inbuf != buf);
/* Swap for a fallback render-target buffer if needed */
1565 buf = gst_d3d11_deinterlace_ensure_output_buffer (self, buf);
1567 GST_ERROR_OBJECT (self, "Failed to allocate output buffer to process");
1569 return GST_FLOW_ERROR;
1572 ret = gst_d3d11_deinterlace_transform (trans, inbuf, buf);
1573 if (ret != GST_FLOW_OK) {
1574 gst_buffer_unref (buf);
1578 g_assert (self->num_output_per_input == 1 || self->num_output_per_input == 2);
1580 /* Update timestamp and buffer duration.
1581 * Here, PTS and duration of inbuf must be valid,
1582 * unless there's programing error, since we updated timestamp and duration
1583 * already around submit_input_buffer() */
1584 if (self->num_output_per_input == 2) {
1585 if (!GST_BUFFER_DURATION_IS_VALID (inbuf)) {
1586 GST_LOG_OBJECT (self, "Input buffer duration is unknown");
1587 } else if (!GST_BUFFER_PTS_IS_VALID (inbuf)) {
1588 GST_LOG_OBJECT (self, "Input buffer timestamp is unknown");
/* Each output field gets half the input frame duration */
1590 GstClockTime duration = GST_BUFFER_DURATION (inbuf) / 2;
1591 gboolean second_field = FALSE;
1593 if (self->first_output) {
1594 /* For reverse playback, first output is the second field */
1595 if (trans->segment.rate < 0)
1596 second_field = TRUE;
1598 second_field = FALSE;
1600 if (trans->segment.rate < 0)
1601 second_field = FALSE;
1603 second_field = TRUE;
1606 GST_BUFFER_DURATION (buf) = duration;
/* Second field is shifted by one field duration */
1608 GST_BUFFER_PTS (buf) = GST_BUFFER_PTS (buf) + duration;
1614 self->first_output = FALSE;
1615 self->num_transformed++;
1616 /* https://docs.microsoft.com/en-us/windows/win32/api/d3d12video/ns-d3d12video-d3d12_video_process_input_stream_rate */
1617 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND) {
1618 self->input_index += 2;
1620 self->input_index++;
1623 if (self->num_output_per_input <= self->num_transformed) {
1624 /* Move processed frame to past_frame queue */
1625 gst_d3d11_deinterlace_submit_past_frame (self, self->to_process);
1626 self->to_process = NULL;
/* Runs one VideoProcessorBlt: resolves input/output views, determines the
 * per-buffer frame format (field order), gathers past/future reference
 * surfaces for methods that need them, then blits under the device lock.
 * NOTE(review): local "frame_foramt" is a typo of "frame_format" — code
 * left as-is here since this edit changes comments only. */
1632 static GstFlowReturn
1633 gst_d3d11_deinterlace_transform (GstBaseTransform * trans, GstBuffer * inbuf,
1636 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1637 ID3D11VideoProcessorInputView *piv;
1638 ID3D11VideoProcessorOutputView *pov;
1639 D3D11_VIDEO_FRAME_FORMAT frame_foramt = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
1640 D3D11_VIDEO_PROCESSOR_STREAM proc_stream = { 0, };
1641 ID3D11VideoProcessorInputView *future_surfaces[MAX_NUM_REFERENCES] =
1643 ID3D11VideoProcessorInputView *past_surfaces[MAX_NUM_REFERENCES] = { NULL, };
1644 guint future_frames = 0;
1645 guint past_frames = 0;
1649 /* Input/output buffer must be holding valid D3D11 memory here,
1650 * as we checked it already in submit_input_buffer() and generate_output() */
1651 piv = gst_d3d11_deinterace_get_piv_from_buffer (self, inbuf);
1653 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
1654 return GST_FLOW_ERROR;
1657 pov = gst_d3d11_deinterace_get_pov_from_buffer (self, outbuf);
1659 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
1660 return GST_FLOW_ERROR;
1663 /* Check field order */
/* For mixed mode or unknown field order, field order comes from per-buffer
 * flags; otherwise it is fixed by the negotiated caps */
1664 if (GST_VIDEO_INFO_INTERLACE_MODE (&self->in_info) ==
1665 GST_VIDEO_INTERLACE_MODE_MIXED ||
1666 (GST_VIDEO_INFO_INTERLACE_MODE (&self->in_info) ==
1667 GST_VIDEO_INTERLACE_MODE_INTERLEAVED &&
1668 GST_VIDEO_INFO_FIELD_ORDER (&self->in_info) ==
1669 GST_VIDEO_FIELD_ORDER_UNKNOWN)) {
1670 if (!GST_BUFFER_FLAG_IS_SET (inbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED)) {
1671 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
1672 } else if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_VIDEO_BUFFER_FLAG_TFF)) {
1673 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
1675 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
1677 } else if (GST_VIDEO_INFO_FIELD_ORDER (&self->in_info) ==
1678 GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST) {
1679 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
1680 } else if (GST_VIDEO_INFO_FIELD_ORDER (&self->in_info) ==
1681 GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST) {
1682 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
1685 if (frame_foramt == D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE) {
1686 /* Progressive stream will produce only one frame per frame */
1687 self->num_output_per_input = 1;
1688 } else if (self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND &&
1689 self->method != GST_D3D11_DEINTERLACE_METHOD_BOB) {
1690 /* Fill reference frames */
1691 for (i = 0; i < g_queue_get_length (&self->future_frame_queue) &&
1692 i < G_N_ELEMENTS (future_surfaces); i++) {
1693 GstBuffer *future_buf;
1694 ID3D11VideoProcessorInputView *future_piv;
1697 (GstBuffer *) g_queue_peek_nth (&self->future_frame_queue, i);
1698 future_piv = gst_d3d11_deinterace_get_piv_from_buffer (self, future_buf);
1700 GST_WARNING_OBJECT (self,
1701 "Couldn't get ID3D11VideoProcessorInputView from future "
1706 future_surfaces[i] = future_piv;
/* Past references, newest first (queue head is the most recent frame) */
1710 for (i = 0; i < g_queue_get_length (&self->past_frame_queue) &&
1711 i < G_N_ELEMENTS (past_surfaces); i++) {
1712 GstBuffer *past_buf;
1713 ID3D11VideoProcessorInputView *past_piv;
1715 past_buf = (GstBuffer *) g_queue_peek_nth (&self->past_frame_queue, i);
1716 past_piv = gst_d3d11_deinterace_get_piv_from_buffer (self, past_buf);
1718 GST_WARNING_OBJECT (self,
1719 "Couldn't get ID3D11VideoProcessorInputView from past "
1724 past_surfaces[i] = past_piv;
1729 proc_stream.Enable = TRUE;
1730 proc_stream.pInputSurface = piv;
1731 proc_stream.InputFrameOrField = self->input_index;
1732 /* FIXME: This is wrong for inverse telecine case */
1733 /* OutputIndex == 0 for the first field, and 1 for the second field */
1734 if (self->num_output_per_input == 2) {
1735 if (trans->segment.rate < 0.0) {
1736 /* Process the second frame first in case of reverse playback */
1737 proc_stream.OutputIndex = self->first_output ? 1 : 0;
1739 proc_stream.OutputIndex = self->first_output ? 0 : 1;
1742 proc_stream.OutputIndex = 0;
1745 if (future_frames) {
1746 proc_stream.FutureFrames = future_frames;
1747 proc_stream.ppFutureSurfaces = future_surfaces;
1751 proc_stream.PastFrames = past_frames;
1752 proc_stream.ppPastSurfaces = past_surfaces;
/* Frame format + blt must be done atomically w.r.t. the device */
1755 gst_d3d11_device_lock (self->device);
1756 self->video_context->VideoProcessorSetStreamFrameFormat (self->video_proc, 0,
1759 hr = self->video_context->VideoProcessorBlt (self->video_proc, pov, 0,
1761 gst_d3d11_device_unlock (self->device);
1763 if (!gst_d3d11_result (hr, self->device)) {
1764 GST_ERROR_OBJECT (self, "Failed to perform deinterlacing");
1765 return GST_FLOW_ERROR;
/* GstBaseTransform::sink_event vfunc.
 * Drains queued frames on discontinuities (stream-start, caps change,
 * segment / segment-done) and resets history on flush-stop, then chains up
 * to the base class. */
1772 gst_d3d11_deinterlace_sink_event (GstBaseTransform * trans, GstEvent * event)
1774 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1776 switch (GST_EVENT_TYPE (event)) {
1777 case GST_EVENT_STREAM_START:
1778 /* stream-start means discont stream from previous one. Drain pending
1780 GST_DEBUG_OBJECT (self, "Have stream-start, drain frames if any");
1781 gst_d3d11_deinterlace_drain (self);
1783 case GST_EVENT_CAPS:{
1784 GstPad *sinkpad = GST_BASE_TRANSFORM_SINK_PAD (trans);
1787 prev_caps = gst_pad_get_current_caps (sinkpad);
1790 gst_event_parse_caps (event, &caps);
1791 /* If caps is updated, drain pending frames */
1792 if (!gst_caps_is_equal (prev_caps, caps)) {
1793 GST_DEBUG_OBJECT (self, "Caps updated from %" GST_PTR_FORMAT " to %"
1794 GST_PTR_FORMAT, prev_caps, caps);
1795 gst_d3d11_deinterlace_drain (self);
1798 gst_caps_unref (prev_caps);
1802 case GST_EVENT_SEGMENT:
1803 /* new segment would mean that temporal discontinuity */
1804 case GST_EVENT_SEGMENT_DONE:
1806 GST_DEBUG_OBJECT (self, "Have event %s, drain frames if any",
1807 GST_EVENT_TYPE_NAME (event));
1808 gst_d3d11_deinterlace_drain (self);
1810 case GST_EVENT_FLUSH_STOP:
/* Flush: drop history without pushing it downstream */
1811 GST_D3D11_DEINTERLACE_LOCK (self);
1812 gst_d3d11_deinterlace_reset_history (self);
1813 GST_D3D11_DEINTERLACE_UNLOCK (self);
1819 return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
1822 /* FIXME: might be job of basetransform */
/* Flushes all frames still held in the future-frame queue: repeatedly submits
 * a NULL (drain) frame, generates the corresponding outputs, and pushes them
 * on the src pad. The object lock is released around gst_pad_push() to avoid
 * deadlocking against downstream. Finally clears all reference history. */
1823 static GstFlowReturn
1824 gst_d3d11_deinterlace_drain (GstD3D11Deinterlace * self)
1826 GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (self);
1827 GstFlowReturn ret = GST_FLOW_OK;
1828 GstBuffer *outbuf = NULL;
1830 GST_D3D11_DEINTERLACE_LOCK (self);
1831 if (gst_base_transform_is_passthrough (trans)) {
1832 /* If we were passthrough, nothing to do */
1834 } else if (!g_queue_get_length (&self->future_frame_queue)) {
1835 /* No pending data, nothing to do */
1839 while (g_queue_get_length (&self->future_frame_queue)) {
/* NULL buffer means "drain one frame from the queue" */
1840 gst_d3d11_deinterlace_submit_future_frame (self, NULL);
1841 if (!self->to_process)
1847 ret = gst_d3d11_deinterlace_generate_output (trans, &outbuf);
1848 if (outbuf != NULL) {
1849 /* Release lock during push buffer */
1850 GST_D3D11_DEINTERLACE_UNLOCK (self);
1851 ret = gst_pad_push (trans->srcpad, outbuf);
1852 GST_D3D11_DEINTERLACE_LOCK (self);
1854 } while (ret == GST_FLOW_OK && outbuf != NULL);
1858 gst_d3d11_deinterlace_reset_history (self);
1859 GST_D3D11_DEINTERLACE_UNLOCK (self);
1865 * SECTION:element-d3d11deinterlace
1866 * @title: d3d11deinterlace
1867 * @short_description: A Direct3D11 based deinterlace element
1869 * Deinterlacing interlaced video frames to progressive video frames by using
1870 * ID3D11VideoProcessor API.
1872 * ## Example launch line
1874 * gst-launch-1.0 filesrc location=/path/to/h264/file ! parsebin ! d3d11h264dec ! d3d11deinterlace ! d3d11videosink
1881 /* GstD3D11DeinterlaceBin */
/* Bin wrapper types: the bin packages upload -> convert -> deinterlace ->
 * convert -> download so that system-memory and non-native formats work
 * transparently (see bin_init below). */
1892 PROP_BIN_SUPPORTED_METHODS,
1895 typedef struct _GstD3D11DeinterlaceBin
/* Child elements owned by the bin (added via gst_bin_add_many) */
1902 GstElement *deinterlace;
1903 GstElement *in_convert;
1904 GstElement *out_convert;
1906 GstElement *download;
1907 } GstD3D11DeinterlaceBin;
1909 typedef struct _GstD3D11DeinterlaceBinClass
1911 GstBinClass parent_class;
1915 } GstD3D11DeinterlaceBinClass;
1917 static GstElementClass *bin_parent_class = NULL;
1918 #define GST_D3D11_DEINTERLACE_BIN(object) ((GstD3D11DeinterlaceBin *) (object))
1919 #define GST_D3D11_DEINTERLACE_BIN_GET_CLASS(object) \
1920 (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object), \
1921 GstD3D11DeinterlaceBinClass))
/* Caps builder for system-memory video; 64..8192 matches the element's
 * supported dimension range */
1923 #define GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE(format) \
1925 "format = (string) " format ", " \
1926 "width = (int) [64, 8192], " \
1927 "height = (int) [64, 8192] "
/* Same, but with explicit caps features (e.g. memory:D3D11Memory) */
1929 #define GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES(features,format) \
1930 "video/x-raw(" features "), " \
1931 "format = (string) " format ", " \
1932 "width = (int) [64, 8192], " \
1933 "height = (int) [64, 8192] "
/* Pad templates for the bin: accept both D3D11 memory (with or without
 * overlay-composition meta) and plain system memory, since the internal
 * upload/download elements bridge the memory types. */
1935 static GstStaticPadTemplate bin_sink_template_caps =
1936 GST_STATIC_PAD_TEMPLATE ("sink",
1939 GST_STATIC_CAPS (GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
1940 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, GST_D3D11_SINK_FORMATS) "; "
1941 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
1942 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY ","
1943 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
1944 GST_D3D11_SINK_FORMATS) "; "
1945 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE (GST_D3D11_SINK_FORMATS) "; "
1946 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
1947 (GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY ","
1948 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
1949 GST_D3D11_SINK_FORMATS)
1952 static GstStaticPadTemplate bin_src_template_caps =
1953 GST_STATIC_PAD_TEMPLATE ("src",
1956 GST_STATIC_CAPS (GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
1957 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, GST_D3D11_SRC_FORMATS) "; "
1958 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
1959 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY ","
1960 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
1961 GST_D3D11_SRC_FORMATS) "; "
1962 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE (GST_D3D11_SRC_FORMATS) "; "
1963 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
1964 (GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY ","
1965 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
1966 GST_D3D11_SRC_FORMATS)
/* Forward declarations for the bin's GObject property accessors, which just
 * proxy to the inner deinterlace element (see definitions below) */
1969 static void gst_d3d11_deinterlace_bin_set_property (GObject * object,
1970 guint prop_id, const GValue * value, GParamSpec * pspec);
1971 static void gst_d3d11_deinterlace_bin_get_property (GObject * object,
1972 guint prop_id, GValue * value, GParamSpec * pspec);
/* GClassInitFunc for the bin type. Installs proxied properties mirroring the
 * inner deinterlace element, registers pad templates and element metadata,
 * and stashes per-device data (adapter index, child GType) from @data.
 * NOTE(review): the "method" property blurb says "Use can set" — looks like
 * a typo for "User can set"; it's a runtime string, so not changed here. */
1975 gst_d3d11_deinterlace_bin_class_init (GstD3D11DeinterlaceBinClass * klass,
1978 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
1979 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
1980 GstD3D11DeinterlaceClassData *cdata = (GstD3D11DeinterlaceClassData *) data;
1983 bin_parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
1985 gobject_class->get_property = gst_d3d11_deinterlace_bin_get_property;
1986 gobject_class->set_property = gst_d3d11_deinterlace_bin_set_property;
1989 g_object_class_install_property (gobject_class, PROP_BIN_QOS,
1990 g_param_spec_boolean ("qos", "QoS", "Handle Quality-of-Service events",
1991 FALSE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
/* Read-only device-identification properties, values come from cdata */
1994 g_object_class_install_property (gobject_class, PROP_BIN_ADAPTER,
1995 g_param_spec_uint ("adapter", "Adapter",
1996 "DXGI Adapter index for creating device",
1997 0, G_MAXUINT32, cdata->adapter,
1998 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
1999 g_object_class_install_property (gobject_class, PROP_BIN_DEVICE_ID,
2000 g_param_spec_uint ("device-id", "Device Id",
2001 "DXGI Device ID", 0, G_MAXUINT32, 0,
2002 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
2003 g_object_class_install_property (gobject_class, PROP_BIN_VENDOR_ID,
2004 g_param_spec_uint ("vendor-id", "Vendor Id",
2005 "DXGI Vendor ID", 0, G_MAXUINT32, 0,
2006 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
2007 g_object_class_install_property (gobject_class, PROP_BIN_METHOD,
2008 g_param_spec_flags ("method", "Method",
2009 "Deinterlace Method. Use can set multiple methods as a flagset "
2010 "and element will select one of method automatically. "
2011 "If deinterlacing device failed to deinterlace with given mode, "
2012 "fallback might happen by the device",
2013 GST_TYPE_D3D11_DEINTERLACE_METHOD, cdata->device_caps.default_method,
2014 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
2015 GST_PARAM_MUTABLE_READY)));
2016 g_object_class_install_property (gobject_class, PROP_BIN_SUPPORTED_METHODS,
2017 g_param_spec_flags ("supported-methods", "Supported Methods",
2018 "Set of supported deinterlace methods by device",
2019 GST_TYPE_D3D11_DEINTERLACE_METHOD,
2020 cdata->device_caps.supported_methods,
2021 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
/* Per-device element metadata (description embeds the adapter name) */
2023 long_name = g_strdup_printf ("Direct3D11 %s Deinterlacer Bin",
2024 cdata->description);
2025 gst_element_class_set_metadata (element_class, long_name,
2026 "Filter/Effect/Video/Deinterlace/Hardware",
2027 "A Direct3D11 based deinterlacer bin",
2028 "Seungha Yang <seungha@centricular.com>");
2031 gst_element_class_add_static_pad_template (element_class,
2032 &bin_sink_template_caps);
2033 gst_element_class_add_static_pad_template (element_class,
2034 &bin_src_template_caps);
2036 klass->adapter = cdata->adapter;
2037 klass->child_type = cdata->deinterlace_type;
/* cdata is refcounted; class init holds one ref which we drop here */
2039 gst_d3d11_deinterlace_class_data_unref (cdata);
/* GInstanceInitFunc for the bin. Builds and links the internal chain
 * upload -> in_convert -> deinterlace -> out_convert -> download, pins all
 * children to the class's DXGI adapter, and exposes ghost pads.
 * NOTE(review): return values of gst_element_factory_make() and
 * gst_element_link_many() are not checked here — presumably the factories
 * are guaranteed registered by this plugin; verify against plugin_init. */
2043 gst_d3d11_deinterlace_bin_init (GstD3D11DeinterlaceBin * self)
2045 GstD3D11DeinterlaceBinClass *klass =
2046 GST_D3D11_DEINTERLACE_BIN_GET_CLASS (self);
/* child_type is the per-device deinterlace GType registered at class time */
2049 self->deinterlace = (GstElement *) g_object_new (klass->child_type,
2050 "name", "deinterlace", NULL);
2051 self->in_convert = gst_element_factory_make ("d3d11colorconvert", NULL);
2052 self->out_convert = gst_element_factory_make ("d3d11colorconvert", NULL);
2053 self->upload = gst_element_factory_make ("d3d11upload", NULL);
2054 self->download = gst_element_factory_make ("d3d11download", NULL);
2056 /* Specify DXGI adapter index to use */
2057 g_object_set (G_OBJECT (self->in_convert), "adapter", klass->adapter, NULL);
2058 g_object_set (G_OBJECT (self->out_convert), "adapter", klass->adapter, NULL);
2059 g_object_set (G_OBJECT (self->upload), "adapter", klass->adapter, NULL);
2060 g_object_set (G_OBJECT (self->download), "adapter", klass->adapter, NULL);
2062 gst_bin_add_many (GST_BIN_CAST (self), self->upload, self->in_convert,
2063 self->deinterlace, self->out_convert, self->download, NULL);
2064 gst_element_link_many (self->upload, self->in_convert, self->deinterlace,
2065 self->out_convert, self->download, NULL);
/* Ghost pads proxy the chain's outermost pads to the bin boundary */
2067 pad = gst_element_get_static_pad (self->upload, "sink");
2068 self->sinkpad = gst_ghost_pad_new ("sink", pad);
2069 gst_element_add_pad (GST_ELEMENT_CAST (self), self->sinkpad);
2070 gst_object_unref (pad);
2072 pad = gst_element_get_static_pad (self->download, "src");
2073 self->srcpad = gst_ghost_pad_new ("src", pad);
2074 gst_element_add_pad (GST_ELEMENT_CAST (self), self->srcpad);
2075 gst_object_unref (pad);
/* GObject::set_property — proxies every property write straight to the inner
 * deinterlace element by pspec name (prop_id unused for dispatch here) */
2079 gst_d3d11_deinterlace_bin_set_property (GObject * object, guint prop_id,
2080 const GValue * value, GParamSpec * pspec)
2082 GstD3D11DeinterlaceBin *self = GST_D3D11_DEINTERLACE_BIN (object);
2084 g_object_set_property (G_OBJECT (self->deinterlace), pspec->name, value);
/* GObject::get_property — proxies every property read from the inner
 * deinterlace element by pspec name, mirroring set_property above */
2088 gst_d3d11_deinterlace_bin_get_property (GObject * object, guint prop_id,
2089 GValue * value, GParamSpec * pspec)
2091 GstD3D11DeinterlaceBin *self = GST_D3D11_DEINTERLACE_BIN (object);
2093 g_object_get_property (G_OBJECT (self->deinterlace), pspec->name, value);
/* Probes the D3D11 device's video-processor deinterlacing capabilities and,
 * if usable, registers two element features against @plugin:
 *   - d3d11deinterlaceelement (GstBaseTransform subclass, see type_info)
 *   - d3d11deinterlace wrapper bin (see bin_type_info)
 * NOTE(review): this extract is lossy — braces, error labels, early returns
 * and some initializer members are missing between the numbered lines. All
 * visible lines below are preserved byte-identically; only comments added. */
2097 gst_d3d11_deinterlace_register (GstPlugin * plugin, GstD3D11Device * device,
2103 gchar *feature_name;
/* GType registration info for the transform element type */
2105 GTypeInfo type_info = {
2106 sizeof (GstD3D11DeinterlaceClass),
2109 (GClassInitFunc) gst_d3d11_deinterlace_class_init,
2112 sizeof (GstD3D11Deinterlace),
2114 (GInstanceInitFunc) gst_d3d11_deinterlace_init,
/* GType registration info for the wrapper bin type */
2116 GTypeInfo bin_type_info = {
2117 sizeof (GstD3D11DeinterlaceBinClass),
2120 (GClassInitFunc) gst_d3d11_deinterlace_bin_class_init,
2123 sizeof (GstD3D11DeinterlaceBin),
2125 (GInstanceInitFunc) gst_d3d11_deinterlace_bin_init,
2127 GstCaps *sink_caps = NULL;
2128 GstCaps *src_caps = NULL;
2129 GstCaps *caps = NULL;
2130 GstCapsFeatures *caps_features;
2131 ID3D11Device *device_handle;
2132 ID3D11DeviceContext *context_handle;
2134 ComPtr<ID3D11VideoDevice> video_device;
2135 ComPtr<ID3D11VideoContext> video_context;
2136 ComPtr<ID3D11VideoProcessorEnumerator> video_proc_enum;
2137 ComPtr<ID3D11VideoProcessorEnumerator1> video_proc_enum1;
2140 D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc;
2141 D3D11_VIDEO_PROCESSOR_CAPS proc_caps = { 0, };
2142 UINT supported_methods = 0;
2143 GstD3D11DeinterlaceMethod default_method;
2148 /* NOTE: processor might be able to handle other formats.
2149 * However, not all YUV formats can be used for render target.
2150 * For instance, DXGI_FORMAT_Y210 and DXGI_FORMAT_Y410 formats cannot be
2151 * render target. In practice, interlaced stream would output of video
2152 * decoders, so NV12/P010/P016 can cover most of real-world use case.
2154 DXGI_FORMAT formats_to_check[] = {
2155 DXGI_FORMAT_NV12, /* NV12 */
2156 DXGI_FORMAT_P010, /* P010_10LE */
2157 DXGI_FORMAT_P016, /* P016_LE */
2159 GValue *supported_formats = NULL;
2160 GstD3D11DeinterlaceClassData *cdata;
2161 guint max_past_frames = 0;
2162 guint max_future_frames = 0;
2165 device_handle = gst_d3d11_device_get_device_handle (device);
2166 context_handle = gst_d3d11_device_get_device_context_handle (device);
/* Obtain the video-device/video-context COM interfaces required for
 * ID3D11VideoProcessor enumeration; bail out if either is unavailable */
2168 hr = device_handle->QueryInterface (IID_PPV_ARGS (&video_device));
2169 if (!gst_d3d11_result (hr, device))
2172 hr = context_handle->QueryInterface (IID_PPV_ARGS (&video_context));
2173 if (!gst_d3d11_result (hr, device))
/* Small dummy 320x240 interlaced content description — only used to create
 * an enumerator for capability queries, not for actual processing */
2176 memset (&desc, 0, sizeof (D3D11_VIDEO_PROCESSOR_CONTENT_DESC));
2177 desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
2178 desc.InputWidth = 320;
2179 desc.InputHeight = 240;
2180 desc.OutputWidth = 320;
2181 desc.OutputHeight = 240;
2182 desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
2184 hr = video_device->CreateVideoProcessorEnumerator (&desc, &video_proc_enum);
2185 if (!gst_d3d11_result (hr, device))
2188 /* We need ID3D11VideoProcessorEnumerator1 interface to check conversion
2189 * capability of device via CheckVideoProcessorFormatConversion() */
2190 hr = video_proc_enum.As (&video_proc_enum1);
2191 if (!gst_d3d11_result (hr, device))
2194 hr = video_proc_enum->GetVideoProcessorCaps (&proc_caps);
2195 if (!gst_d3d11_result (hr, device))
/* Union the deinterlace-method flags and track the largest past/future
 * frame-history requirements across every rate-conversion caps entry */
2198 for (i = 0; i < proc_caps.RateConversionCapsCount; i++) {
2199 D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rate_conv_caps = { 0, };
2201 hr = video_proc_enum->GetVideoProcessorRateConversionCaps (i,
2206 supported_methods |= rate_conv_caps.ProcessorCaps;
2207 max_past_frames = MAX (max_past_frames, rate_conv_caps.PastFrames);
2208 max_future_frames = MAX (max_future_frames, rate_conv_caps.FutureFrames);
/* No deinterlacing capability at all -> nothing to register */
2211 if (supported_methods == 0)
2214 #define IS_SUPPORTED_METHOD(flags,val) (flags & val) == val
2215 blend = IS_SUPPORTED_METHOD (supported_methods,
2216 GST_D3D11_DEINTERLACE_METHOD_BLEND);
2217 bob = IS_SUPPORTED_METHOD (supported_methods,
2218 GST_D3D11_DEINTERLACE_METHOD_BOB);
2219 adaptive = IS_SUPPORTED_METHOD (supported_methods,
2220 GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE);
2221 mocomp = IS_SUPPORTED_METHOD (supported_methods,
2222 GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION);
2223 #undef IS_SUPPORTED_METHOD
2225 if (!blend && !bob && !adaptive && !mocomp)
2228 /* Drop all not supported methods from flags */
2229 supported_methods = supported_methods &
2230 (GST_D3D11_DEINTERLACE_METHOD_BLEND | GST_D3D11_DEINTERLACE_METHOD_BOB |
2231 GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE |
2232 GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION);
2234 /* Prefer bob, it's equivalent to "linear" which is default mode of
2235 * software deinterlace element, also it's fallback mode
2236 * for our "adaptive" and "mocomp" modes. Note that since Direct3D12, "blend"
2237 * mode is no more supported, instead "bob" and "custom" mode are suported
2240 default_method = GST_D3D11_DEINTERLACE_METHOD_BOB;
2241 } else if (adaptive) {
2242 default_method = GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE;
2243 } else if (mocomp) {
2244 default_method = GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION;
2246 default_method = GST_D3D11_DEINTERLACE_METHOD_BLEND;
2248 /* Programming error */
2249 g_return_if_reached ();
/* Collect every candidate format the processor supports both as input and
 * as output, with same-format (BT.709 studio-range) conversion confirmed */
2252 for (i = 0; i < G_N_ELEMENTS (formats_to_check); i++) {
2254 GValue val = G_VALUE_INIT;
2255 GstVideoFormat format;
2256 BOOL supported = FALSE;
2258 hr = video_proc_enum->CheckVideoProcessorFormat (formats_to_check[i],
2263 /* D3D11 video processor can support other conversion at once,
2264 * including color format conversion.
2265 * But not all combinations of in/out pairs can be supported.
2266 * To make things simple, this element will do only deinterlacing
2267 * (might not be optimal in terms of processing power/resource though) */
2269 /* D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_INPUT = 0x1,
2270 * D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_OUTPUT = 0x2,
2271 * MinGW header might not be defining the above enum values */
2272 if ((flags & 0x3) != 0x3)
2275 format = gst_d3d11_dxgi_format_to_gst (formats_to_check[i]);
2276 /* This is programming error! */
2277 if (format == GST_VIDEO_FORMAT_UNKNOWN) {
2278 GST_ERROR ("Couldn't convert DXGI format %d to video format",
2279 formats_to_check[i]);
2283 hr = video_proc_enum1->CheckVideoProcessorFormatConversion
2284 (formats_to_check[i], DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709,
2285 formats_to_check[i], DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709,
2287 if (FAILED (hr) || !supported)
/* Lazily allocate the GST_TYPE_LIST on first supported format */
2290 if (!supported_formats) {
2291 supported_formats = g_new0 (GValue, 1);
2292 g_value_init (supported_formats, GST_TYPE_LIST);
2295 g_value_init (&val, G_TYPE_STRING);
2296 g_value_set_static_string (&val, gst_video_format_to_string (format));
2297 gst_value_list_append_and_take_value (supported_formats, &val);
/* No usable format found -> nothing to register */
2300 if (!supported_formats)
/* Build pad-template caps from the probed format list */
2303 caps = gst_caps_new_empty_simple ("video/x-raw");
2304 /* FIXME: Check supported resolution, it would be different from
2305 * supported max texture dimension */
2306 gst_caps_set_simple (caps,
2307 "width", GST_TYPE_INT_RANGE, 64, 8192,
2308 "height", GST_TYPE_INT_RANGE, 64, 8192, NULL);
2309 gst_caps_set_value (caps, "format", supported_formats);
2310 g_value_unset (supported_formats);
2311 g_free (supported_formats);
2313 /* TODO: Add alternating deinterlace */
/* src caps = D3D11-memory structure + D3D11-memory-with-overlay structure */
2314 src_caps = gst_caps_copy (caps);
2315 caps_features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY,
2317 gst_caps_set_features_simple (src_caps, caps_features);
2319 caps_features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY,
2320 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, NULL);
2321 gst_caps_set_features_simple (caps, caps_features);
2322 gst_caps_append (src_caps, caps);
2324 sink_caps = gst_caps_copy (src_caps);
/* Template caps are intentionally kept for the process lifetime;
 * flag them so the leak tracer does not report them */
2326 GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
2327 GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
/* Pack the probed device capabilities into class data consumed by the
 * class_init functions of both registered types */
2329 cdata = gst_d3d11_deinterlace_class_data_new ();
2330 cdata->sink_caps = sink_caps;
2331 cdata->src_caps = src_caps;
2332 cdata->device_caps.supported_methods =
2333 (GstD3D11DeinterlaceMethod) supported_methods;
2334 cdata->device_caps.default_method = default_method;
2335 cdata->device_caps.max_past_frames = max_past_frames;
2336 cdata->device_caps.max_future_frames = max_future_frames;
2338 g_object_get (device, "adapter", &cdata->adapter,
2339 "device-id", &cdata->device_id, "vendor-id", &cdata->vendor_id,
2340 "description", &cdata->description, NULL);
2341 type_info.class_data = cdata;
2342 bin_type_info.class_data = gst_d3d11_deinterlace_class_data_ref (cdata);
/* Pick unique type/feature names; when this GType name already exists
 * (multiple devices), append the device index to disambiguate */
2344 type_name = g_strdup ("GstD3D11Deinterlace");
2345 feature_name = g_strdup ("d3d11deinterlaceelement");
2347 while (g_type_from_name (type_name)) {
2350 g_free (feature_name);
2351 type_name = g_strdup_printf ("GstD3D11Device%dDeinterlace", index);
2352 feature_name = g_strdup_printf ("d3d11device%ddeinterlaceelement", index);
2355 type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
2356 type_name, &type_info, (GTypeFlags) 0);
2357 cdata->deinterlace_type = type;
2359 if (!gst_element_register (plugin, feature_name, GST_RANK_NONE, type))
2360 GST_WARNING ("Failed to register plugin '%s'", type_name);
2363 g_free (feature_name);
2365 /* Register wrapper bin */
2367 type_name = g_strdup ("GstD3D11DeinterlaceBin");
2368 feature_name = g_strdup ("d3d11deinterlace");
/* Same uniqueness dance for the bin type/feature names */
2370 while (g_type_from_name (type_name)) {
2373 g_free (feature_name);
2374 type_name = g_strdup_printf ("GstD3D11Device%dDeinterlaceBin", index);
2375 feature_name = g_strdup_printf ("d3d11device%ddeinterlace", index);
2378 bin_type = g_type_register_static (GST_TYPE_BIN,
2379 type_name, &bin_type_info, (GTypeFlags) 0);
2381 /* make lower rank than default device */
2382 if (rank > 0 && index != 0)
2385 if (!gst_element_register (plugin, feature_name, rank, bin_type))
2386 GST_WARNING ("Failed to register plugin '%s'", type_name);
2389 g_free (feature_name);