2 * Copyright (C) 2021 Seungha Yang <seungha@centricular.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
21 * SECTION:element-d3d11deinterlaceelement
22 * @title: d3d11deinterlaceelement
24 * Deinterlacing interlaced video frames to progressive video frames by using
25 * ID3D11VideoProcessor API. Depending on the hardware it runs on,
26 * this element will only support a very limited set of video formats.
27 * Use #d3d11deinterlace instead, which will take care of conversion.
37 #include <gst/video/video.h>
38 #include <gst/base/gstbasetransform.h>
40 #include "gstd3d11deinterlace.h"
41 #include "gstd3d11pluginutils.h"
45 GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_deinterlace_debug);
46 #define GST_CAT_DEFAULT gst_d3d11_deinterlace_debug
49 using namespace Microsoft::WRL;
52 /* Deinterlacing Methods:
53 * Direct3D11 provides Blend, Bob, Adaptive, Motion Compensation, and
54 * Inverse Telecine methods. But depending on video processor device,
 * some of the methods might not be supported.
56 * - Blend: the two fields of a interlaced frame are blended into a single
57 * progressive frame. Output rate will be half of input (e.g., 60i -> 30p)
58 * but due to the way of framerate signalling of GStreamer, that is, it uses
59 * frame rate, not field rate for interlaced stream, in/output framerate
60 * of caps will be identical.
61 * - Bob: missing field lines are interpolated from the lines above and below.
62 * Output rate will be the same as that of input (e.g., 60i -> 60p).
 * In other words, the video processor will generate two frames from the two
 * fields of an interlaced frame.
65 * - Adaptive, Motion Compensation: future and past frames are used for
 * reference frames for the deinterlacing process. User should provide sufficient
67 * number of reference frames, otherwise processor device will fallback to
70 * Direct3D11 doesn't provide a method for explicit deinterlacing method
71 * selection. Instead, it could be done indirectly.
72 * - Blend: sets output rate as half via VideoProcessorSetStreamOutputRate().
73 * - Bob: sets output rate as normal. And performs VideoProcessorBlt() twice per
74 * a interlaced frame. D3D11_VIDEO_PROCESSOR_STREAM::OutputIndex needs to be
75 * incremented per field (e.g., OutputIndex = 0 for the first field,
76 * and 1 for the second field).
77 * - Adaptive, Motion Compensation: in addition to the requirement of Bob,
78 * user should provide reference frames via
79 * D3D11_VIDEO_PROCESSOR_STREAM::ppPastSurfaces and
80 * D3D11_VIDEO_PROCESSOR_STREAM::ppFutureSurfaces
  /* Values map 1:1 to D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_*
   * flags, so this flagset can be masked directly against the caps reported
   * by the video processor */
  GST_D3D11_DEINTERLACE_METHOD_BLEND =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BLEND,
  GST_D3D11_DEINTERLACE_METHOD_BOB =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BOB,
  /* NOTE(review): "ADAPTVIE" is a typo for "ADAPTIVE", but the identifier is
   * used throughout this file (and is public API via the GType nick), so it
   * cannot be renamed without breaking callers */
  GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_ADAPTIVE,
  GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_MOTION_COMPENSATION,

  /* TODO: INVERSE_TELECINE */
} GstD3D11DeinterlaceMethod;
98 * GstD3D11DeinterlaceMethod:
100 * Deinterlacing method
104 #define GST_TYPE_D3D11_DEINTERLACE_METHOD (gst_d3d11_deinterlace_method_type())
/* Registers and returns the GType for the deinterlace-method flagset.
 * Thread-safe one-time registration via g_once_init_enter()/leave(). */
gst_d3d11_deinterlace_method_type (void)
  static gsize method_type = 0;

  if (g_once_init_enter (&method_type)) {
    static const GFlagsValue method_types[] = {
      {GST_D3D11_DEINTERLACE_METHOD_BLEND,
          "Blend: Blending top/bottom field pictures into one frame. "
          "Framerate will be preserved (e.g., 60i -> 30p)", "blend"},
      {GST_D3D11_DEINTERLACE_METHOD_BOB,
          "Bob: Interpolating missing lines by using the adjacent lines. "
          "Framerate will be doubled (e,g, 60i -> 60p)", "bob"},
      {GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE,
          "Adaptive: Interpolating missing lines by using spatial/temporal references. "
          "Framerate will be doubled (e,g, 60i -> 60p)",
      {GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION,
          "Motion Compensation: Recreating missing lines by using motion vector. "
          "Framerate will be doubled (e,g, 60i -> 60p)", "mocomp"},
    /* NOTE(review): "e,g," in the descriptions above looks like a typo for
     * "e.g.," -- user-visible strings, confirm before changing */
    GType tmp = g_flags_register_static ("GstD3D11DeinterlaceMethod",
    g_once_init_leave (&method_type, tmp);

  return (GType) method_type;
  /* Flagset of deinterlace methods the video processor reported */
  GstD3D11DeinterlaceMethod supported_methods;
  /* Method to fall back to when the requested one is unsupported */
  GstD3D11DeinterlaceMethod default_method;

  /* Maximum reference frame counts accepted by the processor */
  guint max_past_frames;
  guint max_future_frames;
} GstD3D11DeinterlaceDeviceCaps;
  GType deinterlace_type;

  /* Capabilities probed from this adapter's video processor */
  GstD3D11DeinterlaceDeviceCaps device_caps;
} GstD3D11DeinterlaceClassData;
/* Allocates a zero-initialized per-subclass class data blob.
 * Ownership: caller holds the initial reference; release with _unref(). */
static GstD3D11DeinterlaceClassData *
gst_d3d11_deinterlace_class_data_new (void)
  GstD3D11DeinterlaceClassData *self = g_new0 (GstD3D11DeinterlaceClassData, 1);
/* Atomically takes an additional reference on @data */
static GstD3D11DeinterlaceClassData *
gst_d3d11_deinterlace_class_data_ref (GstD3D11DeinterlaceClassData * data)
  g_assert (data != NULL);

  g_atomic_int_add (&data->ref_count, 1);
gst_d3d11_deinterlace_class_data_unref (GstD3D11DeinterlaceClassData * data)
  g_assert (data != NULL);

  /* Free owned caps and description once the last reference is dropped */
  if (g_atomic_int_dec_and_test (&data->ref_count)) {
    gst_clear_caps (&data->sink_caps);
    gst_clear_caps (&data->src_caps);
    g_free (data->description);
201 PROP_SUPPORTED_METHODS,
204 /* hardcoded maximum queue size for each past/future frame queue */
205 #define MAX_NUM_REFERENCES 2
typedef struct _GstD3D11Deinterlace
  GstBaseTransform parent;

  /* Negotiated video info for sink (in) and src (out) pads */
  GstVideoInfo in_info;
  GstVideoInfo out_info;
  /* Calculated buffer duration by using upstream framerate */
  GstClockTime default_buffer_duration;

  GstD3D11Device *device;

  /* D3D11 video API objects; AddRef'd in open(), released via
   * GST_D3D11_CLEAR_COM in reset()/close() */
  ID3D11VideoDevice *video_device;
  ID3D11VideoContext *video_context;
  ID3D11VideoProcessorEnumerator *video_enum;
  ID3D11VideoProcessor *video_proc;

  /* Effective method after gst_d3d11_deinterlace_update_method() */
  GstD3D11DeinterlaceMethod method;

  /* Reference frames held for adaptive/motion-compensated methods and for
   * timestamp/duration calculation */
  GQueue past_frame_queue;
  GQueue future_frame_queue;
  GstBuffer *to_process;

  /* Queue depth limits derived from device caps and the chosen method */
  guint max_past_frames;
  guint max_future_frames;

  /* D3D11_VIDEO_PROCESSOR_STREAM::InputFrameOrField */

  /* Clear/Update per submit_input_buffer() */
  guint num_output_per_input;
  guint num_transformed;
  gboolean first_output;

  /* Fallback pools used when upstream/downstream buffers cannot be fed to
   * the video processor directly */
  GstBufferPool *fallback_in_pool;
  GstBufferPool *fallback_out_pool;
} GstD3D11Deinterlace;
typedef struct _GstD3D11DeinterlaceClass
  GstBaseTransformClass parent_class;

  /* Per-adapter capabilities copied from class data in class_init() */
  GstD3D11DeinterlaceDeviceCaps device_caps;
} GstD3D11DeinterlaceClass;
static GstElementClass *parent_class = NULL;

#define GST_D3D11_DEINTERLACE(object) ((GstD3D11Deinterlace *) (object))
#define GST_D3D11_DEINTERLACE_GET_CLASS(object) \
    (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object), \
    GstD3D11DeinterlaceClass))
/* Recursive mutex guarding frame queues / processor objects; recursive so
 * helpers that take the lock can be called from locked sections */
#define GST_D3D11_DEINTERLACE_LOCK(self) \
  g_rec_mutex_lock (&GST_D3D11_DEINTERLACE (self)->lock);
#define GST_D3D11_DEINTERLACE_UNLOCK(self) \
  g_rec_mutex_unlock (&GST_D3D11_DEINTERLACE (self)->lock);
268 gst_d3d11_deinterlace_update_method (GstD3D11Deinterlace * self);
269 static void gst_d3d11_deinterlace_reset (GstD3D11Deinterlace * self);
270 static GstFlowReturn gst_d3d11_deinterlace_drain (GstD3D11Deinterlace * self);
272 /* GObjectClass vfunc */
273 static void gst_d3d11_deinterlace_get_property (GObject * object,
274 guint prop_id, GValue * value, GParamSpec * pspec);
275 static void gst_d3d11_deinterlace_set_property (GObject * object, guint prop_id,
276 const GValue * value, GParamSpec * pspec);
277 static void gst_d3d11_deinterlace_finalize (GObject * object);
279 /* GstElementClass vfunc */
280 static void gst_d3d11_deinterlace_set_context (GstElement * element,
281 GstContext * context);
283 /* GstBaseTransformClass vfunc */
284 static gboolean gst_d3d11_deinterlace_start (GstBaseTransform * trans);
285 static gboolean gst_d3d11_deinterlace_stop (GstBaseTransform * trans);
286 static gboolean gst_d3d11_deinterlace_query (GstBaseTransform * trans,
287 GstPadDirection direction, GstQuery * query);
288 static GstCaps *gst_d3d11_deinterlace_transform_caps (GstBaseTransform * trans,
289 GstPadDirection direction, GstCaps * caps, GstCaps * filter);
290 static GstCaps *gst_d3d11_deinterlace_fixate_caps (GstBaseTransform * trans,
291 GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
293 gst_d3d11_deinterlace_propose_allocation (GstBaseTransform * trans,
294 GstQuery * decide_query, GstQuery * query);
296 gst_d3d11_deinterlace_decide_allocation (GstBaseTransform * trans,
298 static gboolean gst_d3d11_deinterlace_set_caps (GstBaseTransform * trans,
299 GstCaps * incaps, GstCaps * outcaps);
301 gst_d3d11_deinterlace_submit_input_buffer (GstBaseTransform * trans,
302 gboolean is_discont, GstBuffer * input);
304 gst_d3d11_deinterlace_generate_output (GstBaseTransform * trans,
305 GstBuffer ** outbuf);
307 gst_d3d11_deinterlace_transform (GstBaseTransform * trans, GstBuffer * inbuf,
309 static gboolean gst_d3d11_deinterlace_sink_event (GstBaseTransform * trans,
311 static void gst_d3d11_deinterlace_before_transform (GstBaseTransform * trans,
/* Per-subclass class init. Installs properties, element metadata, pad
 * templates and GstBaseTransform vfuncs. @data is a ref'd
 * GstD3D11DeinterlaceClassData with this adapter's caps; consumed here. */
gst_d3d11_deinterlace_class_init (GstD3D11DeinterlaceClass * klass,
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
  GstD3D11DeinterlaceClassData *cdata = (GstD3D11DeinterlaceClassData *) data;

  parent_class = (GstElementClass *) g_type_class_peek_parent (klass);

  gobject_class->get_property = gst_d3d11_deinterlace_get_property;
  gobject_class->set_property = gst_d3d11_deinterlace_set_property;
  gobject_class->finalize = gst_d3d11_deinterlace_finalize;

  /* Read-only adapter/device identification properties */
  g_object_class_install_property (gobject_class, PROP_ADAPTER,
      g_param_spec_uint ("adapter", "Adapter",
          "DXGI Adapter index for creating device",
          0, G_MAXUINT32, cdata->adapter,
          (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_DEVICE_ID,
      g_param_spec_uint ("device-id", "Device Id",
          "DXGI Device ID", 0, G_MAXUINT32, 0,
          (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_VENDOR_ID,
      g_param_spec_uint ("vendor-id", "Vendor Id",
          "DXGI Vendor ID", 0, G_MAXUINT32, 0,
          (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
  /* NOTE(review): "Use can set" in the blurb below reads like a typo for
   * "User can set" -- user-visible string, confirm before changing */
  g_object_class_install_property (gobject_class, PROP_METHOD,
      g_param_spec_flags ("method", "Method",
          "Deinterlace Method. Use can set multiple methods as a flagset "
          "and element will select one of method automatically. "
          "If deinterlacing device failed to deinterlace with given mode, "
          "fallback might happen by the device",
          GST_TYPE_D3D11_DEINTERLACE_METHOD, cdata->device_caps.default_method,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              GST_PARAM_MUTABLE_READY)));
  g_object_class_install_property (gobject_class, PROP_SUPPORTED_METHODS,
      g_param_spec_flags ("supported-methods", "Supported Methods",
          "Set of supported deinterlace methods by device",
          GST_TYPE_D3D11_DEINTERLACE_METHOD,
          cdata->device_caps.supported_methods,
          (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));

  element_class->set_context =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_set_context);

  long_name = g_strdup_printf ("Direct3D11 %s Deinterlacer",
  gst_element_class_set_metadata (element_class, long_name,
      "Filter/Effect/Video/Deinterlace/Hardware",
      "A Direct3D11 based deinterlacer",
      "Seungha Yang <seungha@centricular.com>");

  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,

  /* Identical in/out caps means progressive content; basetransform will
   * then run in passthrough mode */
  trans_class->passthrough_on_same_caps = TRUE;

  trans_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_start);
  trans_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_stop);
  trans_class->query = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_query);
  trans_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_transform_caps);
  trans_class->fixate_caps =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_fixate_caps);
  trans_class->propose_allocation =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_propose_allocation);
  trans_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_decide_allocation);
  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_set_caps);
  trans_class->submit_input_buffer =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_submit_input_buffer);
  trans_class->generate_output =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_generate_output);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_transform);
  trans_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_sink_event);
  trans_class->before_transform =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_before_transform);

  /* Copy the per-adapter data into the class, then drop our reference */
  klass->adapter = cdata->adapter;
  klass->device_id = cdata->device_id;
  klass->vendor_id = cdata->vendor_id;
  klass->device_caps = cdata->device_caps;

  gst_d3d11_deinterlace_class_data_unref (cdata);

  gst_type_mark_as_plugin_api (GST_TYPE_D3D11_DEINTERLACE_METHOD,
      (GstPluginAPIFlags) 0);
gst_d3d11_deinterlace_init (GstD3D11Deinterlace * self)
  GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);

  /* Start from the device's default method, then derive queue limits */
  self->method = klass->device_caps.default_method;
  self->default_buffer_duration = GST_CLOCK_TIME_NONE;
  gst_d3d11_deinterlace_update_method (self);

  g_queue_init (&self->past_frame_queue);
  g_queue_init (&self->future_frame_queue);
  g_rec_mutex_init (&self->lock);
gst_d3d11_deinterlace_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (object);
  GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (object);

      /* Static per-class adapter/device identification */
      g_value_set_uint (value, klass->adapter);
      g_value_set_uint (value, klass->device_id);
      g_value_set_uint (value, klass->vendor_id);
      /* Current effective deinterlace method */
      g_value_set_flags (value, self->method);
    case PROP_SUPPORTED_METHODS:
      g_value_set_flags (value, klass->device_caps.supported_methods);
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
456 gst_d3d11_deinterlace_update_method (GstD3D11Deinterlace * self)
458 GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);
459 GstD3D11DeinterlaceMethod requested_method = self->method;
460 gboolean updated = TRUE;
462 /* Verify whether requested method is supported */
463 if ((self->method & klass->device_caps.supported_methods) == 0) {
464 #ifndef GST_DISABLE_GST_DEBUG
465 gchar *supported, *requested;
467 supported = g_flags_to_string (GST_TYPE_D3D11_DEINTERLACE_METHOD,
468 klass->device_caps.supported_methods);
469 requested = g_flags_to_string (GST_TYPE_D3D11_DEINTERLACE_METHOD,
470 klass->device_caps.supported_methods);
472 GST_WARNING_OBJECT (self,
473 "Requested method %s is not supported (supported: %s)",
474 requested, supported);
480 self->method = klass->device_caps.default_method;
485 /* Drop not supported methods */
486 self->method = (GstD3D11DeinterlaceMethod)
487 (klass->device_caps.supported_methods & self->method);
489 /* Single method was requested? */
490 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND ||
491 self->method == GST_D3D11_DEINTERLACE_METHOD_BOB ||
492 self->method == GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE ||
493 self->method == GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION) {
494 if (self->method == requested_method)
497 /* Pick single method from requested */
498 if ((self->method & GST_D3D11_DEINTERLACE_METHOD_BOB) ==
499 GST_D3D11_DEINTERLACE_METHOD_BOB) {
500 self->method = GST_D3D11_DEINTERLACE_METHOD_BOB;
501 } else if ((self->method & GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE) ==
502 GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE) {
503 self->method = GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE;
504 } else if ((self->method & GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION)
505 == GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION) {
506 self->method = GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION;
507 } else if ((self->method & GST_D3D11_DEINTERLACE_METHOD_BLEND) ==
508 GST_D3D11_DEINTERLACE_METHOD_BLEND) {
509 self->method = GST_D3D11_DEINTERLACE_METHOD_BLEND;
511 self->method = klass->device_caps.default_method;
512 g_assert_not_reached ();
517 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND) {
518 /* Both methods don't use reference frame for deinterlacing */
519 self->max_past_frames = self->max_future_frames = 0;
520 } else if (self->method == GST_D3D11_DEINTERLACE_METHOD_BOB) {
521 /* To calculate timestamp and duration of output fraems, we will hold one
522 * future frame even though processor device will not use reference */
523 self->max_past_frames = 0;
524 self->max_future_frames = 1;
526 /* FIXME: how many frames should be allowed? also, this needs to be
528 self->max_past_frames = MIN (klass->device_caps.max_past_frames,
531 /* Likewise Bob, we need at least one future frame for timestamp/duration
533 self->max_future_frames =
534 MAX (MIN (klass->device_caps.max_future_frames, MAX_NUM_REFERENCES), 1);
gst_d3d11_deinterlace_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (object);
      gboolean notify_update = FALSE;

      /* Re-validate the requested method under the object lock; notify
       * outside of the lock to avoid re-entrancy on handlers */
      GST_OBJECT_LOCK (self);
      self->method = (GstD3D11DeinterlaceMethod) g_value_get_flags (value);
      notify_update = gst_d3d11_deinterlace_update_method (self);
      GST_OBJECT_UNLOCK (self);

        g_object_notify (object, "method");
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
gst_d3d11_deinterlace_finalize (GObject * object)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (object);

  g_rec_mutex_clear (&self->lock);

  /* Chain up so base classes release their resources */
  G_OBJECT_CLASS (parent_class)->finalize (object);
gst_d3d11_deinterlace_set_context (GstElement * element, GstContext * context)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (element);
  GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);

  /* Accept a shared GstD3D11Device matching our configured adapter */
  gst_d3d11_handle_set_context (element, context, klass->adapter,

  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
588 gst_d3d11_deinterlace_open (GstD3D11Deinterlace * self)
590 ID3D11VideoDevice *video_device;
591 ID3D11VideoContext *video_context;
593 video_device = gst_d3d11_device_get_video_device_handle (self->device);
595 GST_ERROR_OBJECT (self, "ID3D11VideoDevice is not availale");
599 video_context = gst_d3d11_device_get_video_context_handle (self->device);
600 if (!video_context) {
601 GST_ERROR_OBJECT (self, "ID3D11VideoContext is not available");
605 self->video_device = video_device;
606 video_device->AddRef ();
608 self->video_context = video_context;
609 video_context->AddRef ();
/* Must be called with lock taken.
 * Resets per-stream frame accounting and drops all queued frames; used on
 * reset/flush so the next input starts a fresh deinterlacing sequence. */
gst_d3d11_deinterlace_reset_history (GstD3D11Deinterlace * self)
  self->input_index = 0;
  self->num_output_per_input = 1;
  self->num_transformed = 0;
  self->first_output = TRUE;

  /* Drop all queued reference frames and any pending input */
  g_queue_clear_full (&self->past_frame_queue,
      (GDestroyNotify) gst_buffer_unref);
  g_queue_clear_full (&self->future_frame_queue,
      (GDestroyNotify) gst_buffer_unref);
  gst_clear_buffer (&self->to_process);
gst_d3d11_deinterlace_reset (GstD3D11Deinterlace * self)
  GST_D3D11_DEINTERLACE_LOCK (self);
  /* Deactivate and drop fallback pools */
  if (self->fallback_in_pool) {
    gst_buffer_pool_set_active (self->fallback_in_pool, FALSE);
    gst_object_unref (self->fallback_in_pool);
    self->fallback_in_pool = NULL;

  if (self->fallback_out_pool) {
    gst_buffer_pool_set_active (self->fallback_out_pool, FALSE);
    gst_object_unref (self->fallback_out_pool);
    self->fallback_out_pool = NULL;

  /* Release processor objects; video device/context are kept until close() */
  GST_D3D11_CLEAR_COM (self->video_enum);
  GST_D3D11_CLEAR_COM (self->video_proc);

  gst_d3d11_deinterlace_reset_history (self);
  self->default_buffer_duration = GST_CLOCK_TIME_NONE;

  GST_D3D11_DEINTERLACE_UNLOCK (self);
gst_d3d11_deinterlace_close (GstD3D11Deinterlace * self)
  gst_d3d11_deinterlace_reset (self);

  /* Drop the AddRef'd interfaces taken in open() and the device itself */
  GST_D3D11_CLEAR_COM (self->video_device);
  GST_D3D11_CLEAR_COM (self->video_context);

  gst_clear_object (&self->device);
gst_d3d11_deinterlace_start (GstBaseTransform * trans)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);

  /* Create or look up the shared GstD3D11Device for our adapter */
  if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), klass->adapter,
    GST_ERROR_OBJECT (self, "Couldn't create d3d11device");

  if (!gst_d3d11_deinterlace_open (self)) {
    GST_ERROR_OBJECT (self, "Couldn't open video device");
    /* Roll back partial setup before failing */
    gst_d3d11_deinterlace_close (self);
gst_d3d11_deinterlace_stop (GstBaseTransform * trans)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);

  /* Releases device handles, processor objects and queued frames */
  gst_d3d11_deinterlace_close (self);
gst_d3d11_deinterlace_query (GstBaseTransform * trans,
    GstPadDirection direction, GstQuery * query)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONTEXT:
      /* Answer d3d11 context queries with our device; everything else is
       * delegated to the base class */
      if (gst_d3d11_handle_context_query (GST_ELEMENT_CAST (self),
              query, self->device)) {

  return GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
/* Builds the set of caps we can transform to by removing interlace-mode and
 * field-order (and optionally framerate, for rate-doubling methods) from
 * every structure carrying the D3D11 memory feature. Returns new caps. */
gst_d3d11_deinterlace_remove_interlace_info (GstCaps * caps,
    gboolean remove_framerate)
  GstCapsFeatures *feature =
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY);

  res = gst_caps_new_empty ();

  n = gst_caps_get_size (caps);
  for (i = 0; i < n; i++) {
    st = gst_caps_get_structure (caps, i);
    f = gst_caps_get_features (caps, i);

    /* If this is already expressed by the existing caps
     * skip this structure */
    if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))

    st = gst_structure_copy (st);
    /* Only remove format info for the cases when we can actually convert */
    if (!gst_caps_features_is_any (f)
        && gst_caps_features_is_equal (f, feature)) {
      if (remove_framerate) {
        gst_structure_remove_fields (st, "interlace-mode", "field-order",
        gst_structure_remove_fields (st, "interlace-mode", "field-order", NULL);

    gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));

  gst_caps_features_free (feature);
gst_d3d11_deinterlace_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);

  /* Get all possible caps that we can transform to */
  tmp = gst_d3d11_deinterlace_remove_interlace_info (caps,
      /* Non-blend mode will double framerate */
      self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND);

    /* Apply the caller's filter when one is provided */
    tmp2 = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (tmp);

  GST_DEBUG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " into %"
      GST_PTR_FORMAT, caps, result);
/* Fixates othercaps against caps. For sink->src fixation, doubles the
 * framerate for non-blend methods and forwards interlace-mode=progressive
 * so basetransform can enable passthrough for progressive input. */
gst_d3d11_deinterlace_fixate_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  const gchar *interlace_mode;

  othercaps = gst_caps_truncate (othercaps);
  othercaps = gst_caps_make_writable (othercaps);

  /* Upstream-direction fixation needs no framerate adjustment */
  if (direction == GST_PAD_SRC)
    return gst_caps_fixate (othercaps);

  tmp = gst_caps_copy (caps);
  tmp = gst_caps_fixate (tmp);

  if (!gst_video_info_from_caps (&info, tmp)) {
    GST_WARNING_OBJECT (self, "Invalid caps %" GST_PTR_FORMAT, caps);
    gst_caps_unref (tmp);

    return gst_caps_fixate (othercaps);

  s = gst_caps_get_structure (tmp, 0);
  if (gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d)) {
    /* for non-blend method, output framerate will be doubled */
    if (self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND &&
        GST_VIDEO_INFO_IS_INTERLACED (&info)) {

    gst_caps_set_simple (othercaps,
        "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);

  interlace_mode = gst_structure_get_string (s, "interlace-mode");
  if (g_strcmp0 ("progressive", interlace_mode) == 0) {
    /* Just forward interlace-mode=progressive.
     * By this way, basetransform will enable passthrough for non-interlaced
    gst_caps_set_simple (othercaps,
        "interlace-mode", G_TYPE_STRING, "progressive", NULL);

  gst_caps_unref (tmp);

  return gst_caps_fixate (othercaps);
/* Proposes a d3d11 buffer pool to upstream, sized so that enough buffers
 * exist to cover the reference frames we hold per method. */
gst_d3d11_deinterlace_propose_allocation (GstBaseTransform * trans,
    GstQuery * decide_query, GstQuery * query)
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  GstD3D11AllocationParams *d3d11_params;
  guint min_buffers = 0;

  if (!GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
          decide_query, query))

  /* passthrough, we're done */
  if (decide_query == NULL)

  gst_query_parse_allocation (query, &caps, NULL);

  if (!gst_video_info_from_caps (&info, caps))

  /* Reuse an already-proposed d3d11 pool only when it belongs to our device */
  n_pools = gst_query_get_n_allocation_pools (query);
  for (i = 0; i < n_pools; i++) {
    gst_query_parse_nth_allocation_pool (query, i, &pool, NULL, NULL, NULL);

    if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
      gst_clear_object (&pool);
      GstD3D11BufferPool *dpool = GST_D3D11_BUFFER_POOL (pool);
      if (dpool->device != self->device)
        gst_clear_object (&pool);

    pool = gst_d3d11_buffer_pool_new (self->device);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);
    d3d11_params = gst_d3d11_allocation_params_new (self->device, &info,
        (GstD3D11AllocationFlags) 0, D3D11_BIND_RENDER_TARGET);
    d3d11_params->desc[0].BindFlags |= D3D11_BIND_RENDER_TARGET;

  gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
  gst_d3d11_allocation_params_free (d3d11_params);

  /* Minimum buffer count depends on how many reference frames we hold */
  if (self->method == GST_D3D11_DEINTERLACE_METHOD_BOB) {
    /* For non-blend methods, we will produce two progressive frames from
     * a single interlaced frame. To determine timestamp and duration,
     * we might need to hold one past frame if buffer duration is unknown */
  } else if (self->method == GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE ||
      self->method == GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION) {
    /* For advanced deinterlacing methods, we will hold more frame so that
     * device can use them as reference frames */
    min_buffers += self->max_past_frames;
    min_buffers += self->max_future_frames;
    /* And one for current frame */

  /* we will hold at least one frame for timestamp/duration calculation */
  min_buffers = MAX (min_buffers, 2);

  /* size will be updated by d3d11 buffer pool */
  gst_buffer_pool_config_set_params (config, caps, 0, min_buffers, 0);

  if (!gst_buffer_pool_set_config (pool, config))

  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  gst_query_add_allocation_meta (query,
      GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL);

  /* d3d11 buffer pool will update buffer size based on allocated texture,
   * get size from config again */
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
  gst_structure_free (config);

  gst_query_add_allocation_pool (query, pool, size, min_buffers, 0);

  gst_object_unref (pool);

  /* Error path: pool configuration failed */
  GST_ERROR_OBJECT (self, "failed to set config");
  gst_object_unref (pool);
/* Decides the output pool: reuses a downstream-provided d3d11 pool on our
 * device or creates a new one, ensuring render-target bindable textures. */
gst_d3d11_deinterlace_decide_allocation (GstBaseTransform * trans,
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstCaps *outcaps = NULL;
  GstBufferPool *pool = NULL;
  guint size, min = 0, max = 0;
  GstStructure *config;
  GstD3D11AllocationParams *d3d11_params;
  gboolean update_pool = FALSE;

  gst_query_parse_allocation (query, &outcaps, NULL);

  if (!gst_video_info_from_caps (&info, outcaps))

  size = GST_VIDEO_INFO_SIZE (&info);

  /* Reuse the first proposed pool when it is a d3d11 pool on our device */
  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
      gst_clear_object (&pool);
      GstD3D11BufferPool *dpool = GST_D3D11_BUFFER_POOL (pool);
      if (dpool->device != self->device)
        gst_clear_object (&pool);

    pool = gst_d3d11_buffer_pool_new (self->device);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);
    d3d11_params = gst_d3d11_allocation_params_new (self->device, &info,
        (GstD3D11AllocationFlags) 0, D3D11_BIND_RENDER_TARGET);
    /* Processor output textures must be bindable as render target */
    d3d11_params->desc[0].BindFlags |= D3D11_BIND_RENDER_TARGET;

  gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
  gst_d3d11_allocation_params_free (d3d11_params);

  gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  /* d3d11 buffer pool will update buffer size based on allocated texture,
   * get size from config again */
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
  gst_structure_free (config);

    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans,
/* (Re)creates the fallback input/output d3d11 pools used when incoming or
 * outgoing buffers cannot be fed to the video processor directly. Any
 * previously queued frames and old pools are released first. */
gst_d3d11_deinterlace_prepare_fallback_pool (GstD3D11Deinterlace * self,
    GstCaps * in_caps, GstVideoInfo * in_info, GstCaps * out_caps,
    GstVideoInfo * out_info)
  GstD3D11AllocationParams *d3d11_params;

  /* Clearing potentially remaining resource here would be redundant.
   * Just to be safe enough */
  g_queue_clear_full (&self->past_frame_queue,
      (GDestroyNotify) gst_buffer_unref);
  g_queue_clear_full (&self->future_frame_queue,
      (GDestroyNotify) gst_buffer_unref);

  if (self->fallback_in_pool) {
    gst_buffer_pool_set_active (self->fallback_in_pool, FALSE);
    gst_object_unref (self->fallback_in_pool);
    self->fallback_in_pool = NULL;

  if (self->fallback_out_pool) {
    gst_buffer_pool_set_active (self->fallback_out_pool, FALSE);
    gst_object_unref (self->fallback_out_pool);
    self->fallback_out_pool = NULL;

  /* Empty bind flag is allowed for video processor input */
  d3d11_params = gst_d3d11_allocation_params_new (self->device, in_info,
      (GstD3D11AllocationFlags) 0, 0);
  self->fallback_in_pool = gst_d3d11_buffer_pool_new_with_options (self->device,
      in_caps, d3d11_params, 0, 0);
  gst_d3d11_allocation_params_free (d3d11_params);

  if (!self->fallback_in_pool) {
    GST_ERROR_OBJECT (self, "Failed to create input fallback buffer pool");

  /* For processor output, render target bind flag is required */
  d3d11_params = gst_d3d11_allocation_params_new (self->device, out_info,
      (GstD3D11AllocationFlags) 0, D3D11_BIND_RENDER_TARGET);
  self->fallback_out_pool =
      gst_d3d11_buffer_pool_new_with_options (self->device,
      out_caps, d3d11_params, 0, 0);
  gst_d3d11_allocation_params_free (d3d11_params);

  if (!self->fallback_out_pool) {
    GST_ERROR_OBJECT (self, "Failed to create output fallback buffer pool");
    gst_clear_object (&self->fallback_out_pool);
/* GstBaseTransform::set_caps vfunc. Validates negotiated caps, creates the
 * ID3D11VideoProcessorEnumerator / ID3D11VideoProcessor matching the selected
 * deinterlace method, prepares fallback pools, and configures stream rects
 * and output rate on the video context.
 * NOTE(review): several lines (returns, closing braces, some arguments) are
 * missing from this chunk; comments only, code untouched. */
1083 gst_d3d11_deinterlace_set_caps (GstBaseTransform * trans,
1084 GstCaps * incaps, GstCaps * outcaps)
1086 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1087 GstVideoInfo in_info, out_info;
1089 ComPtr<ID3D11VideoProcessorEnumerator> video_enum;
1090 ComPtr<ID3D11VideoProcessor> video_proc;
1092 D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc;
1093 D3D11_VIDEO_PROCESSOR_CAPS proc_caps;
1094 D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rate_conv_caps;
1095 D3D11_VIDEO_PROCESSOR_OUTPUT_RATE output_rate =
1096 D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL;
/* Nothing to configure when operating in passthrough mode */
1101 if (gst_base_transform_is_passthrough (trans))
1104 if (!gst_video_info_from_caps (&in_info, incaps)) {
1105 GST_ERROR_OBJECT (self, "Invalid input caps %" GST_PTR_FORMAT, incaps);
1109 if (!gst_video_info_from_caps (&out_info, outcaps)) {
1110 GST_ERROR_OBJECT (self, "Invalid output caps %" GST_PTR_FORMAT, outcaps);
1114 self->in_info = in_info;
1115 self->out_info = out_info;
1117 /* Calculate expected buffer duration. We might need to reference this value
1118 * when buffer duration is unknown */
1119 if (GST_VIDEO_INFO_FPS_N (&in_info) > 0 &&
1120 GST_VIDEO_INFO_FPS_D (&in_info) > 0) {
1121 self->default_buffer_duration =
1122 gst_util_uint64_scale_int (GST_SECOND, GST_VIDEO_INFO_FPS_D (&in_info),
1123 GST_VIDEO_INFO_FPS_N (&in_info));
1125 /* Assume 25 fps. We need this for reporting latency at least */
1126 self->default_buffer_duration =
1127 gst_util_uint64_scale_int (GST_SECOND, 1, 25);
/* Drop any previously created processor objects / queued frames */
1130 gst_d3d11_deinterlace_reset (self);
/* Progressive input needs no processing; switch to passthrough */
1133 if (!GST_VIDEO_INFO_IS_INTERLACED (&in_info)) {
1134 gst_base_transform_set_passthrough (trans, TRUE);
1139 /* TFF or BFF is not important here, this is just for enumerating
1140 * available deinterlace devices */
1141 memset (&desc, 0, sizeof (D3D11_VIDEO_PROCESSOR_CONTENT_DESC));
1143 desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
1144 if (GST_VIDEO_INFO_FIELD_ORDER (&in_info) ==
1145 GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST)
1146 desc.InputFrameFormat =
1147 D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
1148 desc.InputWidth = GST_VIDEO_INFO_WIDTH (&in_info);
1149 desc.InputHeight = GST_VIDEO_INFO_HEIGHT (&in_info);
1150 desc.OutputWidth = GST_VIDEO_INFO_WIDTH (&out_info);
1151 desc.OutputHeight = GST_VIDEO_INFO_HEIGHT (&out_info);
1152 desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
1154 hr = self->video_device->CreateVideoProcessorEnumerator (&desc, &video_enum);
1155 if (!gst_d3d11_result (hr, self->device)) {
1156 GST_ERROR_OBJECT (self, "Couldn't create VideoProcessorEnumerator");
1160 hr = video_enum->GetVideoProcessorCaps (&proc_caps);
1161 if (!gst_d3d11_result (hr, self->device)) {
1162 GST_ERROR_OBJECT (self, "Couldn't query processor caps");
1166 /* Shouldn't happen, we checked this already during plugin_init */
1167 if (proc_caps.RateConversionCapsCount == 0) {
1168 GST_ERROR_OBJECT (self, "Deinterlacing is not supported");
/* Find the rate-conversion caps group that supports the requested method */
1172 for (i = 0; i < proc_caps.RateConversionCapsCount; i++) {
1173 hr = video_enum->GetVideoProcessorRateConversionCaps (i, &rate_conv_caps);
1177 if ((rate_conv_caps.ProcessorCaps & self->method) == self->method)
1181 if (i >= proc_caps.RateConversionCapsCount) {
1182 GST_ERROR_OBJECT (self, "Deinterlacing method 0x%x is not supported",
1187 hr = self->video_device->CreateVideoProcessor (video_enum.Get (),
1189 if (!gst_d3d11_result (hr, self->device)) {
1190 GST_ERROR_OBJECT (self, "Couldn't create processor");
1194 if (!gst_d3d11_deinterlace_prepare_fallback_pool (self, incaps, &in_info,
1195 outcaps, &out_info)) {
1196 GST_ERROR_OBJECT (self, "Couldn't prepare fallback buffer pool");
/* Transfer ownership out of the ComPtr wrappers into the instance */
1200 self->video_enum = video_enum.Detach ();
1201 self->video_proc = video_proc.Detach ();
1205 rect.right = GST_VIDEO_INFO_WIDTH (&self->in_info);
1206 rect.bottom = GST_VIDEO_INFO_HEIGHT (&self->in_info);
1208 /* Blending seems to be considered as half rate. See also
1209 * https://docs.microsoft.com/en-us/windows/win32/api/d3d12video/ns-d3d12video-d3d12_video_process_input_stream_rate */
1210 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND)
1211 output_rate = D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_HALF;
/* Video context calls must be serialized via the device lock */
1213 gst_d3d11_device_lock (self->device);
1214 self->video_context->VideoProcessorSetStreamSourceRect (self->video_proc,
1216 self->video_context->VideoProcessorSetStreamDestRect (self->video_proc,
1218 self->video_context->VideoProcessorSetOutputTargetRect (self->video_proc,
1220 self->video_context->
1221 VideoProcessorSetStreamAutoProcessingMode (self->video_proc, 0, FALSE);
1222 self->video_context->VideoProcessorSetStreamOutputRate (self->video_proc, 0,
1223 output_rate, TRUE, NULL);
1224 gst_d3d11_device_unlock (self->device);
/* Returns the ID3D11VideoProcessorInputView for @buffer, or fails (view not
 * visible in this chunk, presumably returns NULL) when the buffer does not
 * hold exactly one GstD3D11Memory allocated by our device.
 * NOTE(review): function name keeps the upstream "deinterace" spelling;
 * renaming would break the call sites below. */
1229 static ID3D11VideoProcessorInputView *
1230 gst_d3d11_deinterace_get_piv_from_buffer (GstD3D11Deinterlace * self,
1234 GstD3D11Memory *dmem;
1235 ID3D11VideoProcessorInputView *piv;
/* Video processor input must be a single memory object */
1237 if (gst_buffer_n_memory (buffer) != 1) {
1238 GST_WARNING_OBJECT (self, "Input buffer has more than one memory");
1242 mem = gst_buffer_peek_memory (buffer, 0);
1243 if (!gst_is_d3d11_memory (mem)) {
1244 GST_WARNING_OBJECT (self, "Input buffer is holding non-D3D11 memory");
1248 dmem = (GstD3D11Memory *) mem;
/* Views are only valid for resources created on the same device */
1249 if (dmem->device != self->device) {
1250 GST_WARNING_OBJECT (self,
1251 "Input D3D11 memory was allocated by other device");
1255 piv = gst_d3d11_memory_get_processor_input_view (dmem,
1256 self->video_device, self->video_enum);
1258 GST_WARNING_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
/* Takes ownership of @input and returns a buffer that is guaranteed to be
 * usable as processor input: either @input itself (when a PIV is available)
 * or a copy into a buffer from the fallback input pool. Returns NULL on
 * failure; @input is unreffed on every path that replaces or drops it. */
1266 gst_d3d11_deinterlace_ensure_input_buffer (GstD3D11Deinterlace * self,
1269 GstD3D11Memory *dmem;
1270 ID3D11VideoProcessorInputView *piv;
1271 GstBuffer *new_buf = NULL;
/* Fast path: input already provides a processor input view */
1276 piv = gst_d3d11_deinterace_get_piv_from_buffer (self, input);
1280 if (!self->fallback_in_pool ||
1281 !gst_buffer_pool_set_active (self->fallback_in_pool, TRUE) ||
1282 gst_buffer_pool_acquire_buffer (self->fallback_in_pool, &new_buf,
1283 NULL) != GST_FLOW_OK) {
1284 GST_ERROR_OBJECT (self, "Fallback input buffer is unavailable");
1285 gst_buffer_unref (input);
/* Device-side copy of the frame data into the fallback buffer */
1290 if (!gst_d3d11_buffer_copy_into (new_buf, input, &self->in_info)) {
1291 GST_ERROR_OBJECT (self, "Couldn't copy input buffer to fallback buffer");
1292 gst_buffer_unref (new_buf);
1293 gst_buffer_unref (input);
/* Sanity check: the fallback buffer itself must yield a PIV */
1298 dmem = (GstD3D11Memory *) gst_buffer_peek_memory (new_buf, 0);
1299 piv = gst_d3d11_memory_get_processor_input_view (dmem,
1300 self->video_device, self->video_enum);
1302 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
1303 gst_buffer_unref (new_buf);
1304 gst_buffer_unref (input);
1309 /* copy metadata, default implementation of baseclass will copy everything
1311 GST_BASE_TRANSFORM_CLASS (parent_class)->copy_metadata
1312 (GST_BASE_TRANSFORM_CAST (self), input, new_buf);
1314 gst_buffer_unref (input);
/* Queues @buffer (NULL means drain) into the future-frame queue. Once the
 * queue exceeds max_future_frames (or on drain), pops the oldest frame into
 * self->to_process and fixes up its PTS/duration so that non-blend methods
 * can emit two output frames per interlaced input frame. */
1319 static GstFlowReturn
1320 gst_d3d11_deinterlace_submit_future_frame (GstD3D11Deinterlace * self,
1323 GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (self);
1326 /* push tail and pop head, so that head frame can be the nearest frame
1327 * of current frame */
1329 g_queue_push_tail (&self->future_frame_queue, buffer);
1331 len = g_queue_get_length (&self->future_frame_queue);
1333 g_assert (len <= self->max_future_frames + 1);
/* A leftover to_process buffer indicates a logic error upstream of here */
1335 if (self->to_process) {
1336 GST_WARNING_OBJECT (self, "Found uncleared processing buffer");
1337 gst_clear_buffer (&self->to_process);
1340 if (len > self->max_future_frames ||
1341 /* NULL means drain */
1342 (buffer == NULL && len > 0)) {
1343 GstClockTime cur_timestmap = GST_CLOCK_TIME_NONE;
1344 GstClockTime duration = GST_CLOCK_TIME_NONE;
1345 GstBuffer *next_buf;
1348 (GstBuffer *) g_queue_pop_head (&self->future_frame_queue);
1350 /* For non-blend methods, we will produce two frames from a single
1351 * interlaced frame. So, sufficiently correct buffer duration is required
1352 * to set timestamp for the second output frame */
1353 if (self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND) {
1354 if (GST_BUFFER_PTS_IS_VALID (self->to_process)) {
1355 cur_timestmap = GST_BUFFER_PTS (self->to_process);
1357 cur_timestmap = GST_BUFFER_DTS (self->to_process);
1360 /* Ensure buffer duration */
1361 next_buf = (GstBuffer *) g_queue_peek_head (&self->future_frame_queue);
1362 if (next_buf && GST_CLOCK_STIME_IS_VALID (cur_timestmap)) {
1363 GstClockTime next_timestamp;
1365 if (GST_BUFFER_PTS_IS_VALID (next_buf)) {
1366 next_timestamp = GST_BUFFER_PTS (next_buf);
1368 next_timestamp = GST_BUFFER_DTS (next_buf);
/* Derive duration from timestamp delta, honoring playback direction */
1371 if (GST_CLOCK_STIME_IS_VALID (next_timestamp)) {
1372 if (trans->segment.rate >= 0.0 && next_timestamp > cur_timestmap) {
1373 duration = next_timestamp - cur_timestmap;
1374 } else if (trans->segment.rate < 0.0
1375 && next_timestamp < cur_timestmap) {
1376 duration = cur_timestmap - next_timestamp;
1381 /* Make sure that we can update buffer duration safely */
1382 self->to_process = gst_buffer_make_writable (self->to_process);
1383 if (GST_CLOCK_TIME_IS_VALID (duration)) {
1384 GST_BUFFER_DURATION (self->to_process) = duration;
1386 GST_BUFFER_DURATION (self->to_process) = self->default_buffer_duration;
1389 /* Bonus points, DTS doesn't make sense for raw video frame */
1390 GST_BUFFER_PTS (self->to_process) = cur_timestmap;
1391 GST_BUFFER_DTS (self->to_process) = GST_CLOCK_TIME_NONE;
1393 /* And mark the number of output frames for this input frame */
1394 self->num_output_per_input = 2;
1396 self->num_output_per_input = 1;
1399 self->first_output = TRUE;
/* GstBaseTransform::submit_input_buffer vfunc. Lets the base class apply QoS
 * first, then takes the queued buffer, guarantees it is processor-usable via
 * ensure_input_buffer(), and submits it to the future-frame queue. */
1405 static GstFlowReturn
1406 gst_d3d11_deinterlace_submit_input_buffer (GstBaseTransform * trans,
1407 gboolean is_discont, GstBuffer * input)
1409 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1413 /* Let baseclass handle QoS first */
1414 ret = GST_BASE_TRANSFORM_CLASS (parent_class)->submit_input_buffer (trans,
1416 if (ret != GST_FLOW_OK)
1419 if (gst_base_transform_is_passthrough (trans))
1422 /* at this moment, baseclass must hold queued_buf */
1423 g_assert (trans->queued_buf != NULL);
1425 /* Check if we can use this buffer directly. If not, copy this into
1426 * our fallback buffer */
1427 buf = trans->queued_buf;
1428 trans->queued_buf = NULL;
1430 buf = gst_d3d11_deinterlace_ensure_input_buffer (self, buf);
1432 GST_ERROR_OBJECT (self, "Invalid input buffer");
1433 return GST_FLOW_ERROR;
1436 return gst_d3d11_deinterlace_submit_future_frame (self, buf);
/* Output-side twin of get_piv_from_buffer(): returns the
 * ID3D11VideoProcessorOutputView for @buffer when it holds exactly one
 * GstD3D11Memory allocated by our device; warns and fails otherwise. */
1439 static ID3D11VideoProcessorOutputView *
1440 gst_d3d11_deinterace_get_pov_from_buffer (GstD3D11Deinterlace * self,
1444 GstD3D11Memory *dmem;
1445 ID3D11VideoProcessorOutputView *pov;
1447 if (gst_buffer_n_memory (buffer) != 1) {
1448 GST_WARNING_OBJECT (self, "Output buffer has more than one memory");
1452 mem = gst_buffer_peek_memory (buffer, 0);
1453 if (!gst_is_d3d11_memory (mem)) {
1454 GST_WARNING_OBJECT (self, "Output buffer is holding non-D3D11 memory");
1458 dmem = (GstD3D11Memory *) mem;
/* Output views are device-bound, same as input views */
1459 if (dmem->device != self->device) {
1460 GST_WARNING_OBJECT (self,
1461 "Output D3D11 memory was allocated by other device");
1465 pov = gst_d3d11_memory_get_processor_output_view (dmem,
1466 self->video_device, self->video_enum);
1468 GST_WARNING_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
/* Takes ownership of @output and returns a buffer usable as processor output
 * target: either @output itself or a replacement from the fallback output
 * pool (with metadata copied over). Returns NULL on failure. Unlike the
 * input path, no pixel copy is needed — the processor renders into it. */
1476 gst_d3d11_deinterlace_ensure_output_buffer (GstD3D11Deinterlace * self,
1479 GstD3D11Memory *dmem;
1480 ID3D11VideoProcessorOutputView *pov;
1481 GstBuffer *new_buf = NULL;
/* Fast path: downstream buffer already provides an output view */
1483 pov = gst_d3d11_deinterace_get_pov_from_buffer (self, output);
1487 if (!self->fallback_out_pool ||
1488 !gst_buffer_pool_set_active (self->fallback_out_pool, TRUE) ||
1489 gst_buffer_pool_acquire_buffer (self->fallback_out_pool, &new_buf,
1490 NULL) != GST_FLOW_OK) {
1491 GST_ERROR_OBJECT (self, "Fallback output buffer is unavailable");
1492 gst_buffer_unref (output);
1497 dmem = (GstD3D11Memory *) gst_buffer_peek_memory (new_buf, 0);
1498 pov = gst_d3d11_memory_get_processor_output_view (dmem,
1499 self->video_device, self->video_enum);
1501 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
1502 gst_buffer_unref (new_buf);
1503 gst_buffer_unref (output);
1508 /* copy metadata, default implementation of baseclass will copy everything
1510 GST_BASE_TRANSFORM_CLASS (parent_class)->copy_metadata
1511 (GST_BASE_TRANSFORM_CAST (self), output, new_buf);
1513 gst_buffer_unref (output);
/* Pushes a processed frame onto the past-frame (reference) queue and trims
 * the queue to max_past_frames, unreffing evicted frames. */
1518 static GstFlowReturn
1519 gst_d3d11_deinterlace_submit_past_frame (GstD3D11Deinterlace * self,
1522 /* push head and pop tail, so that head frame can be the nearest frame
1523 * of current frame */
1524 g_queue_push_head (&self->past_frame_queue, buffer);
1525 while (g_queue_get_length (&self->past_frame_queue) > self->max_past_frames) {
1526 GstBuffer *to_drop =
1527 (GstBuffer *) g_queue_pop_tail (&self->past_frame_queue);
1530 gst_buffer_unref (to_drop);
/* GstBaseTransform::generate_output vfunc. Produces one output frame from
 * self->to_process (may be called twice per input for non-blend methods),
 * adjusting PTS/duration of the second field, then moves the fully consumed
 * input frame to the past-frame queue. */
1536 static GstFlowReturn
1537 gst_d3d11_deinterlace_generate_output (GstBaseTransform * trans,
1538 GstBuffer ** outbuf)
1540 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1541 GstFlowReturn ret = GST_FLOW_OK;
1543 GstBuffer *buf = NULL;
1545 if (gst_base_transform_is_passthrough (trans)) {
1546 return GST_BASE_TRANSFORM_CLASS (parent_class)->generate_output (trans,
1551 inbuf = self->to_process;
/* Ask the base class for a downstream output buffer */
1556 GST_BASE_TRANSFORM_CLASS (parent_class)->prepare_output_buffer (trans,
1559 if (ret != GST_FLOW_OK || !buf) {
1560 GST_WARNING_OBJECT (trans, "could not get buffer from pool: %s",
1561 gst_flow_get_name (ret));
1566 g_assert (inbuf != buf);
1568 buf = gst_d3d11_deinterlace_ensure_output_buffer (self, buf);
1570 GST_ERROR_OBJECT (self, "Failed to allocate output buffer to process");
1572 return GST_FLOW_ERROR;
1575 ret = gst_d3d11_deinterlace_transform (trans, inbuf, buf);
1576 if (ret != GST_FLOW_OK) {
1577 gst_buffer_unref (buf);
1581 g_assert (self->num_output_per_input == 1 || self->num_output_per_input == 2);
1583 /* Update timestamp and buffer duration.
1584 * Here, PTS and duration of inbuf must be valid,
1585 * unless there's programing error, since we updated timestamp and duration
1586 * already around submit_input_buffer() */
1587 if (self->num_output_per_input == 2) {
1588 if (!GST_BUFFER_DURATION_IS_VALID (inbuf)) {
1589 GST_LOG_OBJECT (self, "Input buffer duration is unknown");
1590 } else if (!GST_BUFFER_PTS_IS_VALID (inbuf)) {
1591 GST_LOG_OBJECT (self, "Input buffer timestamp is unknown");
/* Each field gets half of the input frame's duration */
1593 GstClockTime duration = GST_BUFFER_DURATION (inbuf) / 2;
1594 gboolean second_field = FALSE;
1596 if (self->first_output) {
1597 /* For reverse playback, first output is the second field */
1598 if (trans->segment.rate < 0)
1599 second_field = TRUE;
1601 second_field = FALSE;
1603 if (trans->segment.rate < 0)
1604 second_field = FALSE;
1606 second_field = TRUE;
1609 GST_BUFFER_DURATION (buf) = duration;
/* The second field is shifted forward by one field duration */
1611 GST_BUFFER_PTS (buf) = GST_BUFFER_PTS (buf) + duration;
1617 self->first_output = FALSE;
1618 self->num_transformed++;
1619 /* https://docs.microsoft.com/en-us/windows/win32/api/d3d12video/ns-d3d12video-d3d12_video_process_input_stream_rate */
1620 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND) {
1621 self->input_index += 2;
1623 self->input_index++;
1626 if (self->num_output_per_input <= self->num_transformed) {
1627 /* Move processed frame to past_frame queue */
1628 gst_d3d11_deinterlace_submit_past_frame (self, self->to_process);
1629 self->to_process = NULL;
/* Performs the actual deinterlace blit: resolves input/output processor
 * views, determines the per-frame field format (handling mixed/unknown
 * interlacing via buffer flags), gathers past/future reference surfaces for
 * methods that need them, and calls VideoProcessorBlt under the device lock.
 * NOTE(review): "frame_foramt" is an upstream variable-name typo; left
 * untouched since this edit changes comments only. */
1635 static GstFlowReturn
1636 gst_d3d11_deinterlace_transform (GstBaseTransform * trans, GstBuffer * inbuf,
1639 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1640 ID3D11VideoProcessorInputView *piv;
1641 ID3D11VideoProcessorOutputView *pov;
1642 D3D11_VIDEO_FRAME_FORMAT frame_foramt = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
1643 D3D11_VIDEO_PROCESSOR_STREAM proc_stream = { 0, };
1644 ID3D11VideoProcessorInputView *future_surfaces[MAX_NUM_REFERENCES] =
1646 ID3D11VideoProcessorInputView *past_surfaces[MAX_NUM_REFERENCES] = { NULL, };
1647 guint future_frames = 0;
1648 guint past_frames = 0;
1652 /* Input/output buffer must be holding valid D3D11 memory here,
1653 * as we checked it already in submit_input_buffer() and generate_output() */
1654 piv = gst_d3d11_deinterace_get_piv_from_buffer (self, inbuf);
1656 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
1657 return GST_FLOW_ERROR;
1660 pov = gst_d3d11_deinterace_get_pov_from_buffer (self, outbuf);
1662 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
1663 return GST_FLOW_ERROR;
1666 /* Check field order */
1667 if (GST_VIDEO_INFO_INTERLACE_MODE (&self->in_info) ==
1668 GST_VIDEO_INTERLACE_MODE_MIXED ||
1669 (GST_VIDEO_INFO_INTERLACE_MODE (&self->in_info) ==
1670 GST_VIDEO_INTERLACE_MODE_INTERLEAVED &&
1671 GST_VIDEO_INFO_FIELD_ORDER (&self->in_info) ==
1672 GST_VIDEO_FIELD_ORDER_UNKNOWN)) {
/* Per-buffer flags decide the field format for mixed/unknown streams */
1673 if (!GST_BUFFER_FLAG_IS_SET (inbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED)) {
1674 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
1675 } else if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_VIDEO_BUFFER_FLAG_TFF)) {
1676 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
1678 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
1680 } else if (GST_VIDEO_INFO_FIELD_ORDER (&self->in_info) ==
1681 GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST) {
1682 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
1683 } else if (GST_VIDEO_INFO_FIELD_ORDER (&self->in_info) ==
1684 GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST) {
1685 frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
1688 if (frame_foramt == D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE) {
1689 /* Progressive stream will produce only one frame per frame */
1690 self->num_output_per_input = 1;
1691 } else if (self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND &&
1692 self->method != GST_D3D11_DEINTERLACE_METHOD_BOB) {
1693 /* Fill reference frames */
1694 for (i = 0; i < g_queue_get_length (&self->future_frame_queue) &&
1695 i < G_N_ELEMENTS (future_surfaces); i++) {
1696 GstBuffer *future_buf;
1697 ID3D11VideoProcessorInputView *future_piv;
1700 (GstBuffer *) g_queue_peek_nth (&self->future_frame_queue, i);
1701 future_piv = gst_d3d11_deinterace_get_piv_from_buffer (self, future_buf);
1703 GST_WARNING_OBJECT (self,
1704 "Couldn't get ID3D11VideoProcessorInputView from future "
1709 future_surfaces[i] = future_piv;
/* Past references, nearest-first (queue head is nearest) */
1713 for (i = 0; i < g_queue_get_length (&self->past_frame_queue) &&
1714 i < G_N_ELEMENTS (past_surfaces); i++) {
1715 GstBuffer *past_buf;
1716 ID3D11VideoProcessorInputView *past_piv;
1718 past_buf = (GstBuffer *) g_queue_peek_nth (&self->past_frame_queue, i);
1719 past_piv = gst_d3d11_deinterace_get_piv_from_buffer (self, past_buf);
1721 GST_WARNING_OBJECT (self,
1722 "Couldn't get ID3D11VideoProcessorInputView from past "
1727 past_surfaces[i] = past_piv;
1732 proc_stream.Enable = TRUE;
1733 proc_stream.pInputSurface = piv;
1734 proc_stream.InputFrameOrField = self->input_index;
1735 /* FIXME: This is wrong for inverse telecine case */
1736 /* OutputIndex == 0 for the first field, and 1 for the second field */
1737 if (self->num_output_per_input == 2) {
1738 if (trans->segment.rate < 0.0) {
1739 /* Process the second frame first in case of reverse playback */
1740 proc_stream.OutputIndex = self->first_output ? 1 : 0;
1742 proc_stream.OutputIndex = self->first_output ? 0 : 1;
1745 proc_stream.OutputIndex = 0;
1748 if (future_frames) {
1749 proc_stream.FutureFrames = future_frames;
1750 proc_stream.ppFutureSurfaces = future_surfaces;
1754 proc_stream.PastFrames = past_frames;
1755 proc_stream.ppPastSurfaces = past_surfaces;
/* Device lock guards the shared immediate context */
1758 gst_d3d11_device_lock (self->device);
1759 self->video_context->VideoProcessorSetStreamFrameFormat (self->video_proc, 0,
1762 hr = self->video_context->VideoProcessorBlt (self->video_proc, pov, 0,
1764 gst_d3d11_device_unlock (self->device);
1766 if (!gst_d3d11_result (hr, self->device)) {
1767 GST_ERROR_OBJECT (self, "Failed to perform deinterlacing");
1768 return GST_FLOW_ERROR;
/* GstBaseTransform::sink_event vfunc. Drains queued frames on events that
 * imply discontinuity (stream-start, caps change, segment, segment-done,
 * EOS presumably) and resets reference history on flush-stop, then chains
 * up to the base class. */
1775 gst_d3d11_deinterlace_sink_event (GstBaseTransform * trans, GstEvent * event)
1777 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1779 switch (GST_EVENT_TYPE (event)) {
1780 case GST_EVENT_STREAM_START:
1781 /* stream-start means discont stream from previous one. Drain pending
1783 GST_DEBUG_OBJECT (self, "Have stream-start, drain frames if any");
1784 gst_d3d11_deinterlace_drain (self);
1786 case GST_EVENT_CAPS:{
1787 GstPad *sinkpad = GST_BASE_TRANSFORM_SINK_PAD (trans);
1790 prev_caps = gst_pad_get_current_caps (sinkpad);
1793 gst_event_parse_caps (event, &caps);
1794 /* If caps is updated, drain pending frames */
1795 if (!gst_caps_is_equal (prev_caps, caps)) {
1796 GST_DEBUG_OBJECT (self, "Caps updated from %" GST_PTR_FORMAT " to %"
1797 GST_PTR_FORMAT, prev_caps, caps);
1798 gst_d3d11_deinterlace_drain (self);
1801 gst_caps_unref (prev_caps);
1805 case GST_EVENT_SEGMENT:
1806 /* new segment would mean that temporal discontinuity */
1807 case GST_EVENT_SEGMENT_DONE:
1809 GST_DEBUG_OBJECT (self, "Have event %s, drain frames if any",
1810 GST_EVENT_TYPE_NAME (event));
1811 gst_d3d11_deinterlace_drain (self);
/* Flush discards (rather than drains) all queued reference frames */
1813 case GST_EVENT_FLUSH_STOP:
1814 GST_D3D11_DEINTERLACE_LOCK (self);
1815 gst_d3d11_deinterlace_reset_history (self);
1816 GST_D3D11_DEINTERLACE_UNLOCK (self);
1822 return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
/* GstBaseTransform::before_transform vfunc. Detects when the incoming
 * buffer's D3D11 device differs from ours (same DXGI adapter required),
 * and if so drains, switches to the new device, re-runs set_caps, and marks
 * the src pad for reconfiguration so pools get renegotiated. */
1826 gst_d3d11_deinterlace_before_transform (GstBaseTransform * trans,
1829 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1830 GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);
1831 GstD3D11Memory *dmem;
1833 GstCaps *in_caps = NULL;
1834 GstCaps *out_caps = NULL;
1837 mem = gst_buffer_peek_memory (buffer, 0);
1838 if (!gst_is_d3d11_memory (mem)) {
1839 GST_ELEMENT_ERROR (self, CORE, FAILED, (NULL), ("Invalid memory"));
1843 dmem = GST_D3D11_MEMORY_CAST (mem);
1844 /* Same device, nothing to do */
1845 if (dmem->device == self->device)
1848 g_object_get (dmem->device, "adapter", &adapter, NULL);
1849 /* We have per-GPU deinterlace elements because of different capability
1850 * per GPU. so, cannot accept other GPU at the moment */
1851 if (adapter != klass->adapter)
1854 GST_INFO_OBJECT (self, "Updating device %" GST_PTR_FORMAT " -> %"
1855 GST_PTR_FORMAT, self->device, dmem->device);
1857 /* Drain buffers before updating device */
1858 gst_d3d11_deinterlace_drain (self);
/* Swap to the buffer's device; old device reference is released */
1860 gst_object_unref (self->device);
1861 self->device = (GstD3D11Device *) gst_object_ref (dmem->device);
1863 in_caps = gst_pad_get_current_caps (GST_BASE_TRANSFORM_SINK_PAD (trans));
1865 GST_WARNING_OBJECT (self, "sinkpad has null caps");
1869 out_caps = gst_pad_get_current_caps (GST_BASE_TRANSFORM_SRC_PAD (trans));
1871 GST_WARNING_OBJECT (self, "Has no configured output caps");
/* Rebuild processor objects and fallback pools on the new device */
1875 gst_d3d11_deinterlace_set_caps (trans, in_caps, out_caps);
1877 /* Mark reconfigure so that we can update pool */
1878 gst_base_transform_reconfigure_src (trans);
1881 gst_clear_caps (&in_caps);
1882 gst_clear_caps (&out_caps);
/* Flushes all frames still held in the future-frame queue, generating and
 * pushing output for each, then resets the reference history. The instance
 * lock is released around gst_pad_push() to avoid deadlocks with downstream.
 */
1887 /* FIXME: might be job of basetransform */
1888 static GstFlowReturn
1889 gst_d3d11_deinterlace_drain (GstD3D11Deinterlace * self)
1891 GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (self);
1892 GstFlowReturn ret = GST_FLOW_OK;
1893 GstBuffer *outbuf = NULL;
1895 GST_D3D11_DEINTERLACE_LOCK (self);
1896 if (gst_base_transform_is_passthrough (trans)) {
1897 /* If we were passthrough, nothing to do */
1899 } else if (!g_queue_get_length (&self->future_frame_queue)) {
1900 /* No pending data, nothing to do */
1904 while (g_queue_get_length (&self->future_frame_queue)) {
/* NULL buffer signals drain mode to submit_future_frame() */
1905 gst_d3d11_deinterlace_submit_future_frame (self, NULL);
1906 if (!self->to_process)
1912 ret = gst_d3d11_deinterlace_generate_output (trans, &outbuf);
1913 if (outbuf != NULL) {
1914 /* Release lock during push buffer */
1915 GST_D3D11_DEINTERLACE_UNLOCK (self);
1916 ret = gst_pad_push (trans->srcpad, outbuf);
1917 GST_D3D11_DEINTERLACE_LOCK (self);
1919 } while (ret == GST_FLOW_OK && outbuf != NULL);
1923 gst_d3d11_deinterlace_reset_history (self);
1924 GST_D3D11_DEINTERLACE_UNLOCK (self);
1930 * SECTION:element-d3d11deinterlace
1931 * @title: d3d11deinterlace
1932 * @short_description: A Direct3D11 based deinterlace element
1934 * Deinterlaces interlaced video frames into progressive video frames using
1935 * the ID3D11VideoProcessor API.
1937 * ## Example launch line
1939 * gst-launch-1.0 filesrc location=/path/to/h264/file ! parsebin ! d3d11h264dec ! d3d11deinterlace ! d3d11videosink
1946 /* GstD3D11DeinterlaceBin */
/* Property IDs, instance/class structs, and caps macros for the
 * d3d11deinterlace bin wrapper element.
 * NOTE(review): the enum opening and some struct members are not visible in
 * this chunk; comments only, code untouched. */
1957 PROP_BIN_SUPPORTED_METHODS,
/* Bin holding upload -> convert -> deinterlace -> convert -> download */
1960 typedef struct _GstD3D11DeinterlaceBin
1967 GstElement *deinterlace;
1968 GstElement *in_convert;
1969 GstElement *out_convert;
1971 GstElement *download;
1972 } GstD3D11DeinterlaceBin;
1974 typedef struct _GstD3D11DeinterlaceBinClass
1976 GstBinClass parent_class;
1980 } GstD3D11DeinterlaceBinClass;
1982 static GstElementClass *bin_parent_class = NULL;
1983 #define GST_D3D11_DEINTERLACE_BIN(object) ((GstD3D11DeinterlaceBin *) (object))
1984 #define GST_D3D11_DEINTERLACE_BIN_GET_CLASS(object) \
1985 (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object), \
1986 GstD3D11DeinterlaceBinClass))
/* Caps template helpers; system-memory variant (no explicit features) */
1988 #define GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE(format) \
1990 "format = (string) " format ", " \
1991 "width = (int) [1, 8192], " \
1992 "height = (int) [1, 8192] "
/* Caps template helper with explicit caps features (e.g. D3D11 memory) */
1994 #define GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES(features,format) \
1995 "video/x-raw(" features "), " \
1996 "format = (string) " format ", " \
1997 "width = (int) [1, 8192], " \
1998 "height = (int) [1, 8192] "
/* Sink pad template: accepts D3D11 memory (with or without overlay
 * composition meta) as well as system memory, since the bin contains
 * upload/download elements for conversion. */
2000 static GstStaticPadTemplate bin_sink_template_caps =
2001 GST_STATIC_PAD_TEMPLATE ("sink",
2004 GST_STATIC_CAPS (GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
2005 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, GST_D3D11_SINK_FORMATS) "; "
2006 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
2007 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY ","
2008 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
2009 GST_D3D11_SINK_FORMATS) "; "
2010 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE (GST_D3D11_SINK_FORMATS) "; "
2011 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
2012 (GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY ","
2013 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
2014 GST_D3D11_SINK_FORMATS)
/* Src pad template: mirrors the sink template with src format lists */
2017 static GstStaticPadTemplate bin_src_template_caps =
2018 GST_STATIC_PAD_TEMPLATE ("src",
2021 GST_STATIC_CAPS (GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
2022 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, GST_D3D11_SRC_FORMATS) "; "
2023 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
2024 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY ","
2025 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
2026 GST_D3D11_SRC_FORMATS) "; "
2027 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE (GST_D3D11_SRC_FORMATS) "; "
2028 GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
2029 (GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY ","
2030 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
2031 GST_D3D11_SRC_FORMATS)
/* Property accessors forward any get/set to the inner deinterlace element */
2034 static void gst_d3d11_deinterlace_bin_set_property (GObject * object,
2035 guint prop_id, const GValue * value, GParamSpec * pspec);
2036 static void gst_d3d11_deinterlace_bin_get_property (GObject * object,
2037 guint prop_id, GValue * value, GParamSpec * pspec);
/* Class init for the bin wrapper. Mirrors the inner deinterlace element's
 * properties (qos, adapter, device-id, vendor-id, method, supported-methods)
 * on the bin so users can configure it directly; values come from the
 * per-GPU class data (@data). */
2040 gst_d3d11_deinterlace_bin_class_init (GstD3D11DeinterlaceBinClass * klass,
2043 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
2044 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
2045 GstD3D11DeinterlaceClassData *cdata = (GstD3D11DeinterlaceClassData *) data;
2048 bin_parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
2050 gobject_class->get_property = gst_d3d11_deinterlace_bin_get_property;
2051 gobject_class->set_property = gst_d3d11_deinterlace_bin_set_property;
2054 g_object_class_install_property (gobject_class, PROP_BIN_QOS,
2055 g_param_spec_boolean ("qos", "QoS", "Handle Quality-of-Service events",
2056 FALSE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
/* Read-only device identification properties, fixed per registered type */
2059 g_object_class_install_property (gobject_class, PROP_BIN_ADAPTER,
2060 g_param_spec_uint ("adapter", "Adapter",
2061 "DXGI Adapter index for creating device",
2062 0, G_MAXUINT32, cdata->adapter,
2063 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
2064 g_object_class_install_property (gobject_class, PROP_BIN_DEVICE_ID,
2065 g_param_spec_uint ("device-id", "Device Id",
2066 "DXGI Device ID", 0, G_MAXUINT32, 0,
2067 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
2068 g_object_class_install_property (gobject_class, PROP_BIN_VENDOR_ID,
2069 g_param_spec_uint ("vendor-id", "Vendor Id",
2070 "DXGI Vendor ID", 0, G_MAXUINT32, 0,
2071 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
/* NOTE(review): "Use can set" in the blurb below is likely a typo for
 * "User can set"; it is a runtime string, so it is left unchanged in this
 * comment-only edit. */
2072 g_object_class_install_property (gobject_class, PROP_BIN_METHOD,
2073 g_param_spec_flags ("method", "Method",
2074 "Deinterlace Method. Use can set multiple methods as a flagset "
2075 "and element will select one of method automatically. "
2076 "If deinterlacing device failed to deinterlace with given mode, "
2077 "fallback might happen by the device",
2078 GST_TYPE_D3D11_DEINTERLACE_METHOD, cdata->device_caps.default_method,
2079 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
2080 GST_PARAM_MUTABLE_READY)));
2081 g_object_class_install_property (gobject_class, PROP_BIN_SUPPORTED_METHODS,
2082 g_param_spec_flags ("supported-methods", "Supported Methods",
2083 "Set of supported deinterlace methods by device",
2084 GST_TYPE_D3D11_DEINTERLACE_METHOD,
2085 cdata->device_caps.supported_methods,
2086 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
2088 long_name = g_strdup_printf ("Direct3D11 %s Deinterlacer Bin",
2089 cdata->description);
2090 gst_element_class_set_metadata (element_class, long_name,
2091 "Filter/Effect/Video/Deinterlace/Hardware",
2092 "A Direct3D11 based deinterlacer bin",
2093 "Seungha Yang <seungha@centricular.com>");
2096 gst_element_class_add_static_pad_template (element_class,
2097 &bin_sink_template_caps);
2098 gst_element_class_add_static_pad_template (element_class,
2099 &bin_src_template_caps);
2101 klass->adapter = cdata->adapter;
2102 klass->child_type = cdata->deinterlace_type;
2104 gst_d3d11_deinterlace_class_data_unref (cdata);
/* Instance init: builds the internal pipeline
 *   upload -> in_convert -> deinterlace -> out_convert -> download
 * pinned to this class's DXGI adapter, and exposes its ends via ghost pads.
 */
2108 gst_d3d11_deinterlace_bin_init (GstD3D11DeinterlaceBin * self)
2110 GstD3D11DeinterlaceBinClass *klass =
2111 GST_D3D11_DEINTERLACE_BIN_GET_CLASS (self);
/* The inner element type is per-GPU, registered in class data */
2114 self->deinterlace = (GstElement *) g_object_new (klass->child_type,
2115 "name", "deinterlace", NULL);
2116 self->in_convert = gst_element_factory_make ("d3d11colorconvert", NULL);
2117 self->out_convert = gst_element_factory_make ("d3d11colorconvert", NULL);
2118 self->upload = gst_element_factory_make ("d3d11upload", NULL);
2119 self->download = gst_element_factory_make ("d3d11download", NULL);
2121 /* Specify DXGI adapter index to use */
2122 g_object_set (G_OBJECT (self->in_convert), "adapter", klass->adapter, NULL);
2123 g_object_set (G_OBJECT (self->out_convert), "adapter", klass->adapter, NULL);
2124 g_object_set (G_OBJECT (self->upload), "adapter", klass->adapter, NULL);
2125 g_object_set (G_OBJECT (self->download), "adapter", klass->adapter, NULL);
2127 gst_bin_add_many (GST_BIN_CAST (self), self->upload, self->in_convert,
2128 self->deinterlace, self->out_convert, self->download, NULL);
2129 gst_element_link_many (self->upload, self->in_convert, self->deinterlace,
2130 self->out_convert, self->download, NULL);
/* Ghost pads proxy the internal chain's outer pads on the bin */
2132 pad = gst_element_get_static_pad (self->upload, "sink");
2133 self->sinkpad = gst_ghost_pad_new ("sink", pad);
2134 gst_element_add_pad (GST_ELEMENT_CAST (self), self->sinkpad);
2135 gst_object_unref (pad);
2137 pad = gst_element_get_static_pad (self->download, "src");
2138 self->srcpad = gst_ghost_pad_new ("src", pad);
2139 gst_element_add_pad (GST_ELEMENT_CAST (self), self->srcpad);
2140 gst_object_unref (pad);
/* Forwards any property write on the bin to the inner deinterlace element;
 * property names are mirrored 1:1 in class_init. */
2144 gst_d3d11_deinterlace_bin_set_property (GObject * object, guint prop_id,
2145 const GValue * value, GParamSpec * pspec)
2147 GstD3D11DeinterlaceBin *self = GST_D3D11_DEINTERLACE_BIN (object);
2149 g_object_set_property (G_OBJECT (self->deinterlace), pspec->name, value);
/* GObject get_property vfunc: mirror of set_property — reads are
 * delegated to the wrapped deinterlace child by property name. */
2153 gst_d3d11_deinterlace_bin_get_property (GObject * object, guint prop_id,
2154 GValue * value, GParamSpec * pspec)
2156 GstD3D11DeinterlaceBin *self = GST_D3D11_DEINTERLACE_BIN (object);
2158 g_object_get_property (G_OBJECT (self->deinterlace), pspec->name, value);
/* Probes the given GstD3D11Device for ID3D11VideoProcessor deinterlacing
 * support and, when available, registers two per-adapter element types:
 * the bare "d3d11deinterlaceelement" (GstBaseTransform subclass) and the
 * "d3d11deinterlace" wrapper bin (with conversion helpers around it).
 * Device capabilities (methods, reference-frame counts, caps) are packed
 * into class data consumed by the class_init functions. */
2162 gst_d3d11_deinterlace_register (GstPlugin * plugin, GstD3D11Device * device,
2168 gchar *feature_name;
/* GType description of the element proper; class data is attached below
 * once the device caps have been collected */
2170 GTypeInfo type_info = {
2171 sizeof (GstD3D11DeinterlaceClass),
2174 (GClassInitFunc) gst_d3d11_deinterlace_class_init,
2177 sizeof (GstD3D11Deinterlace),
2179 (GInstanceInitFunc) gst_d3d11_deinterlace_init,
/* GType description of the wrapper bin registered alongside */
2181 GTypeInfo bin_type_info = {
2182 sizeof (GstD3D11DeinterlaceBinClass),
2185 (GClassInitFunc) gst_d3d11_deinterlace_bin_class_init,
2188 sizeof (GstD3D11DeinterlaceBin),
2190 (GInstanceInitFunc) gst_d3d11_deinterlace_bin_init,
2192 GstCaps *sink_caps = NULL;
2193 GstCaps *src_caps = NULL;
2194 GstCaps *caps = NULL;
2195 GstCapsFeatures *caps_features;
2196 ID3D11Device *device_handle;
2197 ID3D11DeviceContext *context_handle;
2199 ComPtr<ID3D11VideoDevice> video_device;
2200 ComPtr<ID3D11VideoContext> video_context;
2201 ComPtr<ID3D11VideoProcessorEnumerator> video_proc_enum;
2202 ComPtr<ID3D11VideoProcessorEnumerator1> video_proc_enum1;
2205 D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc;
2206 D3D11_VIDEO_PROCESSOR_CAPS proc_caps = { 0, };
/* Bitmask of D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_* flags
 * accumulated over all rate conversion caps of the device */
2207 UINT supported_methods = 0;
2208 GstD3D11DeinterlaceMethod default_method;
2213 /* NOTE: the processor might be able to handle other formats.
2214  * However, not all YUV formats can be used as a render target.
2215  * For instance, the DXGI_FORMAT_Y210 and DXGI_FORMAT_Y410 formats cannot
2216  * be render targets. In practice interlaced streams are the output of
2217  * video decoders, so NV12/P010/P016 cover most real-world use cases. */
2219 DXGI_FORMAT formats_to_check[] = {
2220 DXGI_FORMAT_NV12, /* NV12 */
2221 DXGI_FORMAT_P010, /* P010_10LE */
2222 DXGI_FORMAT_P016, /* P016_LE */
2224 GValue *supported_formats = NULL;
2225 GstD3D11DeinterlaceClassData *cdata;
/* Largest past/future reference frame requirement over all supported
 * rate converters; the element will need to queue this many frames */
2226 guint max_past_frames = 0;
2227 guint max_future_frames = 0;
/* Query the video (processor) interfaces; lack of either means the
 * device cannot deinterlace at all and registration is aborted */
2230 device_handle = gst_d3d11_device_get_device_handle (device);
2231 context_handle = gst_d3d11_device_get_device_context_handle (device);
2233 hr = device_handle->QueryInterface (IID_PPV_ARGS (&video_device));
2234 if (!gst_d3d11_result (hr, device))
2237 hr = context_handle->QueryInterface (IID_PPV_ARGS (&video_context));
2238 if (!gst_d3d11_result (hr, device))
/* Dummy 320x240 interlaced content description — only used to create an
 * enumerator for capability probing, not for actual processing */
2241 memset (&desc, 0, sizeof (D3D11_VIDEO_PROCESSOR_CONTENT_DESC));
2242 desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
2243 desc.InputWidth = 320;
2244 desc.InputHeight = 240;
2245 desc.OutputWidth = 320;
2246 desc.OutputHeight = 240;
2247 desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
2249 hr = video_device->CreateVideoProcessorEnumerator (&desc, &video_proc_enum);
2250 if (!gst_d3d11_result (hr, device))
2253 /* We need the ID3D11VideoProcessorEnumerator1 interface to check the
2254  * conversion capability of the device via
2255  * CheckVideoProcessorFormatConversion() */
2255 hr = video_proc_enum.As (&video_proc_enum1);
2256 if (!gst_d3d11_result (hr, device))
2259 hr = video_proc_enum->GetVideoProcessorCaps (&proc_caps);
2260 if (!gst_d3d11_result (hr, device))
/* Union the deinterlace method flags over every rate converter the
 * device offers, and track the worst-case reference frame counts */
2263 for (i = 0; i < proc_caps.RateConversionCapsCount; i++) {
2264 D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rate_conv_caps = { 0, };
2266 hr = video_proc_enum->GetVideoProcessorRateConversionCaps (i,
2271 supported_methods |= rate_conv_caps.ProcessorCaps;
2272 max_past_frames = MAX (max_past_frames, rate_conv_caps.PastFrames);
2273 max_future_frames = MAX (max_future_frames, rate_conv_caps.FutureFrames);
2276 if (supported_methods == 0)
/* Our method enum values alias the D3D11 processor caps bits, so they
 * can be tested directly against the accumulated flag mask */
2279 #define IS_SUPPORTED_METHOD(flags,val) (flags & val) == val
2280 blend = IS_SUPPORTED_METHOD (supported_methods,
2281 GST_D3D11_DEINTERLACE_METHOD_BLEND);
2282 bob = IS_SUPPORTED_METHOD (supported_methods,
2283 GST_D3D11_DEINTERLACE_METHOD_BOB);
2284 adaptive = IS_SUPPORTED_METHOD (supported_methods,
2285 GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE);
2286 mocomp = IS_SUPPORTED_METHOD (supported_methods,
2287 GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION);
2288 #undef IS_SUPPORTED_METHOD
2290 if (!blend && !bob && !adaptive && !mocomp)
2293 /* Drop all not supported methods from flags */
2294 supported_methods = supported_methods &
2295 (GST_D3D11_DEINTERLACE_METHOD_BLEND | GST_D3D11_DEINTERLACE_METHOD_BOB |
2296 GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE |
2297 GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION);
2299 /* Prefer bob, it's equivalent to "linear" which is the default mode of
2300  * the software deinterlace element, and it's also the fallback mode
2301  * for our "adaptive" and "mocomp" modes. Note that since Direct3D12,
2302  * "blend" mode is no longer supported; instead "bob" and "custom"
2303  * modes are supported */
2305 default_method = GST_D3D11_DEINTERLACE_METHOD_BOB;
2306 } else if (adaptive) {
2307 default_method = GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE;
2308 } else if (mocomp) {
2309 default_method = GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION;
2311 default_method = GST_D3D11_DEINTERLACE_METHOD_BLEND;
2313 /* Programming error */
2314 g_return_if_reached ();
/* Build the GstValue list of supported video formats: each candidate
 * DXGI format must be usable as both processor input and output, and
 * same-format conversion in BT.709 studio range must be accepted */
2317 for (i = 0; i < G_N_ELEMENTS (formats_to_check); i++) {
2319 GValue val = G_VALUE_INIT;
2320 GstVideoFormat format;
2321 BOOL supported = FALSE;
2323 hr = video_proc_enum->CheckVideoProcessorFormat (formats_to_check[i],
2328 /* The D3D11 video processor can support other conversions at once,
2329  * including color format conversion.
2330  * But not all combinations of in/out pairs can be supported.
2331  * To keep things simple, this element does only deinterlacing
2332  * (might not be optimal in terms of processing power/resources though) */
2334 /* D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_INPUT = 0x1,
2335  * D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_OUTPUT = 0x2,
2336  * MinGW headers might not define the above enum values */
2337 if ((flags & 0x3) != 0x3)
2340 format = gst_d3d11_dxgi_format_to_gst (formats_to_check[i]);
2341 /* This is a programming error! */
2342 if (format == GST_VIDEO_FORMAT_UNKNOWN) {
2343 GST_ERROR ("Couldn't convert DXGI format %d to video format",
2344 formats_to_check[i]);
2348 hr = video_proc_enum1->CheckVideoProcessorFormatConversion
2349 (formats_to_check[i], DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709,
2350 formats_to_check[i], DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709,
2352 if (FAILED (hr) || !supported)
/* Lazily create the list on the first supported format */
2355 if (!supported_formats) {
2356 supported_formats = g_new0 (GValue, 1);
2357 g_value_init (supported_formats, GST_TYPE_LIST);
2360 if (formats_to_check[i] == DXGI_FORMAT_P016) {
2361 /* This is used for P012 as well */
2362 g_value_init (&val, G_TYPE_STRING);
2363 g_value_set_static_string (&val,
2364 gst_video_format_to_string (GST_VIDEO_FORMAT_P012_LE));
2365 gst_value_list_append_and_take_value (supported_formats, &val);
2368 g_value_init (&val, G_TYPE_STRING);
2369 g_value_set_static_string (&val, gst_video_format_to_string (format));
2370 gst_value_list_append_and_take_value (supported_formats, &val);
2373 if (!supported_formats)
/* Construct the pad template caps from the probed format list */
2376 caps = gst_caps_new_empty_simple ("video/x-raw");
2377 /* FIXME: Check supported resolution, it would be different from
2378  * the supported max texture dimension */
2379 gst_caps_set_simple (caps,
2380 "width", GST_TYPE_INT_RANGE, 1, 8192,
2381 "height", GST_TYPE_INT_RANGE, 1, 8192, NULL);
2382 gst_caps_set_value (caps, "format", supported_formats);
2383 g_value_unset (supported_formats);
2384 g_free (supported_formats);
2386 /* TODO: Add alternating deinterlace */
/* Two caps variants: plain D3D11 memory, and D3D11 memory with overlay
 * composition meta; sink caps mirror the src caps */
2387 src_caps = gst_caps_copy (caps);
2388 caps_features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY,
2390 gst_caps_set_features_simple (src_caps, caps_features);
2392 caps_features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY,
2393 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, NULL);
2394 gst_caps_set_features_simple (caps, caps_features);
2395 gst_caps_append (src_caps, caps);
2397 sink_caps = gst_caps_copy (src_caps);
/* Caps live as long as the registered type; silence leak tracers */
2399 GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
2400 GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
/* Pack everything the class_init functions need into refcounted class
 * data shared by both type registrations */
2402 cdata = gst_d3d11_deinterlace_class_data_new ();
2403 cdata->sink_caps = sink_caps;
2404 cdata->src_caps = src_caps;
2405 cdata->device_caps.supported_methods =
2406 (GstD3D11DeinterlaceMethod) supported_methods;
2407 cdata->device_caps.default_method = default_method;
2408 cdata->device_caps.max_past_frames = max_past_frames;
2409 cdata->device_caps.max_future_frames = max_future_frames;
2411 g_object_get (device, "adapter", &cdata->adapter,
2412 "device-id", &cdata->device_id, "vendor-id", &cdata->vendor_id,
2413 "description", &cdata->description, NULL);
2414 type_info.class_data = cdata;
2415 bin_type_info.class_data = gst_d3d11_deinterlace_class_data_ref (cdata);
/* Register the element type; when the base name is taken (multiple
 * adapters) fall back to a per-device name, and only the first device's
 * element shows up in the documentation */
2417 type_name = g_strdup ("GstD3D11Deinterlace");
2418 feature_name = g_strdup ("d3d11deinterlaceelement");
2420 while (g_type_from_name (type_name)) {
2423 g_free (feature_name);
2424 type_name = g_strdup_printf ("GstD3D11Device%dDeinterlace", index);
2425 feature_name = g_strdup_printf ("d3d11device%ddeinterlaceelement", index);
2428 type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
2429 type_name, &type_info, (GTypeFlags) 0);
2430 cdata->deinterlace_type = type;
2433 gst_element_type_set_skip_documentation (type);
2435 if (!gst_element_register (plugin, feature_name, GST_RANK_NONE, type))
2436 GST_WARNING ("Failed to register plugin '%s'", type_name);
2439 g_free (feature_name);
2441 /* Register wrapper bin */
2443 type_name = g_strdup ("GstD3D11DeinterlaceBin");
2444 feature_name = g_strdup ("d3d11deinterlace");
2446 while (g_type_from_name (type_name)) {
2449 g_free (feature_name);
2450 type_name = g_strdup_printf ("GstD3D11Device%dDeinterlaceBin", index);
2451 feature_name = g_strdup_printf ("d3d11device%ddeinterlace", index);
2454 bin_type = g_type_register_static (GST_TYPE_BIN,
2455 type_name, &bin_type_info, (GTypeFlags) 0);
2457 /* make lower rank than default device */
2458 if (rank > 0 && index != 0)
2462 gst_element_type_set_skip_documentation (bin_type);
2464 if (!gst_element_register (plugin, feature_name, rank, bin_type))
2465 GST_WARNING ("Failed to register plugin '%s'", type_name);
2468 g_free (feature_name);