2 * Copyright (C) 2011 David Schleef <ds@entropywave.com>
3 * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
18 * Boston, MA 02110-1335, USA.
21 * SECTION:element-decklinkvideosink
22 * @short_description: Outputs Video to a BlackMagic DeckLink Device
24 * Playout Video to a BlackMagic DeckLink Device.
30 * decklinkvideosink device-number=0 mode=1080p25
32 * Playout a 1080p25 test-video to the SDI-Out of Card 0. Devices are numbered
36 * Certain DeckLink Cards like the Duo2 or the Quad2 contain two or four
37 * independent SDI units with two connectors each. These units can operate either
38 * in half- or in full-duplex mode.
40 * The Duplex-Mode of a Card can be configured using the `duplex-mode`-Property.
41 * Cards that do not support Duplex-Modes are not influenced by the property.
43 * ## Half-Duplex-Mode (default):
44 * By default decklinkvideosink will configure them into half-duplex mode, so that
45 * each connector acts as if it were an independent DeckLink Card which can either
46 * be used as an Input or as an Output. In this mode the Duo2 can be used as 4 SDI
47 * In-/Outputs and the Quad2 as 8 SDI In-/Outputs.
51 * videotestsrc foreground-color=0x00ff0000 ! decklinkvideosink device-number=0 mode=1080p25 \
52 * videotestsrc foreground-color=0x0000ff00 ! decklinkvideosink device-number=1 mode=1080p25 \
53 * videotestsrc foreground-color=0x000000ff ! decklinkvideosink device-number=2 mode=1080p25 \
54 * videotestsrc foreground-color=0x00ffffff ! decklinkvideosink device-number=3 mode=1080p25
56 * Playout four Test-Screens with colored Snow on the first four units in the System
57 * (ie. the Connectors 1-4 of a Duo2 unit).
61 * videotestsrc is-live=true foreground-color=0x0000ff00 ! decklinkvideosink device-number=0 mode=1080p25 \
62 * decklinkvideosrc device-number=1 mode=1080p25 ! autovideosink \
63 * decklinkvideosrc device-number=2 mode=1080p25 ! autovideosink \
64 * videotestsrc is-live=true foreground-color=0x00ff0000 ! decklinkvideosink device-number=3 mode=1080p25
66 * Capture 1080p25 from the second and third unit in the System,
67 * Playout a Test-Screen with colored Snow on the first and fourth unit
68 * (ie. the Connectors 1-4 of a Duo2 unit).
70 * ## Device-Number-Mapping in Half-Duplex-Mode
71 * The device-number to connector-mapping is as follows for the Duo2
72 * - `device-number=0` SDI1
73 * - `device-number=1` SDI3
74 * - `device-number=2` SDI2
75 * - `device-number=3` SDI4
78 * - `device-number=0` SDI1
79 * - `device-number=1` SDI3
80 * - `device-number=2` SDI5
81 * - `device-number=3` SDI7
82 * - `device-number=4` SDI2
83 * - `device-number=5` SDI4
84 * - `device-number=6` SDI6
85 * - `device-number=7` SDI8
87 * ## Full-Duplex-Mode:
88 * When operating in full-duplex mode, two connectors of a unit are combined to
89 * a single device, performing keying with the second connection.
91 * ## Device-Number-Mapping in Full-Duplex-Mode
92 * The device-number to connector-mapping in full-duplex-mode is as follows for the Duo2
93 * - `device-number=0` SDI1 primary, SDI2 secondary
94 * - `device-number=1` SDI3 primary, SDI4 secondary
97 * - `device-number=0` SDI1 primary, SDI2 secondary
98 * - `device-number=1` SDI3 primary, SDI4 secondary
99 * - `device-number=2` SDI5 primary, SDI6 secondary
100 * - `device-number=3` SDI7 primary, SDI8 secondary
103 * Keying is the process of overlaying Video with an Alpha-Channel on top of an
104 * existing Video-Stream. The Duo2 and Quad2-Cards can perform two different
105 * Keying-Modes when operated in full-duplex mode. Both modes expect Video with
109 * In internal Keying-Mode the primary port becomes an Input and the secondary port
110 * an Output. The unit overlays Video played back from the Computer onto the Input
111 * and outputs the combined Video-Stream to the Output.
115 * videotestsrc foreground-color=0x00000000 background-color=0x00000000 ! \
116 * video/x-raw,format=BGRA,width=1920,height=1080 ! \
117 * decklinkvideosink device-number=0 duplex-mode=full keyer-mode=internal video-format=8bit-bgra mode=1080p25
121 * In external Keying-Mode the primary port outputs the alpha-channel as the
122 * luma-value (key-channel). Transparent pixels are black, opaque pixels are white.
123 * The RGB-Components of the Video are output on the secondary channel.
127 * videotestsrc foreground-color=0x00000000 background-color=0x00000000 ! \
128 * video/x-raw,format=BGRA,width=1920,height=1080 ! \
129 * decklinkvideosink device-number=0 duplex-mode=full keyer-mode=external video-format=8bit-bgra mode=1080p25
137 #include "gstdecklinkvideosink.h"
140 GST_DEBUG_CATEGORY_STATIC (gst_decklink_video_sink_debug);
141 #define GST_CAT_DEFAULT gst_decklink_video_sink_debug
/* COM-style callback object handed to the DeckLink driver via
 * SetScheduledFrameCompletionCallback (see set_caps below). It reports
 * frame-completion and playback-stop events back to the owning sink.
 * Reference counting is implemented manually, guarded by m_mutex. */
143 class GStreamerVideoOutputCallback:public IDeckLinkVideoOutputCallback
146 GStreamerVideoOutputCallback (GstDecklinkVideoSink * sink)
147 :IDeckLinkVideoOutputCallback (), m_refcount (1)
/* Hold a strong ref on the sink for the callback's whole lifetime;
 * released in the destructor. */
149 m_sink = GST_DECKLINK_VIDEO_SINK_CAST (gst_object_ref (sink));
150 g_mutex_init (&m_mutex);
/* Deliberate stub: no additional COM interfaces are exposed. */
153 virtual HRESULT WINAPI QueryInterface (REFIID, LPVOID *)
155 return E_NOINTERFACE;
/* Refcount updates use m_mutex rather than atomic ops. */
158 virtual ULONG WINAPI AddRef (void)
162 g_mutex_lock (&m_mutex);
165 g_mutex_unlock (&m_mutex);
170 virtual ULONG WINAPI Release (void)
174 g_mutex_lock (&m_mutex);
177 g_mutex_unlock (&m_mutex);
/* Driver notification for every scheduled frame; the visible branches
 * only log the completion result (completed / late / dropped / flushed). */
186 virtual HRESULT WINAPI ScheduledFrameCompleted (IDeckLinkVideoFrame *
187 completedFrame, BMDOutputFrameCompletionResult result)
190 case bmdOutputFrameCompleted:
191 GST_LOG_OBJECT (m_sink, "Completed frame %p", completedFrame);
193 case bmdOutputFrameDisplayedLate:
194 GST_INFO_OBJECT (m_sink, "Late Frame %p", completedFrame);
196 case bmdOutputFrameDropped:
197 GST_INFO_OBJECT (m_sink, "Dropped Frame %p", completedFrame);
199 case bmdOutputFrameFlushed:
200 GST_DEBUG_OBJECT (m_sink, "Flushed Frame %p", completedFrame);
203 GST_INFO_OBJECT (m_sink, "Unknown Frame %p: %d", completedFrame,
/* Wake anyone blocked on output->cond waiting for playback shutdown. */
211 virtual HRESULT WINAPI ScheduledPlaybackHasStopped (void)
213 GST_LOG_OBJECT (m_sink, "Scheduled playback stopped");
215 if (m_sink->output) {
216 g_mutex_lock (&m_sink->output->lock);
217 g_cond_signal (&m_sink->output->cond);
218 g_mutex_unlock (&m_sink->output->lock);
/* Drop the sink ref taken in the constructor and tear down the mutex. */
224 virtual ~ GStreamerVideoOutputCallback () {
225 gst_object_unref (m_sink);
226 g_mutex_clear (&m_mutex);
230 GstDecklinkVideoSink * m_sink;
/* Tail of the property-id enum (full enum not visible in this listing). */
242 PROP_TIMECODE_FORMAT,
245 PROP_HW_SERIAL_NUMBER,
/* Forward declarations for the GObject / GstElement / GstBaseSink vfunc
 * implementations that are wired up in class_init below. */
250 static void gst_decklink_video_sink_set_property (GObject * object,
251 guint property_id, const GValue * value, GParamSpec * pspec);
252 static void gst_decklink_video_sink_get_property (GObject * object,
253 guint property_id, GValue * value, GParamSpec * pspec);
254 static void gst_decklink_video_sink_finalize (GObject * object);
256 static GstStateChangeReturn
257 gst_decklink_video_sink_change_state (GstElement * element,
258 GstStateChange transition);
259 static GstClock *gst_decklink_video_sink_provide_clock (GstElement * element);
261 static GstCaps *gst_decklink_video_sink_get_caps (GstBaseSink * bsink,
263 static gboolean gst_decklink_video_sink_set_caps (GstBaseSink * bsink,
265 static GstFlowReturn gst_decklink_video_sink_prepare (GstBaseSink * bsink,
267 static GstFlowReturn gst_decklink_video_sink_render (GstBaseSink * bsink,
269 static gboolean gst_decklink_video_sink_open (GstBaseSink * bsink);
270 static gboolean gst_decklink_video_sink_close (GstBaseSink * bsink);
271 static gboolean gst_decklink_video_sink_stop (GstDecklinkVideoSink * self);
272 static gboolean gst_decklink_video_sink_propose_allocation (GstBaseSink * bsink,
274 static gboolean gst_decklink_video_sink_event (GstBaseSink * bsink,
278 gst_decklink_video_sink_start_scheduled_playback (GstElement * element);
/* Standard GObject type boilerplate. */
280 #define parent_class gst_decklink_video_sink_parent_class
281 G_DEFINE_TYPE (GstDecklinkVideoSink, gst_decklink_video_sink,
/* Caps-map callback: widens the "framerate" field to the full fraction
 * range. The sink only cares about buffer timestamps, not a fixed rate,
 * so any framerate is acceptable on the sink pad. */
285 reset_framerate (GstCapsFeatures * features, GstStructure * structure,
288 gst_structure_set (structure, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
/* Class initialisation: installs GObject property accessors, overrides the
 * GstElement / GstBaseSink vfuncs, registers all element properties, adds
 * the sink pad template and sets the element metadata. */
295 gst_decklink_video_sink_class_init (GstDecklinkVideoSinkClass * klass)
297 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
298 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
299 GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
302 gobject_class->set_property = gst_decklink_video_sink_set_property;
303 gobject_class->get_property = gst_decklink_video_sink_get_property;
304 gobject_class->finalize = gst_decklink_video_sink_finalize;
306 element_class->change_state =
307 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_change_state);
/* The sink exposes the DeckLink hardware clock to the pipeline. */
308 element_class->provide_clock =
309 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_provide_clock);
311 basesink_class->get_caps =
312 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_get_caps);
313 basesink_class->set_caps =
314 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_set_caps);
315 basesink_class->prepare = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_prepare);
316 basesink_class->render = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_render);
317 // FIXME: These are misnamed in basesink!
318 basesink_class->start = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_open);
319 basesink_class->stop = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_close);
320 basesink_class->propose_allocation =
321 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_propose_allocation);
322 basesink_class->event = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_event);
/* "mode": output video mode; defaults to NTSC (see init below). */
324 g_object_class_install_property (gobject_class, PROP_MODE,
325 g_param_spec_enum ("mode", "Playback Mode",
326 "Video Mode to use for playback",
327 GST_TYPE_DECKLINK_MODE, GST_DECKLINK_MODE_NTSC,
328 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
329 G_PARAM_CONSTRUCT)));
/* "device-number": which DeckLink output instance to use. */
331 g_object_class_install_property (gobject_class, PROP_DEVICE_NUMBER,
332 g_param_spec_int ("device-number", "Device number",
333 "Output device instance to use", 0, G_MAXINT, 0,
334 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
335 G_PARAM_CONSTRUCT)));
337 g_object_class_install_property (gobject_class, PROP_VIDEO_FORMAT,
338 g_param_spec_enum ("video-format", "Video format",
339 "Video format type to use for playback",
340 GST_TYPE_DECKLINK_VIDEO_FORMAT, GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV,
341 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
342 G_PARAM_CONSTRUCT)));
/* "duplex-mode": half/full duplex configuration for Duo2/Quad2 style
 * cards; see the element doc header for connector mapping. */
344 g_object_class_install_property (gobject_class, PROP_DUPLEX_MODE,
345 g_param_spec_enum ("duplex-mode", "Duplex mode",
346 "Certain DeckLink devices such as the DeckLink Quad 2 and the "
347 "DeckLink Duo 2 support configuration of the duplex mode of "
348 "individual sub-devices."
349 "A sub-device configured as full-duplex will use two connectors, "
350 "which allows simultaneous capture and playback, internal keying, "
351 "and fill & key scenarios."
352 "A half-duplex sub-device will use a single connector as an "
353 "individual capture or playback channel.",
354 GST_TYPE_DECKLINK_DUPLEX_MODE, GST_DECKLINK_DUPLEX_MODE_HALF,
355 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
356 G_PARAM_CONSTRUCT)));
358 g_object_class_install_property (gobject_class, PROP_TIMECODE_FORMAT,
359 g_param_spec_enum ("timecode-format", "Timecode format",
360 "Timecode format type to use for playback",
361 GST_TYPE_DECKLINK_TIMECODE_FORMAT,
362 GST_DECKLINK_TIMECODE_FORMAT_RP188ANY,
363 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
364 G_PARAM_CONSTRUCT)));
366 g_object_class_install_property (gobject_class, PROP_KEYER_MODE,
367 g_param_spec_enum ("keyer-mode", "Keyer mode",
368 "Keyer mode to be enabled",
369 GST_TYPE_DECKLINK_KEYER_MODE,
370 GST_DECKLINK_KEYER_MODE_OFF,
371 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
372 G_PARAM_CONSTRUCT)));
/* "keyer-level": 0..255 alpha level used by the hardware keyer. */
374 g_object_class_install_property (gobject_class, PROP_KEYER_LEVEL,
375 g_param_spec_int ("keyer-level", "Keyer level",
376 "Keyer level", 0, 255, 255,
377 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
378 G_PARAM_CONSTRUCT)));
/* Read-only: serial number of the opened card (NULL until opened). */
380 g_object_class_install_property (gobject_class, PROP_HW_SERIAL_NUMBER,
381 g_param_spec_string ("hw-serial-number", "Hardware serial number",
382 "The serial number (hardware ID) of the Decklink card",
383 NULL, (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
/* VANC line numbers for closed captions and AFD/Bar data; 0 disables. */
385 g_object_class_install_property (gobject_class, PROP_CC_LINE,
386 g_param_spec_int ("cc-line", "CC Line",
387 "Line number to use for inserting closed captions (0 = disabled)", 0,
389 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
390 G_PARAM_CONSTRUCT)));
392 g_object_class_install_property (gobject_class, PROP_AFD_BAR_LINE,
393 g_param_spec_int ("afd-bar-line", "AFD/Bar Line",
394 "Line number to use for inserting AFD/Bar data (0 = disabled)", 0,
396 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
397 G_PARAM_CONSTRUCT)));
399 templ_caps = gst_decklink_mode_get_template_caps (FALSE);
400 templ_caps = gst_caps_make_writable (templ_caps);
401 /* For output we support any framerate and only really care about timestamps */
402 gst_caps_map_in_place (templ_caps, reset_framerate, NULL);
403 gst_element_class_add_pad_template (element_class,
404 gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, templ_caps));
405 gst_caps_unref (templ_caps);
407 gst_element_class_set_static_metadata (element_class, "Decklink Video Sink",
408 "Video/Sink/Hardware", "Decklink Sink",
409 "David Schleef <ds@entropywave.com>, "
410 "Sebastian Dröge <sebastian@centricular.com>");
412 GST_DEBUG_CATEGORY_INIT (gst_decklink_video_sink_debug, "decklinkvideosink",
413 0, "debug category for decklinkvideosink element");
/* Instance init: set the property defaults (matching the param specs in
 * class_init) and configure basesink lateness/QoS behaviour. */
417 gst_decklink_video_sink_init (GstDecklinkVideoSink * self)
419 self->mode = GST_DECKLINK_MODE_NTSC;
420 self->device_number = 0;
421 self->video_format = GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV;
422 self->duplex_mode = bmdDuplexModeHalf;
423 /* VITC is legacy, we should expect RP188 in modern use cases */
424 self->timecode_format = bmdTimecodeRP188Any;
425 self->caption_line = 0;
426 self->afd_bar_line = 0;
/* Allow frames up to 20ms late and enable QoS so the pipeline can drop
 * hopelessly late buffers upstream. */
428 gst_base_sink_set_max_lateness (GST_BASE_SINK_CAST (self), 20 * GST_MSECOND);
429 gst_base_sink_set_qos_enabled (GST_BASE_SINK_CAST (self), TRUE);
/* GObject set_property: stores each property into the instance fields,
 * converting GStreamer enums to DeckLink SDK values where needed. */
433 gst_decklink_video_sink_set_property (GObject * object, guint property_id,
434 const GValue * value, GParamSpec * pspec)
436 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);
438 switch (property_id) {
440 self->mode = (GstDecklinkModeEnum) g_value_get_enum (value);
442 case PROP_DEVICE_NUMBER:
443 self->device_number = g_value_get_int (value);
/* Only a subset of the video-format enum is implemented for output;
 * anything else is accepted but triggers an element warning. */
445 case PROP_VIDEO_FORMAT:
446 self->video_format = (GstDecklinkVideoFormat) g_value_get_enum (value);
447 switch (self->video_format) {
448 case GST_DECKLINK_VIDEO_FORMAT_AUTO:
449 case GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV:
450 case GST_DECKLINK_VIDEO_FORMAT_10BIT_YUV:
451 case GST_DECKLINK_VIDEO_FORMAT_8BIT_ARGB:
452 case GST_DECKLINK_VIDEO_FORMAT_8BIT_BGRA:
455 GST_ELEMENT_WARNING (GST_ELEMENT (self), CORE, NOT_IMPLEMENTED,
456 ("Format %d not supported", self->video_format), (NULL));
460 case PROP_DUPLEX_MODE:
462 gst_decklink_duplex_mode_from_enum ((GstDecklinkDuplexMode)
463 g_value_get_enum (value));
465 case PROP_TIMECODE_FORMAT:
466 self->timecode_format =
467 gst_decklink_timecode_format_from_enum ((GstDecklinkTimecodeFormat)
468 g_value_get_enum (value));
470 case PROP_KEYER_MODE:
472 gst_decklink_keyer_mode_from_enum ((GstDecklinkKeyerMode)
473 g_value_get_enum (value));
475 case PROP_KEYER_LEVEL:
476 self->keyer_level = g_value_get_int (value);
479 self->caption_line = g_value_get_int (value);
481 case PROP_AFD_BAR_LINE:
482 self->afd_bar_line = g_value_get_int (value);
485 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
/* GObject get_property: mirrors set_property, converting DeckLink SDK
 * values back to the GStreamer enums for duplex/timecode/keyer. */
491 gst_decklink_video_sink_get_property (GObject * object, guint property_id,
492 GValue * value, GParamSpec * pspec)
494 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);
496 switch (property_id) {
498 g_value_set_enum (value, self->mode);
500 case PROP_DEVICE_NUMBER:
501 g_value_set_int (value, self->device_number);
503 case PROP_VIDEO_FORMAT:
504 g_value_set_enum (value, self->video_format);
506 case PROP_DUPLEX_MODE:
507 g_value_set_enum (value,
508 gst_decklink_duplex_mode_to_enum (self->duplex_mode));
510 case PROP_TIMECODE_FORMAT:
511 g_value_set_enum (value,
512 gst_decklink_timecode_format_to_enum (self->timecode_format));
514 case PROP_KEYER_MODE:
515 g_value_set_enum (value,
516 gst_decklink_keyer_mode_to_enum (self->keyer_mode));
518 case PROP_KEYER_LEVEL:
519 g_value_set_int (value, self->keyer_level);
/* The serial number only exists once the output device is open;
 * otherwise report NULL. */
521 case PROP_HW_SERIAL_NUMBER:
523 g_value_set_string (value, self->output->hw_serial_number);
525 g_value_set_string (value, NULL);
528 g_value_set_int (value, self->caption_line);
530 case PROP_AFD_BAR_LINE:
531 g_value_set_int (value, self->afd_bar_line);
534 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
/* Finalize: nothing instance-specific to free here; just chain up. */
540 gst_decklink_video_sink_finalize (GObject * object)
542 //GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);
544 G_OBJECT_CLASS (parent_class)->finalize (object);
/* set_caps: validates the negotiated caps, resolves the DeckLink output
 * mode (explicit or auto-detected from caps), configures the hardware
 * keyer, and enables video output with the appropriate timecode/VANC
 * flags. Reconfiguration while video is already enabled is only allowed
 * when format/size are unchanged. */
548 gst_decklink_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
550 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
551 const GstDecklinkMode *mode;
553 BMDVideoOutputFlags flags;
556 GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps);
558 if (!gst_video_info_from_caps (&info, caps))
/* If output is already enabled, only a no-op renegotiation (same
 * format, width and height) is accepted. */
562 g_mutex_lock (&self->output->lock);
563 if (self->output->video_enabled) {
564 if (self->info.finfo->format == info.finfo->format &&
565 self->info.width == info.width && self->info.height == info.height) {
566 // FIXME: We should also consider the framerate as it is used
567 // for mode selection below in auto mode
568 GST_DEBUG_OBJECT (self, "Nothing relevant has changed");
570 g_mutex_unlock (&self->output->lock);
573 GST_DEBUG_OBJECT (self, "Reconfiguration not supported at this point");
574 g_mutex_unlock (&self->output->lock);
578 g_mutex_unlock (&self->output->lock);
/* Install our completion callback; the driver takes its own ref via
 * the callback's COM refcounting. */
580 self->output->output->SetScheduledFrameCompletionCallback (new
581 GStreamerVideoOutputCallback (self));
/* In auto mode, derive the DeckLink mode (and pixel format) from caps;
 * a fixed video-format property must agree with the detected format. */
583 if (self->mode == GST_DECKLINK_MODE_AUTO) {
585 mode = gst_decklink_find_mode_and_format_for_caps (caps, &f);
587 GST_WARNING_OBJECT (self,
588 "Failed to find compatible mode for caps %" GST_PTR_FORMAT, caps);
591 if (self->video_format != GST_DECKLINK_VIDEO_FORMAT_AUTO &&
592 gst_decklink_pixel_format_from_type (self->video_format) != f) {
593 GST_WARNING_OBJECT (self, "Failed to set pixel format to %d",
598 /* We don't have to give the format in EnableVideoOutput. Therefore,
599 * even if it's AUTO, we have it stored in self->info and set it in
600 * gst_decklink_video_sink_prepare */
601 mode = gst_decklink_get_mode (self->mode);
602 g_assert (mode != NULL);
605 /* enable or disable keyer */
606 if (self->output->keyer != NULL) {
607 if (self->keyer_mode == bmdKeyerModeOff) {
608 self->output->keyer->Disable ();
609 } else if (self->keyer_mode == bmdKeyerModeInternal) {
610 self->output->keyer->Enable (false);
611 self->output->keyer->SetLevel (self->keyer_level);
612 } else if (self->keyer_mode == bmdKeyerModeExternal) {
613 self->output->keyer->Enable (true);
614 self->output->keyer->SetLevel (self->keyer_level);
616 g_assert_not_reached ();
618 } else if (self->keyer_mode != bmdKeyerModeOff) {
619 GST_WARNING_OBJECT (self, "Failed to set keyer to mode %d",
623 /* The timecode_format itself is used when we embed the actual timecode data
624 * into the frame. Now we only need to know which of the two standards the
625 * timecode format will adhere to: VITC or RP188, and send the appropriate
626 * flag to EnableVideoOutput. The exact format is specified later.
628 * Note that this flag will have no effect in practice if the video stream
629 * does not contain timecode metadata.
631 if ((gint64) self->timecode_format ==
632 (gint64) GST_DECKLINK_TIMECODE_FORMAT_VITC
633 || (gint64) self->timecode_format ==
634 (gint64) GST_DECKLINK_TIMECODE_FORMAT_VITCFIELD2)
635 flags = bmdVideoOutputVITC;
637 flags = bmdVideoOutputRP188;
/* CC or AFD/Bar insertion requires VANC output as well. */
639 if (self->caption_line > 0 || self->afd_bar_line > 0)
640 flags = (BMDVideoOutputFlags) (flags | bmdVideoOutputVANC);
642 ret = self->output->output->EnableVideoOutput (mode->mode, flags);
644 GST_WARNING_OBJECT (self, "Failed to enable video output: 0x%08lx",
645 (unsigned long) ret);
/* Mark output enabled and kick scheduled playback if it is pending. */
650 g_mutex_lock (&self->output->lock);
651 self->output->mode = mode;
652 self->output->video_enabled = TRUE;
653 if (self->output->start_scheduled_playback)
654 self->output->start_scheduled_playback (self->output->videosink);
655 g_mutex_unlock (&self->output->lock);
/* Drop any VBI encoder built for the previous format; it is re-created
 * lazily in write_vbi for the new video info. */
657 if (self->vbiencoder) {
658 gst_video_vbi_encoder_free (self->vbiencoder);
659 self->vbiencoder = NULL;
660 self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
/* get_caps: builds the advertised sink caps from the mode/video-format
 * properties — fully open when both are AUTO, otherwise restricted to
 * the configured mode and/or pixel format — then widens the framerate
 * and intersects with any downstream filter. */
667 gst_decklink_video_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
669 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
670 GstCaps *mode_caps, *caps;
672 if (self->mode == GST_DECKLINK_MODE_AUTO
673 && self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO)
674 mode_caps = gst_decklink_mode_get_template_caps (FALSE);
675 else if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO)
676 mode_caps = gst_decklink_mode_get_caps_all_formats (self->mode, FALSE);
677 else if (self->mode == GST_DECKLINK_MODE_AUTO)
679 gst_decklink_pixel_format_get_caps (gst_decklink_pixel_format_from_type
680 (self->video_format), FALSE);
683 gst_decklink_mode_get_caps (self->mode,
684 gst_decklink_pixel_format_from_type (self->video_format), FALSE);
685 mode_caps = gst_caps_make_writable (mode_caps);
686 /* For output we support any framerate and only really care about timestamps */
687 gst_caps_map_in_place (mode_caps, reset_framerate, NULL);
691 gst_caps_intersect_full (filter, mode_caps, GST_CAPS_INTERSECT_FIRST);
692 gst_caps_unref (mode_caps);
/* render vfunc — body not visible in this listing; actual frame
 * scheduling appears to happen in the prepare vfunc (see class_init). */
701 gst_decklink_video_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
/* Converts a timestamp (and optional duration) from the pipeline clock
 * domain into the DeckLink hardware clock domain, using the calibration
 * of output->clock. If the element is already slaved to the hardware
 * clock no conversion is needed; otherwise the timestamp is first turned
 * into a running time and then unadjusted through the clock calibration.
 * timestamp must be non-NULL; duration may be NULL. */
707 gst_decklink_video_sink_convert_to_internal_clock (GstDecklinkVideoSink * self,
708 GstClockTime * timestamp, GstClockTime * duration)
711 GstClockTime internal_base, external_base, internal_offset;
713 g_assert (timestamp != NULL);
/* Snapshot the base times/offset under the object lock. */
715 clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
716 GST_OBJECT_LOCK (self);
717 internal_base = self->internal_base_time;
718 external_base = self->external_base_time;
719 internal_offset = self->internal_time_offset;
720 GST_OBJECT_UNLOCK (self);
/* Pipeline clock differs from the DeckLink clock: convert via the
 * clock calibration (internal/external/rate). */
722 if (!clock || clock != self->output->clock) {
723 GstClockTime internal, external, rate_n, rate_d;
724 GstClockTime external_timestamp = *timestamp;
725 GstClockTime base_time;
727 gst_clock_get_calibration (self->output->clock, &internal, &external,
730 // Convert to the running time corresponding to both clock times
731 if (!GST_CLOCK_TIME_IS_VALID (internal_base) || internal < internal_base)
734 internal -= internal_base;
736 if (!GST_CLOCK_TIME_IS_VALID (external_base) || external < external_base)
739 external -= external_base;
741 // Convert timestamp to the "running time" since we started scheduled
742 // playback, that is the difference between the pipeline's base time
743 // and our own base time.
744 base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
745 if (base_time > external_base)
748 base_time = external_base - base_time;
/* Clamp instead of underflowing: timestamps before our base map to 0. */
750 if (external_timestamp < base_time)
751 external_timestamp = 0;
753 external_timestamp = external_timestamp - base_time;
755 // Get the difference in the external time, note
756 // that the running time is external time.
757 // Then scale this difference and offset it to
758 // our internal time. Now we have the running time
759 // according to our internal clock.
761 // For the duration we just scale
763 gst_clock_unadjust_with_calibration (NULL, external_timestamp,
764 internal, external, rate_n, rate_d);
766 GST_LOG_OBJECT (self,
767 "Converted %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT " (internal: %"
768 GST_TIME_FORMAT " external %" GST_TIME_FORMAT " rate: %lf)",
769 GST_TIME_ARGS (external_timestamp), GST_TIME_ARGS (*timestamp),
770 GST_TIME_ARGS (internal), GST_TIME_ARGS (external),
771 ((gdouble) rate_n) / ((gdouble) rate_d));
/* Durations are pure deltas: only the clock rate applies, no offsets. */
774 GstClockTime external_duration = *duration;
776 *duration = gst_util_uint64_scale (external_duration, rate_d, rate_n);
778 GST_LOG_OBJECT (self,
779 "Converted duration %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
780 " (internal: %" GST_TIME_FORMAT " external %" GST_TIME_FORMAT
781 " rate: %lf)", GST_TIME_ARGS (external_duration),
782 GST_TIME_ARGS (*duration), GST_TIME_ARGS (internal),
783 GST_TIME_ARGS (external), ((gdouble) rate_n) / ((gdouble) rate_d));
786 GST_LOG_OBJECT (self, "No clock conversion needed, same clocks: %"
787 GST_TIME_FORMAT, GST_TIME_ARGS (*timestamp));
/* Finally anchor the result in the hardware clock's epoch: apply the
 * stored offset, or fall back to the clock's current internal time when
 * the base times were never established. */
790 if (external_base != GST_CLOCK_TIME_NONE &&
791 internal_base != GST_CLOCK_TIME_NONE)
792 *timestamp += internal_offset;
794 *timestamp = gst_clock_get_internal_time (self->output->clock);
796 GST_DEBUG_OBJECT (self, "Output timestamp %" GST_TIME_FORMAT
797 " using clock epoch %" GST_TIME_FORMAT,
798 GST_TIME_ARGS (*timestamp), GST_TIME_ARGS (self->output->clock_epoch));
801 gst_object_unref (clock);
804 /* Copied from ext/closedcaption/gstccconverter.c */
805 /* Converts raw CEA708 cc_data and an optional timecode into CDP */
/* Serialises a CDP packet into `cdp` (capacity cdp_len) using a byte
 * writer: 0x9669 header, frame-rate id derived from the configured mode,
 * flags, sequence counter, optional timecode section (when tc_meta is
 * given), the cc_data section, footer and a two's-complement checksum.
 * Returns the total packet length (visible at the tail of the function). */
807 convert_cea708_cc_data_cea708_cdp_internal (GstDecklinkVideoSink * self,
808 const guint8 * cc_data, guint cc_data_len, guint8 * cdp, guint cdp_len,
809 const GstVideoTimeCodeMeta * tc_meta)
812 guint8 flags, checksum;
814 const GstDecklinkMode *mode = gst_decklink_get_mode (self->mode);
816 gst_byte_writer_init_with_data (&bw, cdp, cdp_len, FALSE);
817 gst_byte_writer_put_uint16_be_unchecked (&bw, 0x9669);
818 /* Write a length of 0 for now */
819 gst_byte_writer_put_uint8_unchecked (&bw, 0);
/* CDP frame-rate identifier byte, keyed off the mode's fps fraction. */
820 if (mode->fps_n == 24000 && mode->fps_d == 1001) {
821 gst_byte_writer_put_uint8_unchecked (&bw, 0x1f);
822 } else if (mode->fps_n == 24 && mode->fps_d == 1) {
823 gst_byte_writer_put_uint8_unchecked (&bw, 0x2f);
824 } else if (mode->fps_n == 25 && mode->fps_d == 1) {
825 gst_byte_writer_put_uint8_unchecked (&bw, 0x3f);
826 } else if (mode->fps_n == 30 && mode->fps_d == 1001) {
827 gst_byte_writer_put_uint8_unchecked (&bw, 0x4f);
828 } else if (mode->fps_n == 30 && mode->fps_d == 1) {
829 gst_byte_writer_put_uint8_unchecked (&bw, 0x5f);
830 } else if (mode->fps_n == 50 && mode->fps_d == 1) {
831 gst_byte_writer_put_uint8_unchecked (&bw, 0x6f);
832 } else if (mode->fps_n == 60000 && mode->fps_d == 1001) {
833 gst_byte_writer_put_uint8_unchecked (&bw, 0x7f);
834 } else if (mode->fps_n == 60 && mode->fps_d == 1) {
835 gst_byte_writer_put_uint8_unchecked (&bw, 0x8f);
/* Any other framerate has no CDP mapping; mode selection should have
 * prevented this. */
837 g_assert_not_reached ();
840 /* ccdata_present | caption_service_active */
843 /* time_code_present */
850 gst_byte_writer_put_uint8_unchecked (&bw, flags);
852 gst_byte_writer_put_uint16_be_unchecked (&bw, self->cdp_hdr_sequence_cntr);
/* Optional timecode section (id 0x71): hours/minutes/seconds/frames as
 * BCD pairs, with field and drop-frame flags packed into the top bits. */
855 const GstVideoTimeCode *tc = &tc_meta->tc;
857 gst_byte_writer_put_uint8_unchecked (&bw, 0x71);
858 gst_byte_writer_put_uint8_unchecked (&bw, 0xc0 |
859 (((tc->hours % 10) & 0x3) << 4) |
860 ((tc->hours - (tc->hours % 10)) & 0xf));
862 gst_byte_writer_put_uint8_unchecked (&bw, 0x80 |
863 (((tc->minutes % 10) & 0x7) << 4) |
864 ((tc->minutes - (tc->minutes % 10)) & 0xf));
866 gst_byte_writer_put_uint8_unchecked (&bw,
868 2 ? 0x00 : 0x80) | (((tc->seconds %
869 10) & 0x7) << 4) | ((tc->seconds -
870 (tc->seconds % 10)) & 0xf));
872 gst_byte_writer_put_uint8_unchecked (&bw,
873 ((tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) ? 0x80 :
874 0x00) | (((tc->frames % 10) & 0x3) << 4) | ((tc->frames -
875 (tc->frames % 10)) & 0xf));
/* cc_data section (id 0x72): count is in cc_data triplets. */
878 gst_byte_writer_put_uint8_unchecked (&bw, 0x72);
879 gst_byte_writer_put_uint8_unchecked (&bw, 0xe0 | cc_data_len / 3);
880 gst_byte_writer_put_data_unchecked (&bw, cc_data, cc_data_len);
/* Footer (id 0x74): echoes the sequence counter, then bumps it. */
882 gst_byte_writer_put_uint8_unchecked (&bw, 0x74);
883 gst_byte_writer_put_uint16_be_unchecked (&bw, self->cdp_hdr_sequence_cntr);
884 self->cdp_hdr_sequence_cntr++;
885 /* We calculate the checksum afterwards */
886 gst_byte_writer_put_uint8_unchecked (&bw, 0);
/* Patch the real packet length back into byte 2 of the header. */
888 len = gst_byte_writer_get_pos (&bw);
889 gst_byte_writer_set_pos (&bw, 2);
890 gst_byte_writer_put_uint8_unchecked (&bw, len);
/* Checksum: the byte that makes the whole packet sum to 0 mod 256. */
893 for (i = 0; i < len; i++) {
897 checksum = 256 - checksum;
898 cdp[len - 1] = checksum;
904 write_vbi (GstDecklinkVideoSink * self, GstBuffer * buffer,
905 BMDPixelFormat format, IDeckLinkMutableVideoFrame * frame,
906 GstVideoTimeCodeMeta * tc_meta)
908 IDeckLinkVideoFrameAncillary *vanc_frame = NULL;
909 gpointer iter = NULL;
910 GstVideoCaptionMeta *cc_meta;
912 gboolean got_captions = FALSE;
914 if (self->caption_line == 0 && self->afd_bar_line == 0)
917 if (self->vbiencoder == NULL) {
919 gst_video_vbi_encoder_new (self->info.finfo->format, self->info.width);
920 self->anc_vformat = self->info.finfo->format;
923 /* Put any closed captions into the configured line */
925 (GstVideoCaptionMeta *) gst_buffer_iterate_meta_filtered (buffer,
926 &iter, GST_VIDEO_CAPTION_META_API_TYPE))) {
927 switch (cc_meta->caption_type) {
928 case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:{
932 n = cc_meta->size / 2;
933 if (cc_meta->size > 46) {
934 GST_WARNING_OBJECT (self, "Too big raw CEA608 buffer");
938 /* This is the offset from line 9 for 525-line fields and from line
939 * 5 for 625-line fields.
941 * The highest bit is set for field 1 but not for field 0, but we
942 * have no way of knowing the field here
944 for (i = 0; i < n; i++) {
945 data[3 * i] = 0x80 | (self->info.height ==
946 525 ? self->caption_line - 9 : self->caption_line - 5);
947 data[3 * i + 1] = cc_meta->data[2 * i];
948 data[3 * i + 2] = cc_meta->data[2 * i + 1];
951 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
953 GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
954 GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, data, 3))
955 GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
961 case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:{
962 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
964 GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
965 GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, cc_meta->data,
967 GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
973 case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:{
977 n = cc_meta->size / 3;
978 if (cc_meta->size > 46) {
979 GST_WARNING_OBJECT (self, "Too big raw CEA708 buffer");
983 n = convert_cea708_cc_data_cea708_cdp_internal (self, cc_meta->data,
984 cc_meta->size, data, sizeof (data), tc_meta);
985 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder, FALSE,
986 GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
987 GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, data, n))
988 GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
994 case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:{
995 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
997 GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
998 GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, cc_meta->data,
1000 GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
1002 got_captions = TRUE;
1007 GST_FIXME_OBJECT (self, "Caption type %d not supported",
1008 cc_meta->caption_type);
1014 if ((got_captions || self->afd_bar_line != 0)
1015 && self->output->output->CreateAncillaryData (format,
1016 &vanc_frame) == S_OK) {
1017 GstVideoAFDMeta *afd_meta = NULL, *afd_meta2 = NULL;
1018 GstVideoBarMeta *bar_meta = NULL, *bar_meta2 = NULL;
1021 guint8 afd_bar_data[8] = { 0, };
1022 guint8 afd_bar_data2[8] = { 0, };
1024 gboolean is_letterbox = 0;
1025 guint16 bar1 = 0, bar2 = 0;
1028 // Get any reasonable AFD/Bar metas for both fields
1031 gst_buffer_iterate_meta_filtered (buffer, &meta_iter,
1032 GST_VIDEO_AFD_META_API_TYPE))) {
1033 GstVideoAFDMeta *tmp_meta = (GstVideoAFDMeta *) meta;
1035 if (tmp_meta->field == 0 || !afd_meta || (afd_meta && afd_meta->field != 0
1036 && tmp_meta->field == 0))
1037 afd_meta = tmp_meta;
1038 if (tmp_meta->field == 1 || !afd_meta2 || (afd_meta2
1039 && afd_meta->field != 1 && tmp_meta->field == 1))
1040 afd_meta2 = tmp_meta;
1045 gst_buffer_iterate_meta_filtered (buffer, &meta_iter,
1046 GST_VIDEO_BAR_META_API_TYPE))) {
1047 GstVideoBarMeta *tmp_meta = (GstVideoBarMeta *) meta;
1049 if (tmp_meta->field == 0 || !bar_meta || (bar_meta && bar_meta->field != 0
1050 && tmp_meta->field == 0))
1051 bar_meta = tmp_meta;
1052 if (tmp_meta->field == 1 || !bar_meta2 || (bar_meta2
1053 && bar_meta->field != 1 && tmp_meta->field == 1))
1054 bar_meta2 = tmp_meta;
1057 for (i = 0; i < 2; i++) {
1058 guint8 *afd_bar_data_ptr;
1061 afd_bar_data_ptr = afd_bar_data;
1062 afd = afd_meta ? afd_meta->afd : 0;
1063 is_letterbox = bar_meta ? bar_meta->is_letterbox : FALSE;
1064 bar1 = bar_meta ? bar_meta->bar_data1 : 0;
1065 bar2 = bar_meta ? bar_meta->bar_data2 : 0;
1067 afd_bar_data_ptr = afd_bar_data2;
1068 afd = afd_meta2 ? afd_meta2->afd : 0;
1069 is_letterbox = bar_meta2 ? bar_meta2->is_letterbox : FALSE;
1070 bar1 = bar_meta2 ? bar_meta2->bar_data1 : 0;
1071 bar2 = bar_meta2 ? bar_meta2->bar_data2 : 0;
1074 /* See SMPTE 2016-3 Section 4 */
1076 if (self->mode < (gint) GST_DECKLINK_MODE_NTSC_WIDESCREEN) {
1077 afd_bar_data_ptr[0] = (afd << 3) | 0x0;
1079 afd_bar_data_ptr[0] = (afd << 3) | 0x4;
1083 afd_bar_data_ptr[3] = is_letterbox ? 0xc0 : 0x30;
1085 /* Bar value 1 and 2 */
1086 GST_WRITE_UINT16_BE (&afd_bar_data_ptr[4], bar1);
1087 GST_WRITE_UINT16_BE (&afd_bar_data_ptr[6], bar2);
1090 /* AFD on the same line as the captions */
1091 if (self->caption_line == self->afd_bar_line) {
1092 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
1093 FALSE, GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR >> 8,
1094 GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR & 0xff, afd_bar_data,
1095 sizeof (afd_bar_data)))
1096 GST_WARNING_OBJECT (self,
1097 "Couldn't add AFD/Bar data to ancillary data");
1100 /* FIXME: Add captions to the correct field? Captions for the second
1101 * field should probably be inserted into the second field */
1103 if (got_captions || self->caption_line == self->afd_bar_line) {
1104 if (vanc_frame->GetBufferForVerticalBlankingLine (self->caption_line,
1105 (void **) &vancdata) == S_OK) {
1106 gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
1108 GST_WARNING_OBJECT (self,
1109 "Failed to get buffer for line %d ancillary data",
1110 self->caption_line);
1114 /* AFD on a different line than the captions */
1115 if (self->afd_bar_line != 0 && self->caption_line != self->afd_bar_line) {
1116 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
1117 FALSE, GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR >> 8,
1118 GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR & 0xff, afd_bar_data,
1119 sizeof (afd_bar_data)))
1120 GST_WARNING_OBJECT (self,
1121 "Couldn't add AFD/Bar data to ancillary data");
1123 if (vanc_frame->GetBufferForVerticalBlankingLine (self->afd_bar_line,
1124 (void **) &vancdata) == S_OK) {
1125 gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
1127 GST_WARNING_OBJECT (self,
1128 "Failed to get buffer for line %d ancillary data",
1129 self->afd_bar_line);
1133 /* For interlaced video we need to also add AFD to the second field */
1134 if (GST_VIDEO_INFO_IS_INTERLACED (&self->info) && self->afd_bar_line != 0) {
1135 guint field2_offset;
1137 /* The VANC lines for the second field are at an offset, depending on
1138 * the format in use.
1140 switch (self->info.height) {
1142 /* NTSC: 525 / 2 + 1 */
1143 field2_offset = 263;
1146 /* PAL: 625 / 2 + 1 */
1147 field2_offset = 313;
1150 /* 1080i: 1125 / 2 + 1 */
1151 field2_offset = 563;
1154 g_assert_not_reached ();
1157 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
1158 FALSE, GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR >> 8,
1159 GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR & 0xff, afd_bar_data2,
1160 sizeof (afd_bar_data)))
1161 GST_WARNING_OBJECT (self,
1162 "Couldn't add AFD/Bar data to ancillary data");
1164 if (vanc_frame->GetBufferForVerticalBlankingLine (self->afd_bar_line +
1165 field2_offset, (void **) &vancdata) == S_OK) {
1166 gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
1168 GST_WARNING_OBJECT (self,
1169 "Failed to get buffer for line %d ancillary data",
1170 self->afd_bar_line);
1174 if (frame->SetAncillaryData (vanc_frame) != S_OK) {
1175 GST_WARNING_OBJECT (self, "Failed to set ancillary data");
1178 vanc_frame->Release ();
1179 } else if (got_captions || self->afd_bar_line != 0) {
1180 GST_WARNING_OBJECT (self, "Failed to allocate ancillary data frame");
/* Copy one GstBuffer into a DeckLink video frame, attach timecode and VANC
 * ancillary data, and schedule it for playout on the device.
 *
 * NOTE(review): this excerpt elides source lines (the embedded numbering has
 * gaps), so some closing braces / else branches are not visible here; the
 * comments below only describe what the visible lines establish. */
1184 static GstFlowReturn
1185 gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
1187 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
1188 GstVideoFrame vframe;
1189 IDeckLinkMutableVideoFrame *frame;
1190 guint8 *outdata, *indata;
1191 GstFlowReturn flow_ret;
1193 GstClockTime timestamp, duration;
1194 GstClockTime running_time, running_time_duration;
1195 GstClockTime latency, render_delay;
1196 GstClockTimeDiff ts_offset;
1198 GstDecklinkVideoFormat caps_format;
1199 BMDPixelFormat format;
1201 GstVideoTimeCodeMeta *tc_meta;
1203 GST_DEBUG_OBJECT (self, "Preparing buffer %" GST_PTR_FORMAT, buffer);
/* Buffers without a timestamp are rejected outright for now. */
1205 // FIXME: Handle no timestamps
1206 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
1207 return GST_FLOW_ERROR;
/* Map the negotiated GStreamer video format to the DeckLink pixel format. */
1210 caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
1211 format = gst_decklink_pixel_format_from_type (caps_format);
1213 timestamp = GST_BUFFER_TIMESTAMP (buffer);
1214 duration = GST_BUFFER_DURATION (buffer);
/* No duration on the buffer: derive one frame duration from the framerate. */
1215 if (duration == GST_CLOCK_TIME_NONE) {
1217 gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
/* Convert buffer timestamps to running time via the sink segment. */
1221 gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
1222 GST_FORMAT_TIME, timestamp);
1223 running_time_duration =
1224 gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
1225 GST_FORMAT_TIME, timestamp + duration) - running_time;
/* Apply latency, ts-offset and render-delay the same way basesink's own
 * rendering path would (see gst_base_sink_adjust_time()), clamping so the
 * running time never underflows. */
1227 /* See gst_base_sink_adjust_time() */
1228 latency = gst_base_sink_get_latency (bsink);
1229 render_delay = gst_base_sink_get_render_delay (bsink);
1230 ts_offset = gst_base_sink_get_ts_offset (bsink);
1232 running_time += latency;
1234 if (ts_offset < 0) {
1235 ts_offset = -ts_offset;
1236 if ((GstClockTime) ts_offset < running_time)
1237 running_time -= ts_offset;
1241 running_time += ts_offset;
1244 if (running_time > render_delay)
1245 running_time -= render_delay;
/* Allocate a device-owned frame matching our negotiated geometry/stride. */
1249 ret = self->output->output->CreateVideoFrame (self->info.width,
1250 self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
1253 GST_ELEMENT_ERROR (self, STREAM, FAILED,
1254 (NULL), ("Failed to create video frame: 0x%08lx", (unsigned long) ret));
1255 return GST_FLOW_ERROR;
1258 if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
1259 GST_ERROR_OBJECT (self, "Failed to map video frame");
1260 flow_ret = GST_FLOW_ERROR;
/* Row-by-row copy: source and destination strides may differ, so copy only
 * MIN(src stride, dst row bytes) per line and advance each side by its own
 * stride. */
1264 frame->GetBytes ((void **) &outdata);
1265 indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
1267 MIN (GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0), frame->GetRowBytes ());
1268 for (i = 0; i < self->info.height; i++) {
1269 memcpy (outdata, indata, stride);
1270 indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
1271 outdata += frame->GetRowBytes ();
1273 gst_video_frame_unmap (&vframe);
/* If the buffer carries a timecode meta, translate its flags (drop-frame,
 * field mark) to BMD flags and attach it to the DeckLink frame. */
1275 tc_meta = gst_buffer_get_video_time_code_meta (buffer);
1277 BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
1280 if (((GstVideoTimeCodeFlags) (tc_meta->tc.
1281 config.flags)) & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
1282 bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
1284 bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
1285 if (tc_meta->tc.field_count == 2)
1286 bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);
1288 tc_str = gst_video_time_code_to_string (&tc_meta->tc);
1289 ret = frame->SetTimecodeFromComponents (self->timecode_format,
1290 (uint8_t) tc_meta->tc.hours,
1291 (uint8_t) tc_meta->tc.minutes,
1292 (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
1294 GST_ERROR_OBJECT (self,
1295 "Failed to set timecode %s to video frame: 0x%08lx", tc_str,
1296 (unsigned long) ret);
1297 flow_ret = GST_FLOW_ERROR;
1301 GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
/* Captions/AFD/Bar ancillary data are written by the write_vbi() helper. */
1305 write_vbi (self, buffer, format, frame, tc_meta);
/* Translate pipeline running time into the device's internal clock domain
 * before handing the frame to the scheduler. */
1307 gst_decklink_video_sink_convert_to_internal_clock (self, &running_time,
1308 &running_time_duration);
1310 GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
1311 " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
1312 GST_TIME_ARGS (running_time_duration));
1314 ret = self->output->output->ScheduleVideoFrame (frame,
1315 running_time, running_time_duration, GST_SECOND);
1317 GST_ELEMENT_ERROR (self, STREAM, FAILED,
1318 (NULL), ("Failed to schedule frame: 0x%08lx", (unsigned long) ret));
1319 flow_ret = GST_FLOW_ERROR;
1323 flow_ret = GST_FLOW_OK;
/* GstBaseSink::open — acquire the Nth DeckLink output device and initialise
 * the shared output state (mode, playback-start hook, clock bookkeeping).
 * NOTE(review): return-type line and some braces are elided in this listing. */
1333 gst_decklink_video_sink_open (GstBaseSink * bsink)
1335 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
1336 const GstDecklinkMode *mode;
1338 GST_DEBUG_OBJECT (self, "Starting");
1341 gst_decklink_acquire_nth_output (self->device_number,
1342 GST_ELEMENT_CAST (self), FALSE);
1343 if (!self->output) {
1344 GST_ERROR_OBJECT (self, "Failed to acquire output");
/* Device is now known, so the serial-number property becomes readable. */
1348 g_object_notify (G_OBJECT (self), "hw-serial-number");
1350 mode = gst_decklink_get_mode (self->mode);
1351 g_assert (mode != NULL);
/* All shared output state is protected by output->lock. The clock epoch is
 * advanced by the last observed clock time so the provided clock stays
 * monotonic across reopen cycles. */
1353 g_mutex_lock (&self->output->lock);
1354 self->output->mode = mode;
1355 self->output->start_scheduled_playback =
1356 gst_decklink_video_sink_start_scheduled_playback;
1357 self->output->clock_start_time = GST_CLOCK_TIME_NONE;
1358 self->output->clock_epoch += self->output->clock_last_time;
1359 self->output->clock_last_time = 0;
1360 self->output->clock_offset = 0;
/* Base times are invalidated so they get recomputed on the next
 * PAUSED->PLAYING transition (see change_state). */
1361 GST_OBJECT_LOCK (self);
1362 self->internal_base_time = GST_CLOCK_TIME_NONE;
1363 self->external_base_time = GST_CLOCK_TIME_NONE;
1364 GST_OBJECT_UNLOCK (self);
1365 g_mutex_unlock (&self->output->lock);
/* GstBaseSink::close — tear down video on the shared output and release the
 * device reference. Kicks start_scheduled_playback once more so a paired
 * audio sink (if any) can react to video being disabled. */
1371 gst_decklink_video_sink_close (GstBaseSink * bsink)
1373 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
1375 GST_DEBUG_OBJECT (self, "Closing");
1378 g_mutex_lock (&self->output->lock);
1379 self->output->mode = NULL;
1380 self->output->video_enabled = FALSE;
1381 if (self->output->start_scheduled_playback && self->output->videosink)
1382 self->output->start_scheduled_playback (self->output->videosink);
1383 g_mutex_unlock (&self->output->lock);
/* NOTE(review): DisableVideoOutput() is called outside output->lock here,
 * unlike the locked section above — presumably intentional; verify against
 * the full source. */
1385 self->output->output->DisableVideoOutput ();
1386 gst_decklink_release_nth_output (self->device_number,
1387 GST_ELEMENT_CAST (self), FALSE);
1388 self->output = NULL;
/* Stop helper: disable video output on the device (if it was enabled) and
 * free the VBI encoder used for ancillary data. Does NOT release the device;
 * that happens in close(). */
1395 gst_decklink_video_sink_stop (GstDecklinkVideoSink * self)
1397 GST_DEBUG_OBJECT (self, "Stopping");
1399 if (self->output && self->output->video_enabled) {
1400 g_mutex_lock (&self->output->lock);
1401 self->output->video_enabled = FALSE;
1402 g_mutex_unlock (&self->output->lock);
/* Also drop the frame-completion callback so the driver stops calling us. */
1404 self->output->output->DisableVideoOutput ();
1405 self->output->output->SetScheduledFrameCompletionCallback (NULL);
/* Reset VBI/ancillary encoding state; it is lazily recreated on next use. */
1408 if (self->vbiencoder) {
1409 gst_video_vbi_encoder_free (self->vbiencoder);
1410 self->vbiencoder = NULL;
1411 self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
/* Wait (holding output->lock, on output->cond) until scheduled playback has
 * actually stopped. Bounded by a 1 s timeout because the hardware sometimes
 * stops without signalling; after a timeout the state is re-polled. */
1418 _wait_for_stop_notify (GstDecklinkVideoSink * self)
1420 bool active = false;
1422 self->output->output->IsScheduledPlaybackRunning (&active);
1424 /* cause sometimes decklink stops without notifying us... */
1425 guint64 wait_time = g_get_monotonic_time () + G_TIME_SPAN_SECOND;
1426 if (!g_cond_wait_until (&self->output->cond, &self->output->lock,
1428 GST_WARNING_OBJECT (self, "Failed to wait for stop notification");
1429 self->output->output->IsScheduledPlaybackRunning (&active);
/* Start (or restart) scheduled playback on the device once all preconditions
 * hold. Called with output->lock held (it is temporarily dropped to query
 * the clock — see below). Shared between the video and audio sinks via the
 * output->start_scheduled_playback function pointer. */
1434 gst_decklink_video_sink_start_scheduled_playback (GstElement * element)
1436 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
1437 GstClockTime start_time;
1441 // Check if we're already started
1442 if (self->output->started) {
1443 GST_DEBUG_OBJECT (self, "Already started");
1446 // Check if we're ready to start:
1447 // we need video and audio enabled, if there is audio
1448 // and both of the two elements need to be set to PLAYING already
1449 if (!self->output->video_enabled) {
1450 GST_DEBUG_OBJECT (self,
1451 "Not starting scheduled playback yet: video not enabled yet!");
1455 if (self->output->audiosink && !self->output->audio_enabled) {
1456 GST_DEBUG_OBJECT (self,
1457 "Not starting scheduled playback yet: "
1458 "have audio but not enabled yet!");
/* Both elements (and the audio sink, if present) must be at least on their
 * way to PAUSED before playback may start. */
1462 if ((GST_STATE (self) < GST_STATE_PAUSED
1463 && GST_STATE_PENDING (self) < GST_STATE_PAUSED)
1464 || (self->output->audiosink &&
1465 GST_STATE (self->output->audiosink) < GST_STATE_PAUSED
1466 && GST_STATE_PENDING (self->output->audiosink) < GST_STATE_PAUSED)) {
1467 GST_DEBUG_OBJECT (self,
1468 "Not starting scheduled playback yet: "
1469 "Elements are not set to PAUSED yet");
/* Drop the lock while reading the clock to avoid lock-order issues, then
 * re-check `started` since another thread may have raced us. */
1472 // Need to unlock to get the clock time
1473 g_mutex_unlock (&self->output->lock);
1475 start_time = gst_clock_get_internal_time (self->output->clock);
1477 g_mutex_lock (&self->output->lock);
1478 // Check if someone else started in the meantime
1479 if (self->output->started) {
/* If the hardware still reports playback running, stop it first and wait
 * for the stop to complete before restarting. */
1484 self->output->output->IsScheduledPlaybackRunning (&active);
1486 GST_DEBUG_OBJECT (self, "Stopping scheduled playback");
1488 self->output->started = FALSE;
1490 res = self->output->output->StopScheduledPlayback (0, 0, 0);
1492 GST_ELEMENT_ERROR (self, STREAM, FAILED,
1493 (NULL), ("Failed to stop scheduled playback: 0x%08lx",
1494 (unsigned long) res));
1497 // Wait until scheduled playback actually stopped
1498 _wait_for_stop_notify (self);
1501 GST_INFO_OBJECT (self,
1502 "Starting scheduled playback at %" GST_TIME_FORMAT,
1503 GST_TIME_ARGS (start_time));
1506 self->output->output->StartScheduledPlayback (start_time,
1509 GST_ELEMENT_ERROR (self, STREAM, FAILED,
1510 (NULL), ("Failed to start scheduled playback: 0x%08lx",
1511 (unsigned long) res));
1515 self->output->started = TRUE;
/* Stop scheduled playback at the current device-clock time and wait for the
 * hardware to confirm the stop. Returns GST_STATE_CHANGE_FAILURE if the
 * StopScheduledPlayback call fails; also invalidates the clock base times so
 * they are recomputed on the next start. */
1518 static GstStateChangeReturn
1519 gst_decklink_video_sink_stop_scheduled_playback (GstDecklinkVideoSink * self)
1521 GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
1522 GstClockTime start_time;
/* Nothing to do if playback was never started. */
1525 if (!self->output->started)
1528 start_time = gst_clock_get_internal_time (self->output->clock);
1530 GST_INFO_OBJECT (self,
1531 "Stopping scheduled playback at %" GST_TIME_FORMAT,
1532 GST_TIME_ARGS (start_time));
1534 g_mutex_lock (&self->output->lock);
1535 self->output->started = FALSE;
1536 res = self->output->output->StopScheduledPlayback (start_time, 0, GST_SECOND);
1538 GST_ELEMENT_ERROR (self, STREAM, FAILED,
1539 (NULL), ("Failed to stop scheduled playback: 0x%08lx", (unsigned long)
1541 ret = GST_STATE_CHANGE_FAILURE;
/* _wait_for_stop_notify() expects output->lock to be held. */
1544 // Wait until scheduled playback actually stopped
1545 _wait_for_stop_notify (self);
1547 g_mutex_unlock (&self->output->lock);
1548 GST_OBJECT_LOCK (self);
1549 self->internal_base_time = GST_CLOCK_TIME_NONE;
1550 self->external_base_time = GST_CLOCK_TIME_NONE;
1551 GST_OBJECT_UNLOCK (self);
/* GstElement::change_state — wires the DeckLink hardware clock into the
 * pipeline (clock-provide / clock-lost messages, master-slave calibration)
 * and starts/stops scheduled playback on the relevant transitions.
 *
 * NOTE(review): this excerpt elides source lines (numbering gaps), so some
 * break statements, braces and declarations are not visible; comments only
 * reflect the visible lines. */
1556 static GstStateChangeReturn
1557 gst_decklink_video_sink_change_state (GstElement * element,
1558 GstStateChange transition)
1560 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
1561 GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
1563 GST_DEBUG_OBJECT (self, "changing state: %s => %s",
1564 gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
1565 gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
1567 switch (transition) {
/* READY->PAUSED: reset VBI/CDP encoder state, roll the clock epoch forward,
 * announce our clock to the pipeline, and poke scheduled playback in case
 * all other preconditions are already met. */
1568 case GST_STATE_CHANGE_READY_TO_PAUSED:
1569 self->vbiencoder = NULL;
1570 self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
1571 self->cdp_hdr_sequence_cntr = 0;
1573 g_mutex_lock (&self->output->lock);
1574 self->output->clock_epoch += self->output->clock_last_time;
1575 self->output->clock_last_time = 0;
1576 self->output->clock_offset = 0;
1577 g_mutex_unlock (&self->output->lock);
1578 gst_element_post_message (element,
1579 gst_message_new_clock_provide (GST_OBJECT_CAST (element),
1580 self->output->clock, TRUE));
1581 g_mutex_lock (&self->output->lock);
1582 if (self->output->start_scheduled_playback)
1583 self->output->start_scheduled_playback (self->output->videosink);
1584 g_mutex_unlock (&self->output->lock);
/* PAUSED->PLAYING: if the pipeline selected a different clock, slave our
 * device clock to it; capture base times on the first start, or advance the
 * internal offset by the time spent paused on subsequent restarts. */
1586 case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
1589 clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
1591 if (clock != self->output->clock) {
1592 gst_clock_set_master (self->output->clock, clock);
1595 GST_OBJECT_LOCK (self);
1596 if (self->external_base_time == GST_CLOCK_TIME_NONE
1597 || self->internal_base_time == GST_CLOCK_TIME_NONE) {
1598 self->external_base_time = gst_clock_get_internal_time (clock);
1599 self->internal_base_time =
1600 gst_clock_get_internal_time (self->output->clock);
1601 self->internal_time_offset = self->internal_base_time;
1602 } else if (GST_CLOCK_TIME_IS_VALID (self->internal_pause_time)) {
1603 self->internal_time_offset +=
1604 gst_clock_get_internal_time (self->output->clock) - self->internal_pause_time;
1607 GST_INFO_OBJECT (self, "clock has been set to %" GST_PTR_FORMAT
1608 ", updated base times - internal: %" GST_TIME_FORMAT
1609 " external: %" GST_TIME_FORMAT " internal offset %"
1610 GST_TIME_FORMAT, clock,
1611 GST_TIME_ARGS (self->internal_base_time),
1612 GST_TIME_ARGS (self->external_base_time),
1613 GST_TIME_ARGS (self->internal_time_offset));
1614 GST_OBJECT_UNLOCK (self);
1616 gst_object_unref (clock);
/* Without a clock we cannot schedule frames — refuse to go to PLAYING. */
1618 GST_ELEMENT_ERROR (self, STREAM, FAILED,
1619 (NULL), ("Need a clock to go to PLAYING"));
1620 ret = GST_STATE_CHANGE_FAILURE;
1624 case GST_STATE_CHANGE_PAUSED_TO_READY:
1625 if (gst_decklink_video_sink_stop_scheduled_playback (self) ==
1626 GST_STATE_CHANGE_FAILURE)
1627 ret = GST_STATE_CHANGE_FAILURE;
1629 case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
/* Chain up to the parent class between the two transition switches. */
1635 if (ret == GST_STATE_CHANGE_FAILURE)
1637 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1638 if (ret == GST_STATE_CHANGE_FAILURE)
1641 switch (transition) {
/* Downward PAUSED->READY: announce clock-lost, unslave and re-calibrate the
 * device clock so it is reusable, then stop the output and clear all cached
 * base/pause times. */
1642 case GST_STATE_CHANGE_PAUSED_TO_READY:{
1643 gst_element_post_message (element,
1644 gst_message_new_clock_lost (GST_OBJECT_CAST (element),
1645 self->output->clock));
1646 gst_clock_set_master (self->output->clock, NULL);
1647 // Reset calibration to make the clock reusable next time we use it
1648 gst_clock_set_calibration (self->output->clock, 0, 0, 1, 1);
1649 g_mutex_lock (&self->output->lock);
1650 self->output->clock_epoch += self->output->clock_last_time;
1651 self->output->clock_last_time = 0;
1652 self->output->clock_offset = 0;
1653 g_mutex_unlock (&self->output->lock);
1654 gst_decklink_video_sink_stop (self);
1655 GST_OBJECT_LOCK (self);
1656 self->internal_base_time = GST_CLOCK_TIME_NONE;
1657 self->external_base_time = GST_CLOCK_TIME_NONE;
1658 self->internal_pause_time = GST_CLOCK_TIME_NONE;
1659 GST_OBJECT_UNLOCK (self);
1662 case GST_STATE_CHANGE_READY_TO_PAUSED:{
1665 case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
/* PLAYING->PAUSED: remember when we paused so the internal offset can be
 * advanced by the pause duration on the next PAUSED->PLAYING (see above). */
1667 case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
1668 self->internal_pause_time = gst_clock_get_internal_time (self->output->clock);
/* GstBaseSink::event — on FLUSH_STOP, invalidate the cached clock base times
 * so they are recomputed after the flush; everything else is delegated to
 * the parent class. NOTE(review): the FLUSH_START body and the use of
 * `reset_time` are elided in this listing. */
1678 gst_decklink_video_sink_event (GstBaseSink * bsink, GstEvent * event)
1680 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
1682 switch (GST_EVENT_TYPE (event)) {
1683 case GST_EVENT_FLUSH_START:
1687 case GST_EVENT_FLUSH_STOP:
1689 gboolean reset_time;
1691 gst_event_parse_flush_stop (event, &reset_time);
1693 GST_OBJECT_LOCK (self);
1694 /* force a recalculation of clock base times */
1695 self->external_base_time = GST_CLOCK_TIME_NONE;
1696 self->internal_base_time = GST_CLOCK_TIME_NONE;
1697 GST_OBJECT_UNLOCK (self);
1705 return GST_BASE_SINK_CLASS (parent_class)->event (bsink, event);
/* GstElement::provide_clock — expose the DeckLink hardware clock to the
 * pipeline; the caller takes the returned reference. NOTE(review): the guard
 * for self->output == NULL (lines 1712-1715) is elided in this listing. */
1709 gst_decklink_video_sink_provide_clock (GstElement * element)
1711 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
1716 return GST_CLOCK_CAST (gst_object_ref (self->output->clock));
1720 gst_decklink_video_sink_propose_allocation (GstBaseSink * bsink,
1725 GstBufferPool *pool;
1728 gst_query_parse_allocation (query, &caps, NULL);
1733 if (!gst_video_info_from_caps (&info, caps))
1736 size = GST_VIDEO_INFO_SIZE (&info);
1738 if (gst_query_get_n_allocation_pools (query) == 0) {
1739 GstStructure *structure;
1740 GstAllocator *allocator = NULL;
1741 GstAllocationParams params = { (GstMemoryFlags) 0, 15, 0, 0 };
1743 if (gst_query_get_n_allocation_params (query) > 0)
1744 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
1746 gst_query_add_allocation_param (query, allocator, ¶ms);
1748 pool = gst_video_buffer_pool_new ();
1750 structure = gst_buffer_pool_get_config (pool);
1751 gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
1752 gst_buffer_pool_config_set_allocator (structure, allocator, ¶ms);
1755 gst_object_unref (allocator);
1757 if (!gst_buffer_pool_set_config (pool, structure))
1760 gst_query_add_allocation_pool (query, pool, size, 0, 0);
1761 gst_object_unref (pool);
1762 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
1769 GST_ERROR_OBJECT (bsink, "failed to set config");
1770 gst_object_unref (pool);