2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:element-assrender
25 * Renders timestamped SSA/ASS subtitles on top of a video stream.
27 * ## Example launch line
29 * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! h264parse ! avdec_h264 ! assrender name=r ! videoconvert ! autovideosink d. ! queue ! "application/x-ass" ! r.
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
38 #include <gst/video/gstvideometa.h>
40 #include "gstassrender.h"
44 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
45 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
46 #define GST_CAT_DEFAULT gst_ass_render_debug
48 /* Filter signals and props */
62 /* FIXME: video-blend.c doesn't support formats with more than 8 bit per
63 * component (which get unpacked into ARGB64 or AYUV64) yet, such as:
64 * v210, v216, UYVP, GRAY16_LE, GRAY16_BE */
65 #define FORMATS "{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, \
66 I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, \
67 NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }"
69 #define ASSRENDER_CAPS GST_VIDEO_CAPS_MAKE(FORMATS)
71 #define ASSRENDER_ALL_CAPS ASSRENDER_CAPS ";" \
72 GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
74 static GstStaticCaps sw_template_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
76 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
79 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
82 static GstStaticPadTemplate video_sink_factory =
83 GST_STATIC_PAD_TEMPLATE ("video_sink",
86 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
89 static GstStaticPadTemplate text_sink_factory =
90 GST_STATIC_PAD_TEMPLATE ("text_sink",
93 GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
96 #define GST_ASS_RENDER_GET_LOCK(ass) (&GST_ASS_RENDER (ass)->lock)
97 #define GST_ASS_RENDER_GET_COND(ass) (&GST_ASS_RENDER (ass)->cond)
98 #define GST_ASS_RENDER_LOCK(ass) (g_mutex_lock (GST_ASS_RENDER_GET_LOCK (ass)))
99 #define GST_ASS_RENDER_UNLOCK(ass) (g_mutex_unlock (GST_ASS_RENDER_GET_LOCK (ass)))
100 #define GST_ASS_RENDER_WAIT(ass) (g_cond_wait (GST_ASS_RENDER_GET_COND (ass), GST_ASS_RENDER_GET_LOCK (ass)))
101 #define GST_ASS_RENDER_SIGNAL(ass) (g_cond_signal (GST_ASS_RENDER_GET_COND (ass)))
102 #define GST_ASS_RENDER_BROADCAST(ass)(g_cond_broadcast (GST_ASS_RENDER_GET_COND (ass)))
104 static void gst_ass_render_set_property (GObject * object, guint prop_id,
105 const GValue * value, GParamSpec * pspec);
106 static void gst_ass_render_get_property (GObject * object, guint prop_id,
107 GValue * value, GParamSpec * pspec);
109 static void gst_ass_render_finalize (GObject * object);
111 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
112 GstStateChange transition);
114 #define gst_ass_render_parent_class parent_class
115 G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);
117 GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender", \
118 0, "ASS/SSA subtitle renderer");\
119 GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",\
120 0, "ASS/SSA subtitle renderer library");
121 GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (assrender, "assrender",
122 GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER, _do_init);
124 static GstCaps *gst_ass_render_get_videosink_caps (GstPad * pad,
125 GstAssRender * render, GstCaps * filter);
126 static GstCaps *gst_ass_render_get_src_caps (GstPad * pad,
127 GstAssRender * render, GstCaps * filter);
129 static gboolean gst_ass_render_setcaps_video (GstPad * pad,
130 GstAssRender * render, GstCaps * caps);
131 static gboolean gst_ass_render_setcaps_text (GstPad * pad,
132 GstAssRender * render, GstCaps * caps);
134 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
135 GstObject * parent, GstBuffer * buf);
136 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad,
137 GstObject * parent, GstBuffer * buf);
139 static gboolean gst_ass_render_event_video (GstPad * pad, GstObject * parent,
141 static gboolean gst_ass_render_event_text (GstPad * pad, GstObject * parent,
143 static gboolean gst_ass_render_event_src (GstPad * pad, GstObject * parent,
146 static gboolean gst_ass_render_query_video (GstPad * pad, GstObject * parent,
148 static gboolean gst_ass_render_query_src (GstPad * pad, GstObject * parent,
151 /* initialize the plugin's class */
153 gst_ass_render_class_init (GstAssRenderClass * klass)
155 GObjectClass *gobject_class = (GObjectClass *) klass;
156 GstElementClass *gstelement_class = (GstElementClass *) klass;
158 gobject_class->set_property = gst_ass_render_set_property;
159 gobject_class->get_property = gst_ass_render_get_property;
160 gobject_class->finalize = gst_ass_render_finalize;
162 g_object_class_install_property (gobject_class, PROP_ENABLE,
163 g_param_spec_boolean ("enable", "Enable",
164 "Enable rendering of subtitles", TRUE,
165 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
167 g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
168 g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
169 "Extract and use fonts embedded in the stream", TRUE,
170 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
172 g_object_class_install_property (gobject_class, PROP_WAIT_TEXT,
173 g_param_spec_boolean ("wait-text", "Wait Text",
174 "Whether to wait for subtitles", TRUE,
175 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
177 gstelement_class->change_state =
178 GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
180 gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
181 gst_element_class_add_static_pad_template (gstelement_class,
182 &video_sink_factory);
183 gst_element_class_add_static_pad_template (gstelement_class,
186 gst_element_class_set_static_metadata (gstelement_class, "ASS/SSA Render",
187 "Mixer/Video/Overlay/Subtitle",
188 "Renders ASS/SSA subtitles with libass",
189 "Benjamin Schmitz <vortex@wolpzone.de>, "
190 "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
/* Log callback installed into libass via ass_set_message_cb(); forwards a
 * libass message to the matching GStreamer debug category macro on the
 * "assrender_library" category.
 * NOTE(review): the level-dispatch lines (switch/if on @level) and the
 * g_free(message) are not visible in this fragment — confirm the string
 * returned by g_strdup_vprintf() is freed on every path. */
194 _libass_message_cb (gint level, const gchar * fmt, va_list args,
197 gchar *message = g_strdup_vprintf (fmt, args);
/* one GST_CAT_*_OBJECT per severity; lower libass levels are more severe */
200 GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
202 GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
204 GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
206 GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
208 GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
214 gst_ass_render_init (GstAssRender * render)
216 GST_DEBUG_OBJECT (render, "init");
218 render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
219 render->video_sinkpad =
220 gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
221 render->text_sinkpad =
222 gst_pad_new_from_static_template (&text_sink_factory, "text_sink");
224 gst_pad_set_chain_function (render->video_sinkpad,
225 GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
226 gst_pad_set_chain_function (render->text_sinkpad,
227 GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));
229 gst_pad_set_event_function (render->video_sinkpad,
230 GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
231 gst_pad_set_event_function (render->text_sinkpad,
232 GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
233 gst_pad_set_event_function (render->srcpad,
234 GST_DEBUG_FUNCPTR (gst_ass_render_event_src));
236 gst_pad_set_query_function (render->srcpad,
237 GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
238 gst_pad_set_query_function (render->video_sinkpad,
239 GST_DEBUG_FUNCPTR (gst_ass_render_query_video));
241 GST_PAD_SET_PROXY_ALLOCATION (render->video_sinkpad);
243 gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
244 gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
245 gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);
247 gst_video_info_init (&render->info);
249 g_mutex_init (&render->lock);
250 g_cond_init (&render->cond);
252 render->renderer_init_ok = FALSE;
253 render->track_init_ok = FALSE;
254 render->enable = TRUE;
255 render->embeddedfonts = TRUE;
256 render->wait_text = FALSE;
258 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
259 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
261 g_mutex_init (&render->ass_mutex);
262 render->ass_library = ass_library_init ();
263 ass_set_message_cb (render->ass_library, _libass_message_cb, render);
264 ass_set_extract_fonts (render->ass_library, 1);
266 render->ass_renderer = ass_renderer_init (render->ass_library);
267 if (!render->ass_renderer) {
268 GST_WARNING_OBJECT (render, "cannot create renderer instance");
269 g_assert_not_reached ();
272 render->ass_track = NULL;
274 GST_DEBUG_OBJECT (render, "init complete");
278 gst_ass_render_finalize (GObject * object)
280 GstAssRender *render = GST_ASS_RENDER (object);
282 g_mutex_clear (&render->lock);
283 g_cond_clear (&render->cond);
285 if (render->ass_track) {
286 ass_free_track (render->ass_track);
289 if (render->ass_renderer) {
290 ass_renderer_done (render->ass_renderer);
293 if (render->ass_library) {
294 ass_library_done (render->ass_library);
297 g_mutex_clear (&render->ass_mutex);
299 G_OBJECT_CLASS (parent_class)->finalize (object);
303 gst_ass_render_reset_composition (GstAssRender * render)
305 if (render->composition) {
306 gst_video_overlay_composition_unref (render->composition);
307 render->composition = NULL;
312 gst_ass_render_set_property (GObject * object, guint prop_id,
313 const GValue * value, GParamSpec * pspec)
315 GstAssRender *render = GST_ASS_RENDER (object);
317 GST_ASS_RENDER_LOCK (render);
320 render->enable = g_value_get_boolean (value);
322 case PROP_EMBEDDEDFONTS:
323 render->embeddedfonts = g_value_get_boolean (value);
324 g_mutex_lock (&render->ass_mutex);
325 ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
326 g_mutex_unlock (&render->ass_mutex);
329 render->wait_text = g_value_get_boolean (value);
332 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
335 GST_ASS_RENDER_UNLOCK (render);
339 gst_ass_render_get_property (GObject * object, guint prop_id,
340 GValue * value, GParamSpec * pspec)
342 GstAssRender *render = GST_ASS_RENDER (object);
344 GST_ASS_RENDER_LOCK (render);
347 g_value_set_boolean (value, render->enable);
349 case PROP_EMBEDDEDFONTS:
350 g_value_set_boolean (value, render->embeddedfonts);
353 g_value_set_boolean (value, render->wait_text);
356 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
359 GST_ASS_RENDER_UNLOCK (render);
362 /* Called with lock held */
364 gst_ass_render_pop_text (GstAssRender * render)
366 while (render->subtitle_pending) {
367 GST_DEBUG_OBJECT (render, "releasing text buffer %p",
368 render->subtitle_pending->data);
369 gst_buffer_unref (render->subtitle_pending->data);
370 render->subtitle_pending =
371 g_slist_delete_link (render->subtitle_pending,
372 render->subtitle_pending);
375 /* Let the text task know we used that buffer */
376 GST_ASS_RENDER_BROADCAST (render);
/* Element state-change handler.  On the way down (PAUSED->READY) the
 * streaming threads are unblocked *before* chaining up, then libass state
 * is torn down after the parent has completed the transition.  On the way
 * up (READY->PAUSED) per-stream flags and segments are reset.
 * NOTE(review): the default cases, break statements and the early
 * "return ret;" after a failed parent transition are not visible in this
 * fragment. */
379 static GstStateChangeReturn
380 gst_ass_render_change_state (GstElement * element, GstStateChange transition)
382 GstAssRender *render = GST_ASS_RENDER (element);
383 GstStateChangeReturn ret;
385 switch (transition) {
386 case GST_STATE_CHANGE_PAUSED_TO_READY:
/* mark both streams flushing so chain/event functions bail out, and
 * wake any thread blocked waiting for a subtitle buffer */
387 GST_ASS_RENDER_LOCK (render);
388 render->subtitle_flushing = TRUE;
389 render->video_flushing = TRUE;
390 gst_ass_render_pop_text (render);
391 GST_ASS_RENDER_UNLOCK (render);
397 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
398 if (ret == GST_STATE_CHANGE_FAILURE)
401 switch (transition) {
402 case GST_STATE_CHANGE_PAUSED_TO_READY:
/* drop all libass state; a fresh track is created on the next caps */
403 g_mutex_lock (&render->ass_mutex);
404 if (render->ass_track)
405 ass_free_track (render->ass_track);
406 render->ass_track = NULL;
407 render->track_init_ok = FALSE;
408 render->renderer_init_ok = FALSE;
409 gst_ass_render_reset_composition (render);
410 g_mutex_unlock (&render->ass_mutex);
412 case GST_STATE_CHANGE_READY_TO_PAUSED:
/* reset per-stream bookkeeping for a fresh run */
413 GST_ASS_RENDER_LOCK (render);
414 render->subtitle_flushing = FALSE;
415 render->video_flushing = FALSE;
416 render->video_eos = FALSE;
417 render->subtitle_eos = FALSE;
418 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
419 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
420 GST_ASS_RENDER_UNLOCK (render);
431 gst_ass_render_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
433 gboolean res = FALSE;
435 switch (GST_QUERY_TYPE (query)) {
438 GstCaps *filter, *caps;
440 gst_query_parse_caps (query, &filter);
441 caps = gst_ass_render_get_src_caps (pad, (GstAssRender *) parent, filter);
442 gst_query_set_caps_result (query, caps);
443 gst_caps_unref (caps);
448 res = gst_pad_query_default (pad, parent, query);
456 gst_ass_render_event_src (GstPad * pad, GstObject * parent, GstEvent * event)
458 GstAssRender *render = GST_ASS_RENDER (parent);
461 GST_DEBUG_OBJECT (render, "received src event %" GST_PTR_FORMAT, event);
463 /* FIXME: why not just always push it on text pad? */
464 if (render->track_init_ok) {
465 ret = gst_pad_push_event (render->video_sinkpad, gst_event_ref (event));
466 gst_pad_push_event (render->text_sinkpad, event);
468 ret = gst_pad_push_event (render->video_sinkpad, event);
475 * gst_ass_render_add_feature_and_intersect:
477 * Creates a new #GstCaps containing the (given caps +
478 * given caps feature) + (given caps intersected by the
481 * Returns: the new #GstCaps
484 gst_ass_render_add_feature_and_intersect (GstCaps * caps,
485 const gchar * feature, GstCaps * filter)
490 new_caps = gst_caps_copy (caps);
492 caps_size = gst_caps_get_size (new_caps);
493 for (i = 0; i < caps_size; i++) {
494 GstCapsFeatures *features = gst_caps_get_features (new_caps, i);
495 if (!gst_caps_features_is_any (features)) {
496 gst_caps_features_add (features, feature);
500 gst_caps_append (new_caps, gst_caps_intersect_full (caps,
501 filter, GST_CAPS_INTERSECT_FIRST));
507 * gst_ass_render_intersect_by_feature:
509 * Creates a new #GstCaps based on the following filtering rule.
511 * For each individual caps contained in given caps, if the
512 * caps uses the given caps feature, keep a version of the caps
513 * with the feature and an another one without. Otherwise, intersect
514 * the caps with the given filter.
516 * Returns: the new #GstCaps
519 gst_ass_render_intersect_by_feature (GstCaps * caps,
520 const gchar * feature, GstCaps * filter)
525 new_caps = gst_caps_new_empty ();
527 caps_size = gst_caps_get_size (caps);
528 for (i = 0; i < caps_size; i++) {
529 GstStructure *caps_structure = gst_caps_get_structure (caps, i);
530 GstCapsFeatures *caps_features =
531 gst_caps_features_copy (gst_caps_get_features (caps, i));
532 GstCaps *filtered_caps;
533 GstCaps *simple_caps =
534 gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
535 gst_caps_set_features (simple_caps, 0, caps_features);
537 if (gst_caps_features_contains (caps_features, feature)) {
538 gst_caps_append (new_caps, gst_caps_copy (simple_caps));
540 gst_caps_features_remove (caps_features, feature);
541 filtered_caps = gst_caps_ref (simple_caps);
543 filtered_caps = gst_caps_intersect_full (simple_caps, filter,
544 GST_CAPS_INTERSECT_FIRST);
547 gst_caps_unref (simple_caps);
548 gst_caps_append (new_caps, filtered_caps);
/* Compute the caps the video sink pad can accept: the downstream peer's
 * caps, expanded so that formats with the overlay-composition meta are
 * offered alongside software-blendable formats.
 * NOTE(review): the if (filter) / else, if (peer_caps) / else braces and
 * the final return are not visible in this fragment. */
555 gst_ass_render_get_videosink_caps (GstPad * pad, GstAssRender * render,
558 GstPad *srcpad = render->srcpad;
559 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
562 /* filter caps + composition feature + filter caps
563 * filtered by the software caps. */
564 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
565 assrender_filter = gst_ass_render_add_feature_and_intersect (filter,
566 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
567 gst_caps_unref (sw_caps);
569 GST_DEBUG_OBJECT (render, "assrender filter %" GST_PTR_FORMAT,
/* ask downstream with the (possibly expanded) filter */
573 peer_caps = gst_pad_peer_query_caps (srcpad, assrender_filter);
575 if (assrender_filter)
576 gst_caps_unref (assrender_filter);
580 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
582 if (gst_caps_is_any (peer_caps)) {
584 /* if peer returns ANY caps, return filtered src pad template caps */
585 caps = gst_caps_copy (gst_pad_get_pad_template_caps (srcpad));
588 /* duplicate caps which contains the composition into one version with
589 * the meta and one without. Filter the other caps by the software caps */
590 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
591 caps = gst_ass_render_intersect_by_feature (peer_caps,
592 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
593 gst_caps_unref (sw_caps);
596 gst_caps_unref (peer_caps);
599 /* no peer, our padtemplate is enough then */
600 caps = gst_pad_get_pad_template_caps (pad);
/* finally apply the caller's filter, if any */
604 GstCaps *intersection = gst_caps_intersect_full (filter, caps,
605 GST_CAPS_INTERSECT_FIRST);
606 gst_caps_unref (caps);
610 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
/* Compute the caps the src pad can produce: upstream's caps, expanded
 * with the overlay-composition feature plus a software-blendable variant.
 * Mirrors gst_ass_render_get_videosink_caps with the roles of the
 * add-feature and intersect-by-feature helpers swapped.
 * NOTE(review): several brace lines and the final return are not visible
 * in this fragment. */
616 gst_ass_render_get_src_caps (GstPad * pad, GstAssRender * render,
619 GstPad *sinkpad = render->video_sinkpad;
620 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
623 /* duplicate filter caps which contains the composition into one version
624 * with the meta and one without. Filter the other caps by the software
626 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
628 gst_ass_render_intersect_by_feature (filter,
629 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
630 gst_caps_unref (sw_caps);
/* ask upstream with the (possibly expanded) filter */
633 peer_caps = gst_pad_peer_query_caps (sinkpad, assrender_filter);
635 if (assrender_filter)
636 gst_caps_unref (assrender_filter);
640 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
642 if (gst_caps_is_any (peer_caps)) {
644 /* if peer returns ANY caps, return filtered sink pad template caps */
645 caps = gst_caps_copy (gst_pad_get_pad_template_caps (sinkpad));
649 /* return upstream caps + composition feature + upstream caps
650 * filtered by the software caps. */
651 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
652 caps = gst_ass_render_add_feature_and_intersect (peer_caps,
653 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
654 gst_caps_unref (sw_caps);
657 gst_caps_unref (peer_caps);
660 /* no peer, our padtemplate is enough then */
661 caps = gst_pad_get_pad_template_caps (pad);
/* finally apply the caller's filter, if any */
665 GstCaps *intersection;
668 gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
669 gst_caps_unref (caps);
673 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
/* Blend a linked list of libass images into a zeroed BGRA buffer with
 * premultiplied alpha.  libass colors are packed RRGGBBAA where AA is
 * *inverse* alpha (0 = opaque), hence the 255 - (color & 0xff) below.
 * NOTE(review): the per-image while loop, the dst[3] initial-write branch,
 * the per-row pointer advances (src += ..., dst += ...) and the counter
 * increment are not visible in this fragment.
 * NOTE(review): if dst_x/dst_y can be negative (image partly above/left of
 * the buffer), the dst pointer arithmetic below would go out of bounds —
 * confirm callers guarantee non-negative offsets. */
679 blit_bgra_premultiplied (GstAssRender * render, ASS_Image * ass_image,
680 guint8 * data, gint width, gint height, gint stride, gint x_off, gint y_off)
683 gint alpha, r, g, b, k;
/* start from a fully transparent canvas */
691 memset (data, 0, stride * height);
694 dst_x = ass_image->dst_x + x_off;
695 dst_y = ass_image->dst_y + y_off;
/* clip the image to the destination rectangle */
697 w = MIN (ass_image->w, width - dst_x);
698 h = MIN (ass_image->h, height - dst_y);
699 if (w <= 0 || h <= 0)
702 alpha = 255 - (ass_image->color & 0xff);
706 r = ((ass_image->color) >> 24) & 0xff;
707 g = ((ass_image->color) >> 16) & 0xff;
708 b = ((ass_image->color) >> 8) & 0xff;
710 src = ass_image->bitmap;
711 dst = data + dst_y * stride + dst_x * 4;
/* bytes to skip at the end of each row in source and destination */
713 src_skip = ass_image->stride - w;
714 dst_skip = stride - w * 4;
716 for (y = 0; y < h; y++) {
717 for (x = 0; x < w; x++) {
/* k = effective coverage of this pixel, 0..255 */
719 k = src[0] * alpha / 255;
/* first write: premultiplied color, no blending needed */
722 dst[2] = (k * r) / 255;
723 dst[1] = (k * g) / 255;
724 dst[0] = (k * b) / 255;
/* subsequent writes: "over" blend onto the existing premultiplied pixel */
726 dst[3] = k + (255 - k) * dst[3] / 255;
727 dst[2] = (k * r + (255 - k) * dst[2]) / 255;
728 dst[1] = (k * g + (255 - k) * dst[1]) / 255;
729 dst[0] = (k * b + (255 - k) * dst[0]) / 255;
740 ass_image = ass_image->next;
742 GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
746 gst_ass_render_can_handle_caps (GstCaps * incaps)
748 static GstStaticCaps static_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
752 caps = gst_static_caps_get (&static_caps);
753 ret = gst_caps_is_subset (incaps, caps);
754 gst_caps_unref (caps);
760 gst_ass_render_update_render_size (GstAssRender * render)
762 gdouble video_aspect = (gdouble) render->info.width /
763 (gdouble) render->info.height;
764 gdouble window_aspect = (gdouble) render->window_width /
765 (gdouble) render->window_height;
767 /* render at the window size, with the video aspect ratio */
768 if (video_aspect >= window_aspect) {
769 render->ass_frame_width = render->window_width;
770 render->ass_frame_height = render->window_width / video_aspect;
772 render->ass_frame_width = render->window_height * video_aspect;
773 render->ass_frame_height = render->window_height;
/* (Re)negotiate the src caps and decide whether to attach the overlay
 * composition as a buffer meta (downstream renders it) or blend it into
 * the frame ourselves.  Also (re)configures the libass renderer with the
 * negotiated render/storage sizes.  Called from setcaps_video and from
 * chain_video when a reconfigure is pending.
 * NOTE(review): numerous brace lines, goto labels and the returns are not
 * visible in this fragment; comments describe only the visible statements. */
778 gst_ass_render_negotiate (GstAssRender * render, GstCaps * caps)
780 gboolean upstream_has_meta = FALSE;
781 gboolean caps_has_meta = FALSE;
782 gboolean alloc_has_meta = FALSE;
783 gboolean attach = FALSE;
787 GstCaps *overlay_caps;
791 GST_DEBUG_OBJECT (render, "performing negotiation");
793 /* Clear cached composition */
794 gst_ass_render_reset_composition (render);
796 /* Clear any pending reconfigure flag */
797 gst_pad_check_reconfigure (render->srcpad);
/* when called without caps (reconfigure path), use the current ones */
800 caps = gst_pad_get_current_caps (render->video_sinkpad);
804 if (!caps || gst_caps_is_empty (caps))
807 /* Check if upstream caps have meta */
808 if ((f = gst_caps_get_features (caps, 0))) {
809 upstream_has_meta = gst_caps_features_contains (f,
810 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
813 /* Initialize dimensions */
814 width = render->info.width;
815 height = render->info.height;
817 if (upstream_has_meta) {
818 overlay_caps = gst_caps_ref (caps);
822 /* BaseTransform requires caps for the allocation query to work */
823 overlay_caps = gst_caps_copy (caps);
824 f = gst_caps_get_features (overlay_caps, 0);
825 gst_caps_features_add (f,
826 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
828 /* Then check if downstream accept overlay composition in caps */
829 /* FIXME: We should probably check if downstream *prefers* the
830 * overlay meta, and only enforce usage of it if we can't handle
831 * the format ourselves and thus would have to drop the overlays.
832 * Otherwise we should prefer what downstream wants here.
834 peercaps = gst_pad_peer_query_caps (render->srcpad, NULL);
835 caps_has_meta = gst_caps_can_intersect (peercaps, overlay_caps);
836 gst_caps_unref (peercaps);
838 GST_DEBUG ("caps have overlay meta %d", caps_has_meta);
841 if (upstream_has_meta || caps_has_meta) {
842 /* Send caps immediately, it's needed by GstBaseTransform to get a reply
843 * from allocation query */
844 ret = gst_pad_set_caps (render->srcpad, overlay_caps);
846 /* First check if the allocation meta has compositon */
847 query = gst_query_new_allocation (overlay_caps, FALSE);
849 if (!gst_pad_peer_query (render->srcpad, query)) {
850 /* no problem, we use the query defaults */
851 GST_DEBUG_OBJECT (render, "ALLOCATION query failed")
853 /* In case we were flushing, mark reconfigure and fail this method,
854 * will make it retry */
855 if (render->video_flushing)
859 alloc_has_meta = gst_query_find_allocation_meta (query,
860 GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);
862 GST_DEBUG ("sink alloc has overlay meta %d", alloc_has_meta);
864 if (alloc_has_meta) {
865 const GstStructure *params;
/* NOTE(review): "¶ms" below is mojibake of "&params" (an "&para;"
 * HTML entity crept in during conversion); it must read "&params" or
 * this line will not compile. */
867 gst_query_parse_nth_allocation_meta (query, alloc_index, ¶ms);
/* downstream may advertise its window size in the meta params */
869 if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
870 "height", G_TYPE_UINT, &height, NULL)) {
871 GST_DEBUG ("received window size: %dx%d", width, height);
872 g_assert (width != 0 && height != 0);
877 gst_query_unref (query);
880 /* Update render size if needed */
881 render->window_width = width;
882 render->window_height = height;
883 gst_ass_render_update_render_size (render);
885 /* For backward compatibility, we will prefer bliting if downstream
886 * allocation does not support the meta. In other case we will prefer
887 * attaching, and will fail the negotiation in the unlikely case we are
888 * force to blit, but format isn't supported. */
890 if (upstream_has_meta) {
892 } else if (caps_has_meta) {
893 if (alloc_has_meta) {
896 /* Don't attach unless we cannot handle the format */
897 attach = !gst_ass_render_can_handle_caps (caps);
900 ret = gst_ass_render_can_handle_caps (caps);
903 /* If we attach, then pick the overlay caps */
905 GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, overlay_caps);
906 /* Caps where already sent */
908 GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, caps);
909 ret = gst_pad_set_caps (render->srcpad, caps);
912 render->attach_compo_to_buffer = attach;
915 GST_DEBUG_OBJECT (render, "negotiation failed, schedule reconfigure");
916 gst_pad_mark_reconfigure (render->srcpad);
/* configure libass for the negotiated sizes; guarded by ass_mutex */
918 g_mutex_lock (&render->ass_mutex);
919 ass_set_frame_size (render->ass_renderer,
920 render->ass_frame_width, render->ass_frame_height);
921 ass_set_storage_size (render->ass_renderer,
922 render->info.width, render->info.height);
923 ass_set_pixel_aspect (render->ass_renderer,
924 (gdouble) render->info.par_n / (gdouble) render->info.par_d);
925 ass_set_font_scale (render->ass_renderer, 1.0);
926 ass_set_hinting (render->ass_renderer, ASS_HINTING_NONE);
/* NOTE(review): the first ass_set_fonts() call below is dead code — the
 * second call immediately replaces the font configuration with
 * (NULL, "Sans").  One of the two should be removed; confirm whether the
 * Arial/sans-serif or the Sans default is the intended one. */
928 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
929 ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
930 ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
931 ass_set_use_margins (render->ass_renderer, 0);
932 g_mutex_unlock (&render->ass_mutex);
934 render->renderer_init_ok = TRUE;
936 GST_DEBUG_OBJECT (render, "ass renderer setup complete");
939 gst_caps_unref (overlay_caps);
940 gst_caps_unref (caps);
/* failure paths: schedule a retry on the next buffer */
943 gst_pad_mark_reconfigure (render->srcpad);
950 gst_caps_unref (caps);
951 gst_pad_mark_reconfigure (render->srcpad);
/* Handle new caps on the video sink pad: parse them into render->info,
 * renegotiate the src pad, and reject formats we can neither blend nor
 * pass through with an attached composition meta.
 * NOTE(review): the goto labels (invalid_caps / out), the assignment of
 * the parsed info into render->info and the return statements are not
 * visible in this fragment. */
957 gst_ass_render_setcaps_video (GstPad * pad, GstAssRender * render,
963 if (!gst_video_info_from_caps (&info, caps))
968 ret = gst_ass_render_negotiate (render, caps);
970 GST_ASS_RENDER_LOCK (render);
/* if we must blend ourselves, the format has to be software-blendable */
972 if (!render->attach_compo_to_buffer && !gst_ass_render_can_handle_caps (caps)) {
973 GST_DEBUG_OBJECT (render, "unsupported caps %" GST_PTR_FORMAT, caps);
976 GST_ASS_RENDER_UNLOCK (render);
/* error path: caps did not parse as raw video */
983 GST_ERROR_OBJECT (render, "could not parse caps");
/* Handle new caps on the text sink pad: create the libass track and feed
 * it the ASS/SSA header from the codec_data buffer, if present.
 * NOTE(review): the g_return_val_if_fail below can return FALSE while
 * ass_mutex is still held (it is taken two lines earlier) — a latent lock
 * leak on a programmer-error path; confirm and consider unlocking first. */
989 gst_ass_render_setcaps_text (GstPad * pad, GstAssRender * render,
992 GstStructure *structure;
996 gboolean ret = FALSE;
998 structure = gst_caps_get_structure (caps, 0);
1000 GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,
/* codec_data carries the [Script Info]/[V4+ Styles] header */
1003 value = gst_structure_get_value (structure, "codec_data");
1005 g_mutex_lock (&render->ass_mutex);
1006 if (value != NULL) {
1007 priv = gst_value_get_buffer (value);
1008 g_return_val_if_fail (priv != NULL, FALSE);
1010 gst_buffer_map (priv, &map, GST_MAP_READ);
1012 if (!render->ass_track)
1013 render->ass_track = ass_new_track (render->ass_library);
/* feed the subtitle header into libass */
1015 ass_process_codec_private (render->ass_track, (char *) map.data, map.size);
1017 gst_buffer_unmap (priv, &map);
1019 GST_DEBUG_OBJECT (render, "ass track created");
1021 render->track_init_ok = TRUE;
/* no codec_data: start with an empty track */
1024 } else if (!render->ass_track) {
1025 render->ass_track = ass_new_track (render->ass_library);
1027 render->track_init_ok = TRUE;
1031 g_mutex_unlock (&render->ass_mutex);
1038 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
1039 GstClockTime running_time, GstClockTime duration)
1042 gdouble pts_start, pts_end;
1044 pts_start = running_time;
1045 pts_start /= GST_MSECOND;
1047 pts_end /= GST_MSECOND;
1049 GST_DEBUG_OBJECT (render,
1050 "Processing subtitles with running time %" GST_TIME_FORMAT
1051 " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
1052 GST_TIME_ARGS (duration));
1054 gst_buffer_map (buffer, &map, GST_MAP_READ);
1056 g_mutex_lock (&render->ass_mutex);
1057 ass_process_chunk (render->ass_track, (gchar *) map.data, map.size,
1058 pts_start, pts_end);
1059 g_mutex_unlock (&render->ass_mutex);
1061 gst_buffer_unmap (buffer, &map);
/* Build a GstVideoOverlayComposition from the libass image list: compute
 * the bounding box of all glyph images, blit them premultiplied into one
 * BGRA buffer, and wrap it in an overlay rectangle scaled from the libass
 * render size back to the video size.
 * NOTE(review): the declarations/initializations of min_x/min_y (to large
 * values) and max_x/max_y (to small values), the NULL-buffer check body,
 * and the final return are not visible in this fragment. */
1064 static GstVideoOverlayComposition *
1065 gst_ass_render_composite_overlay (GstAssRender * render, ASS_Image * images)
1067 GstVideoOverlayComposition *composition;
1068 GstVideoOverlayRectangle *rectangle;
1069 GstVideoMeta *vmeta;
1077 gdouble hscale, vscale;
1085 /* find bounding box of all images, to limit the overlay rectangle size */
1086 for (image = images; image; image = image->next) {
1087 if (min_x > image->dst_x)
1088 min_x = image->dst_x;
1089 if (min_y > image->dst_y)
1090 min_y = image->dst_y;
1091 if (max_x < image->dst_x + image->w)
1092 max_x = image->dst_x + image->w;
1093 if (max_y < image->dst_y + image->h)
1094 max_y = image->dst_y + image->h;
/* clamp the box to the libass render canvas */
1097 width = MIN (max_x - min_x, render->ass_frame_width);
1098 height = MIN (max_y - min_y, render->ass_frame_height);
1100 GST_DEBUG_OBJECT (render, "render overlay rectangle %dx%d%+d%+d",
1101 width, height, min_x, min_y);
/* 4 bytes per pixel: BGRA */
1103 buffer = gst_buffer_new_and_alloc (4 * width * height);
1105 GST_ERROR_OBJECT (render, "Failed to allocate overlay buffer");
1109 vmeta = gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
1110 GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, width, height);
1112 if (!gst_video_meta_map (vmeta, 0, &map, &data, &stride, GST_MAP_READWRITE)) {
1113 GST_ERROR_OBJECT (render, "Failed to map overlay buffer");
1114 gst_buffer_unref (buffer);
/* blit relative to the bounding box origin (-min_x/-min_y offsets) */
1118 blit_bgra_premultiplied (render, images, data, width, height, stride,
1120 gst_video_meta_unmap (vmeta, 0, &map);
/* scale from the libass canvas back to the actual video resolution */
1122 hscale = (gdouble) render->info.width / (gdouble) render->ass_frame_width;
1123 vscale = (gdouble) render->info.height / (gdouble) render->ass_frame_height;
1125 rectangle = gst_video_overlay_rectangle_new_raw (buffer,
1126 hscale * min_x, vscale * min_y, hscale * width, vscale * height,
1127 GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
1129 gst_buffer_unref (buffer);
1131 composition = gst_video_overlay_composition_new (rectangle);
1132 gst_video_overlay_rectangle_unref (rectangle);
/* Push one video buffer downstream, either with the cached composition
 * attached as a meta (downstream renders it) or blended into the pixels
 * here.  Returns the result of gst_pad_push().
 * NOTE(review): the "goto done" after attaching the meta and the done:
 * label are not visible in this fragment; without them the blend branch
 * would also run after attaching — confirm against the complete source. */
1138 gst_ass_render_push_frame (GstAssRender * render, GstBuffer * video_frame)
1140 GstVideoFrame frame;
/* nothing rendered for this timestamp: pass the frame through */
1142 if (!render->composition)
1145 video_frame = gst_buffer_make_writable (video_frame);
1147 if (render->attach_compo_to_buffer) {
1148 gst_buffer_add_video_overlay_composition_meta (video_frame,
1149 render->composition);
/* software blend path */
1153 if (!gst_video_frame_map (&frame, &render->info, video_frame,
1154 GST_MAP_READWRITE)) {
1155 GST_WARNING_OBJECT (render, "failed to map video frame for blending");
1159 gst_video_overlay_composition_blend (render->composition, &frame);
1160 gst_video_frame_unmap (&frame);
1163 return gst_pad_push (render->srcpad, video_frame);
/* Video sink pad chain function.
 * Clips the incoming buffer to the video segment, then (when the renderer and
 * track are initialized and rendering is enabled) synchronizes with queued
 * subtitle buffers, renders the ASS overlay for this frame's running time via
 * libass, and pushes the frame (with overlay) downstream.  When rendering is
 * disabled the buffer is pushed through unchanged.  Takes ownership of the
 * video buffer in all paths.
 * NOTE(review): many original lines (labels such as wait_for_text_buf:,
 * else branches, closing braces) are missing from this excerpt; comments
 * describe only the visible code. */
1166 static GstFlowReturn
1167 gst_ass_render_chain_video (GstPad * pad, GstObject * parent,
1170 GstAssRender *render = GST_ASS_RENDER (parent);
1171 GstFlowReturn ret = GST_FLOW_OK;
1172 gboolean in_seg = FALSE;
1173 guint64 start, stop, clip_start = 0, clip_stop = 0;
1174 ASS_Image *ass_image;
/* renegotiate output caps if the src pad was marked for reconfiguration */
1177 if (gst_pad_check_reconfigure (render->srcpad)) {
1178 if (!gst_ass_render_negotiate (render, NULL)) {
1179 gst_pad_mark_reconfigure (render->srcpad);
1180 if (GST_PAD_IS_FLUSHING (render->srcpad))
1181 goto flushing_no_unlock;
1183 goto not_negotiated;
/* a valid timestamp is mandatory for segment clipping and rendering */
1187 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1188 goto missing_timestamp;
1190 /* ignore buffers that are outside of the current segment */
1191 start = GST_BUFFER_TIMESTAMP (buffer);
1193 if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
1194 stop = GST_CLOCK_TIME_NONE;
1196 stop = start + GST_BUFFER_DURATION (buffer);
1199 /* segment_clip() will adjust start unconditionally to segment_start if
1200 * no stop time is provided, so handle this ourselves */
1201 if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
1202 goto out_of_segment;
1205 gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
1206 &clip_start, &clip_stop);
1209 goto out_of_segment;
1211 /* if the buffer is only partially in the segment, fix up stamps */
1212 if (clip_start != start || (stop != -1 && clip_stop != stop)) {
1213 GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
1214 buffer = gst_buffer_make_writable (buffer);
1215 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1217 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1220 /* now, after we've done the clipping, fix up end time if there's no
1221 * duration (we only use those estimated values internally though, we
1222 * don't want to set bogus values on the buffer itself) */
1224 if (render->info.fps_n && render->info.fps_d) {
1225 GST_DEBUG_OBJECT (render, "estimating duration based on framerate");
1227 start + gst_util_uint64_scale_int (GST_SECOND, render->info.fps_d,
1228 render->info.fps_n);
1230 GST_WARNING_OBJECT (render, "no duration, assuming minimal duration");
1231 stop = start + 1; /* we need to assume some interval */
/* from here on render state is examined under the object lock */
1237 GST_ASS_RENDER_LOCK (render);
1239 if (render->video_flushing)
1242 if (render->video_eos)
/* full rendering path: libass renderer and track ready, and enabled */
1245 if (render->renderer_init_ok && render->track_init_ok && render->enable) {
1246 /* Text pad linked, check if we have a text buffer queued */
1247 if (render->subtitle_pending) {
1248 GSList *subtitle_pending = render->subtitle_pending;
1249 GstClockTime text_start = GST_CLOCK_TIME_NONE;
1250 GstClockTime text_end = GST_CLOCK_TIME_NONE;
1251 GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
1252 GstClockTime text_running_time_end = GST_CLOCK_TIME_NONE;
1253 GstClockTime vid_running_time, vid_running_time_end;
/* convert the video frame's start/stop to running time for comparison
 * against the subtitle buffers' running times */
1258 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1260 vid_running_time_end =
1261 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1264 GST_LOG_OBJECT (render, "V : %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1265 GST_TIME_ARGS (vid_running_time),
1266 GST_TIME_ARGS (vid_running_time_end));
1268 if (subtitle_pending == NULL)
1269 GST_LOG_OBJECT (render, "T : no pending subtitles");
/* walk the pending subtitle queue: drop bad/old entries, feed valid
 * ones to the ASS track */
1271 while (subtitle_pending != NULL) {
1274 /* if the text buffer isn't stamped right, pop it off the
1275 * queue and display it for the current video frame only */
1276 if (!GST_BUFFER_TIMESTAMP_IS_VALID (subtitle_pending->data) ||
1277 !GST_BUFFER_DURATION_IS_VALID (subtitle_pending->data)) {
1278 GSList *bad = subtitle_pending;
1279 GST_WARNING_OBJECT (render,
1280 "Got text buffer with invalid timestamp or duration %"
1281 GST_PTR_FORMAT, bad->data);
1282 gst_buffer_unref (bad->data);
1283 subtitle_pending = bad->next;
1284 render->subtitle_pending =
1285 g_slist_delete_link (render->subtitle_pending, bad);
/* wake the text chain which may be blocked waiting for queue space */
1286 GST_ASS_RENDER_BROADCAST (render);
1290 text_start = GST_BUFFER_TIMESTAMP (subtitle_pending->data);
1291 text_end = text_start + GST_BUFFER_DURATION (subtitle_pending->data);
1293 /* If timestamp and duration are valid */
1295 gst_segment_to_running_time (&render->subtitle_segment,
1296 GST_FORMAT_TIME, text_start);
1297 text_running_time_end =
1298 gst_segment_to_running_time (&render->subtitle_segment,
1299 GST_FORMAT_TIME, text_end);
1301 GST_LOG_OBJECT (render, "T%u: %" GST_TIME_FORMAT " - "
1302 "%" GST_TIME_FORMAT, n, GST_TIME_ARGS (text_running_time),
1303 GST_TIME_ARGS (text_running_time_end));
/* subtitle ended before this frame starts: it can never be shown */
1306 if (text_running_time_end <= vid_running_time) {
1307 GSList *old = subtitle_pending;
1308 GST_DEBUG_OBJECT (render,
1309 "text buffer too old, popping %" GST_PTR_FORMAT, old->data);
1310 gst_buffer_unref (old->data);
1311 subtitle_pending = old->next;
1312 render->subtitle_pending =
1313 g_slist_delete_link (render->subtitle_pending, old);
1314 GST_ASS_RENDER_BROADCAST (render);
/* feed not-yet-processed subtitle data into the libass track */
1318 if (render->need_process) {
1319 GST_DEBUG_OBJECT (render, "process text buffer");
1320 gst_ass_render_process_text (render, subtitle_pending->data,
1321 text_running_time, text_running_time_end - text_running_time);
1324 subtitle_pending = subtitle_pending->next;
1327 if (render->need_process) {
1328 render->need_process = FALSE;
/* drop the lock while calling into libass; ass_mutex protects it */
1331 GST_ASS_RENDER_UNLOCK (render);
1333 /* libass needs timestamps in ms */
1334 timestamp = vid_running_time / GST_MSECOND;
1336 g_mutex_lock (&render->ass_mutex);
1337 ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
1338 timestamp, &changed);
1339 g_mutex_unlock (&render->ass_mutex);
/* throw away the cached composition when libass reports a change or
 * there is nothing to show anymore */
1341 if ((!ass_image || changed) && render->composition) {
1342 GST_DEBUG_OBJECT (render, "release overlay (changed %d)", changed);
1343 gst_ass_render_reset_composition (render);
1346 if (ass_image != NULL) {
1347 if (!render->composition)
1348 render->composition = gst_ass_render_composite_overlay (render,
1351 GST_DEBUG_OBJECT (render, "nothing to render right now");
1354 /* Push the video frame */
1355 ret = gst_ass_render_push_frame (render, buffer);
/* after pushing, discard subtitle buffers that end within this frame */
1357 subtitle_pending = render->subtitle_pending;
1358 while (subtitle_pending != NULL) {
1360 text_start = GST_BUFFER_TIMESTAMP (subtitle_pending->data);
1361 text_end = text_start + GST_BUFFER_DURATION (subtitle_pending->data);
1363 text_running_time_end =
1364 gst_segment_to_running_time (&render->video_segment,
1365 GST_FORMAT_TIME, text_end);
1367 if (text_running_time_end <= vid_running_time_end) {
1368 GSList *old = subtitle_pending;
1369 GST_DEBUG_OBJECT (render,
1370 "finished text buffer, popping %" GST_PTR_FORMAT, old->data);
1371 GST_ASS_RENDER_LOCK (render);
1372 gst_buffer_unref (old->data);
1373 subtitle_pending = old->next;
1374 render->subtitle_pending =
1375 g_slist_delete_link (render->subtitle_pending, old);
1376 GST_ASS_RENDER_BROADCAST (render);
1377 GST_ASS_RENDER_UNLOCK (render);
/* queue shrank: remaining entries must be re-fed to libass */
1378 render->need_process = TRUE;
1379 if (g_slist_length (render->subtitle_pending) == 0) {
1380 render->need_process = FALSE;
1383 subtitle_pending = subtitle_pending->next;
/* no pending subtitle buffer: decide whether to block for one */
1387 gboolean wait_for_text_buf = TRUE;
1389 if (render->subtitle_eos)
1390 wait_for_text_buf = FALSE;
1392 if (!render->wait_text)
1393 wait_for_text_buf = FALSE;
1395 /* Text pad linked, but no text buffer available - what now? */
1396 if (render->subtitle_segment.format == GST_FORMAT_TIME) {
1397 GstClockTime text_start_running_time, text_last_stop_running_time;
1398 GstClockTime vid_running_time;
1401 gst_segment_to_running_time (&render->video_segment,
1402 GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
1403 text_start_running_time =
1404 gst_segment_to_running_time (&render->subtitle_segment,
1405 GST_FORMAT_TIME, render->subtitle_segment.start);
1406 text_last_stop_running_time =
1407 gst_segment_to_running_time (&render->subtitle_segment,
1408 GST_FORMAT_TIME, render->subtitle_segment.position);
/* video predates any possible subtitle: no point in waiting */
1410 if ((GST_CLOCK_TIME_IS_VALID (text_start_running_time) &&
1411 vid_running_time < text_start_running_time) ||
1412 (GST_CLOCK_TIME_IS_VALID (text_last_stop_running_time) &&
1413 vid_running_time < text_last_stop_running_time)) {
1414 wait_for_text_buf = FALSE;
/* block on the render condition until the text chain broadcasts,
 * then retry from the wait_for_text_buf label (not visible here) */
1418 if (wait_for_text_buf) {
1419 GST_DEBUG_OBJECT (render, "no text buffer, need to wait for one");
1420 GST_ASS_RENDER_WAIT (render);
1421 GST_DEBUG_OBJECT (render, "resuming");
1422 GST_ASS_RENDER_UNLOCK (render);
1423 goto wait_for_text_buf;
1425 GST_ASS_RENDER_UNLOCK (render);
1426 GST_LOG_OBJECT (render, "no need to wait for a text buffer");
1427 ret = gst_pad_push (render->srcpad, buffer);
/* passthrough path: renderer not ready or rendering disabled */
1431 GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");
1433 GST_ASS_RENDER_UNLOCK (render);
1434 ret = gst_pad_push (render->srcpad, buffer);
1438 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p ret=%d", buffer, ret);
1440 /* Update last_stop */
1441 render->video_segment.position = clip_start;
/* error labels below (names not visible): each drops the buffer */
1447 GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
1448 gst_buffer_unref (buffer);
1453 GST_DEBUG_OBJECT (render, "not negotiated");
1454 gst_buffer_unref (buffer);
1455 return GST_FLOW_NOT_NEGOTIATED;
1459 GST_ASS_RENDER_UNLOCK (render);
1463 GST_DEBUG_OBJECT (render, "flushing, discarding buffer");
1464 gst_buffer_unref (buffer);
1465 return GST_FLOW_FLUSHING;
1469 GST_ASS_RENDER_UNLOCK (render);
1470 GST_DEBUG_OBJECT (render, "eos, discarding buffer");
1471 gst_buffer_unref (buffer);
1472 return GST_FLOW_EOS;
1476 GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
1477 gst_buffer_unref (buffer);
/* Text (subtitle) sink pad chain function.
 * Rejects buffers while flushing or after subtitle EOS, clips the buffer to
 * the subtitle segment, appends a ref of it to the pending-subtitle queue and
 * wakes up the video chain which may be blocked waiting for subtitle data.
 * Takes ownership of @buffer (unreffed at the end; the queue holds its own
 * ref).  NOTE(review): some lines (brace closings, a `beach`-style label) are
 * not visible in this excerpt. */
1482 static GstFlowReturn
1483 gst_ass_render_chain_text (GstPad * pad, GstObject * parent, GstBuffer * buffer)
1485 GstFlowReturn ret = GST_FLOW_OK;
1486 GstAssRender *render = GST_ASS_RENDER (parent);
1487 gboolean in_seg = FALSE;
1488 guint64 clip_start = 0, clip_stop = 0;
1490 GST_DEBUG_OBJECT (render, "entering chain for buffer %" GST_PTR_FORMAT,
1493 GST_ASS_RENDER_LOCK (render);
/* drop data while a flush is in progress */
1495 if (render->subtitle_flushing) {
1496 GST_ASS_RENDER_UNLOCK (render);
1497 ret = GST_FLOW_FLUSHING;
1498 GST_LOG_OBJECT (render, "text flushing");
/* no more subtitle data accepted after EOS */
1502 if (render->subtitle_eos) {
1503 GST_ASS_RENDER_UNLOCK (render);
1505 GST_LOG_OBJECT (render, "text EOS");
/* clip the buffer to the subtitle segment (stop may be unknown) */
1509 if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
1512 if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
1513 stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
1515 stop = GST_CLOCK_TIME_NONE;
1517 in_seg = gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME,
1518 GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
/* rewrite stamps with the clipped values and advance segment position */
1524 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1525 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1526 else if (GST_BUFFER_DURATION_IS_VALID (buffer))
1527 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1529 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1530 render->subtitle_segment.position = clip_start;
1532 GST_DEBUG_OBJECT (render, "New buffer arrived %" GST_PTR_FORMAT, buffer);
/* queue a ref; the video chain processes and eventually frees it */
1533 render->subtitle_pending = g_slist_append (render->subtitle_pending,
1534 gst_buffer_ref (buffer));
1535 render->need_process = TRUE;
1537 /* in case the video chain is waiting for a text buffer, wake it up */
1538 GST_ASS_RENDER_BROADCAST (render);
1541 GST_ASS_RENDER_UNLOCK (render);
1544 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p", buffer);
1546 gst_buffer_unref (buffer);
/* Inspect an attachment sample from a TAG event and, if it looks like a font
 * (by MIME type or, failing that, by a 4-character filename extension),
 * register it with libass via ass_add_font() so embedded fonts can be used
 * for rendering.  NOTE(review): some lines (extension list contents, early
 * returns, closing braces) are not visible in this excerpt. */
1551 gst_ass_render_handle_tag_sample (GstAssRender * render, GstSample * sample)
/* recognized font MIME types for attachments */
1553 static const gchar *mimetypes[] = {
1554 "application/x-font-ttf",
1555 "application/x-font-otf",
1556 "application/x-truetype-font",
1557 "application/vnd.ms-opentype",
/* fallback: recognized filename extensions (list contents not visible) */
1563 static const gchar *extensions[] = {
1570 const GstStructure *structure;
1571 gboolean valid_mimetype, valid_extension;
1573 const gchar *mimetype, *filename;
1575 buf = gst_sample_get_buffer (sample);
1576 structure = gst_sample_get_info (sample);
/* nothing to do without attachment data and its info structure */
1578 if (!buf || !structure)
1581 filename = gst_structure_get_string (structure, "filename");
1585 valid_mimetype = FALSE;
1586 valid_extension = FALSE;
1588 mimetype = gst_structure_get_string (structure, "mimetype");
1590 for (i = 0; i < G_N_ELEMENTS (mimetypes); i++) {
1591 if (strcmp (mimetype, mimetypes[i]) == 0) {
1592 valid_mimetype = TRUE;
/* MIME type unknown: fall back to comparing the last 4 chars of the
 * filename case-insensitively.
 * NOTE(review): `filename + len - 4` points before the string when
 * len < 4 — verify a length guard exists on the lines not shown. */
1598 if (!valid_mimetype) {
1599 guint len = strlen (filename);
1600 const gchar *extension = filename + len - 4;
1601 for (i = 0; i < G_N_ELEMENTS (extensions); i++) {
1602 if (g_ascii_strcasecmp (extension, extensions[i]) == 0) {
1603 valid_extension = TRUE;
/* hand the font bytes to libass; ass_mutex serializes libass access */
1609 if (valid_mimetype || valid_extension) {
1612 g_mutex_lock (&render->ass_mutex);
1613 gst_buffer_map (buf, &map, GST_MAP_READ);
1614 ass_add_font (render->ass_library, (gchar *) filename,
1615 (gchar *) map.data, map.size);
1616 gst_buffer_unmap (buf, &map);
1617 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1618 g_mutex_unlock (&render->ass_mutex);
/* Scan a tag list for GST_TAG_ATTACHMENT samples and register each one as a
 * potential embedded font (see gst_ass_render_handle_tag_sample).  Only acts
 * when the embeddedfonts property is enabled. */
1623 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1630 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1631 if (tag_size > 0 && render->embeddedfonts) {
1635 GST_DEBUG_OBJECT (render, "TAG event has attachments");
/* each attachment tag entry is a GstSample owned by us after get */
1637 for (index = 0; index < tag_size; index++) {
1638 if (gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, index,
1640 gst_ass_render_handle_tag_sample (render, sample);
1641 gst_sample_unref (sample);
/* Event handler for the video sink pad.
 * Handles CAPS (reconfigure video info), SEGMENT (track the TIME segment),
 * TAG (harvest embedded fonts), EOS and FLUSH_START/STOP (update the state
 * flags the chain functions check under the render lock); everything else is
 * forwarded with gst_pad_event_default().
 * NOTE(review): break statements and closing braces between cases are not
 * visible in this excerpt. */
1648 gst_ass_render_event_video (GstPad * pad, GstObject * parent, GstEvent * event)
1650 gboolean ret = FALSE;
1651 GstAssRender *render = GST_ASS_RENDER (parent);
1653 GST_DEBUG_OBJECT (pad, "received video event %" GST_PTR_FORMAT, event);
1655 switch (GST_EVENT_TYPE (event)) {
/* new video caps: reconfigure; event is consumed, not forwarded */
1656 case GST_EVENT_CAPS:
1660 gst_event_parse_caps (event, &caps);
1661 ret = gst_ass_render_setcaps_video (pad, render, caps);
1662 gst_event_unref (event);
/* only TIME segments are accepted; others are warned about and dropped */
1665 case GST_EVENT_SEGMENT:
1669 GST_DEBUG_OBJECT (render, "received new segment");
1671 gst_event_copy_segment (event, &segment);
1673 if (segment.format == GST_FORMAT_TIME) {
1674 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1675 &render->video_segment);
1677 render->video_segment = segment;
1679 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
1680 &render->video_segment);
1681 ret = gst_pad_event_default (pad, parent, event);
1683 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1684 ("received non-TIME newsegment event on video input"));
1686 gst_event_unref (event);
/* TAG events may carry font attachments */
1692 GstTagList *taglist = NULL;
1694 /* tag events may contain attachments which might be fonts */
1695 GST_DEBUG_OBJECT (render, "got TAG event");
1697 gst_event_parse_tag (event, &taglist);
1698 gst_ass_render_handle_tags (render, taglist);
1699 ret = gst_pad_event_default (pad, parent, event);
/* EOS case: mark video stream finished under the lock */
1703 GST_ASS_RENDER_LOCK (render);
1704 GST_INFO_OBJECT (render, "video EOS");
1705 render->video_eos = TRUE;
1706 GST_ASS_RENDER_UNLOCK (render);
1707 ret = gst_pad_event_default (pad, parent, event);
/* flush start: set the flag and wake any blocked chain function */
1709 case GST_EVENT_FLUSH_START:
1710 GST_ASS_RENDER_LOCK (render);
1711 GST_INFO_OBJECT (render, "video flush start");
1712 render->video_flushing = TRUE;
1713 GST_ASS_RENDER_BROADCAST (render);
1714 GST_ASS_RENDER_UNLOCK (render);
1715 ret = gst_pad_event_default (pad, parent, event);
/* flush stop: clear flags and reset the video segment to TIME */
1717 case GST_EVENT_FLUSH_STOP:
1718 GST_ASS_RENDER_LOCK (render);
1719 GST_INFO_OBJECT (render, "video flush stop");
1720 render->video_flushing = FALSE;
1721 render->video_eos = FALSE;
1722 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
1723 GST_ASS_RENDER_UNLOCK (render);
1724 ret = gst_pad_event_default (pad, parent, event);
1727 ret = gst_pad_event_default (pad, parent, event);
/* Query handler for the video sink pad: answers CAPS queries with the caps
 * this element can accept (intersected with downstream via the helper);
 * all other queries go to the default handler. */
1735 gst_ass_render_query_video (GstPad * pad, GstObject * parent, GstQuery * query)
1737 gboolean res = FALSE;
1739 switch (GST_QUERY_TYPE (query)) {
1740 case GST_QUERY_CAPS:
1742 GstCaps *filter, *caps;
1744 gst_query_parse_caps (query, &filter);
1746 gst_ass_render_get_videosink_caps (pad, (GstAssRender *) parent,
/* result caps are set on the query; our ref is dropped afterwards */
1748 gst_query_set_caps_result (query, caps);
1749 gst_caps_unref (caps);
1754 res = gst_pad_query_default (pad, parent, query);
/* Event handler for the text (subtitle) sink pad.
 * Handles CAPS, SEGMENT (TIME only), GAP (advance subtitle position), FLUSH
 * START/STOP (flush libass events and pending buffers), EOS and TAG.  State
 * changes happen under the render lock and broadcast the render condition so
 * a video chain blocked in GST_ASS_RENDER_WAIT can re-evaluate.
 * NOTE(review): break statements and closing braces between cases are not
 * visible in this excerpt. */
1762 gst_ass_render_event_text (GstPad * pad, GstObject * parent, GstEvent * event)
1764 gboolean ret = FALSE;
1765 GstAssRender *render = GST_ASS_RENDER (parent);
1767 GST_DEBUG_OBJECT (pad, "received text event %" GST_PTR_FORMAT, event);
1769 switch (GST_EVENT_TYPE (event)) {
/* new subtitle caps: (re)initialize the ASS track; event consumed */
1770 case GST_EVENT_CAPS:
1774 gst_event_parse_caps (event, &caps);
1775 ret = gst_ass_render_setcaps_text (pad, render, caps);
1776 gst_event_unref (event);
/* a new segment implicitly clears subtitle EOS */
1779 case GST_EVENT_SEGMENT:
1783 GST_ASS_RENDER_LOCK (render);
1784 render->subtitle_eos = FALSE;
1785 GST_ASS_RENDER_UNLOCK (render);
1787 gst_event_copy_segment (event, &segment);
1789 GST_ASS_RENDER_LOCK (render);
1790 if (segment.format == GST_FORMAT_TIME) {
1791 GST_DEBUG_OBJECT (render, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
1792 &render->subtitle_segment);
1794 render->subtitle_segment = segment;
1796 GST_DEBUG_OBJECT (render,
1797 "TEXT SEGMENT after: %" GST_SEGMENT_FORMAT,
1798 &render->subtitle_segment);
1800 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1801 ("received non-TIME newsegment event on subtitle input"));
1804 gst_event_unref (event);
1807 /* wake up the video chain, it might be waiting for a text buffer or
1808 * a text segment update */
1809 GST_ASS_RENDER_BROADCAST (render);
1810 GST_ASS_RENDER_UNLOCK (render);
/* GAP: no subtitle data expected until after the gap */
1813 case GST_EVENT_GAP:{
1814 GstClockTime start, duration;
1816 gst_event_parse_gap (event, &start, &duration);
1817 if (GST_CLOCK_TIME_IS_VALID (duration))
1819 /* we do not expect another buffer until after gap,
1820 * so that is our position now */
1821 GST_ASS_RENDER_LOCK (render);
1822 render->subtitle_segment.position = start;
1824 /* wake up the video chain, it might be waiting for a text buffer or
1825 * a text segment update */
1826 GST_ASS_RENDER_BROADCAST (render);
1827 GST_ASS_RENDER_UNLOCK (render);
1829 gst_event_unref (event);
/* flush stop: discard queued libass events and pending text buffers,
 * then reset the subtitle segment */
1833 case GST_EVENT_FLUSH_STOP:
1834 g_mutex_lock (&render->ass_mutex);
1835 if (render->ass_track) {
1836 ass_flush_events (render->ass_track);
1838 g_mutex_unlock (&render->ass_mutex);
1839 GST_ASS_RENDER_LOCK (render);
1840 GST_INFO_OBJECT (render, "text flush stop");
1841 render->subtitle_flushing = FALSE;
1842 render->subtitle_eos = FALSE;
1843 gst_ass_render_pop_text (render);
1844 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
1845 GST_ASS_RENDER_UNLOCK (render);
1846 gst_event_unref (event);
/* flush start: mark flushing and unblock anyone waiting on subtitles */
1849 case GST_EVENT_FLUSH_START:
1850 GST_DEBUG_OBJECT (render, "text flush start");
1851 GST_ASS_RENDER_LOCK (render);
1852 render->subtitle_flushing = TRUE;
1853 GST_ASS_RENDER_BROADCAST (render);
1854 GST_ASS_RENDER_UNLOCK (render);
1855 gst_event_unref (event);
/* EOS case: video chain must stop waiting for subtitle data */
1859 GST_ASS_RENDER_LOCK (render);
1860 render->subtitle_eos = TRUE;
1861 GST_INFO_OBJECT (render, "text EOS");
1862 /* wake up the video chain, it might be waiting for a text buffer or
1863 * a text segment update */
1864 GST_ASS_RENDER_BROADCAST (render);
1865 GST_ASS_RENDER_UNLOCK (render);
1866 gst_event_unref (event);
/* TAG events may carry font attachments */
1871 GstTagList *taglist = NULL;
1873 /* tag events may contain attachments which might be fonts */
1874 GST_DEBUG_OBJECT (render, "got TAG event");
1876 gst_event_parse_tag (event, &taglist);
1877 gst_ass_render_handle_tags (render, taglist);
1878 ret = gst_pad_event_default (pad, parent, event);
1882 ret = gst_pad_event_default (pad, parent, event);
/* Plugin entry point: registers the assrender element with GStreamer. */
1890 plugin_init (GstPlugin * plugin)
1892 return GST_ELEMENT_REGISTER (assrender, plugin);
/* Standard GStreamer plugin descriptor; some macro arguments (plugin name)
 * fall on lines not visible in this excerpt. */
1895 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1898 "ASS/SSA subtitle renderer",
1899 plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)