2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:element-assrender
25 * Renders timestamped SSA/ASS subtitles on top of a video stream.
27 * ## Example launch line
29 * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! h264parse ! avdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
38 #include <gst/video/gstvideometa.h>
40 #include "gstassrender.h"
44 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
45 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
46 #define GST_CAT_DEFAULT gst_ass_render_debug
48 /* Filter signals and props */
62 /* FIXME: video-blend.c doesn't support formats with more than 8 bit per
63 * component (which get unpacked into ARGB64 or AYUV64) yet, such as:
64 * v210, v216, UYVP, GRAY16_LE, GRAY16_BE */
65 #define FORMATS "{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, \
66 I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, \
67 NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }"
69 #define ASSRENDER_CAPS GST_VIDEO_CAPS_MAKE(FORMATS)
71 #define ASSRENDER_ALL_CAPS ASSRENDER_CAPS ";" \
72 GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
74 static GstStaticCaps sw_template_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
76 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
79 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
82 static GstStaticPadTemplate video_sink_factory =
83 GST_STATIC_PAD_TEMPLATE ("video_sink",
86 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
89 static GstStaticPadTemplate text_sink_factory =
90 GST_STATIC_PAD_TEMPLATE ("text_sink",
93 GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
96 #define GST_ASS_RENDER_GET_LOCK(ass) (&GST_ASS_RENDER (ass)->lock)
97 #define GST_ASS_RENDER_GET_COND(ass) (&GST_ASS_RENDER (ass)->cond)
98 #define GST_ASS_RENDER_LOCK(ass) (g_mutex_lock (GST_ASS_RENDER_GET_LOCK (ass)))
99 #define GST_ASS_RENDER_UNLOCK(ass) (g_mutex_unlock (GST_ASS_RENDER_GET_LOCK (ass)))
100 #define GST_ASS_RENDER_WAIT(ass) (g_cond_wait (GST_ASS_RENDER_GET_COND (ass), GST_ASS_RENDER_GET_LOCK (ass)))
101 #define GST_ASS_RENDER_SIGNAL(ass) (g_cond_signal (GST_ASS_RENDER_GET_COND (ass)))
102 #define GST_ASS_RENDER_BROADCAST(ass)(g_cond_broadcast (GST_ASS_RENDER_GET_COND (ass)))
104 static void gst_ass_render_set_property (GObject * object, guint prop_id,
105 const GValue * value, GParamSpec * pspec);
106 static void gst_ass_render_get_property (GObject * object, guint prop_id,
107 GValue * value, GParamSpec * pspec);
109 static void gst_ass_render_finalize (GObject * object);
111 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
112 GstStateChange transition);
114 #define gst_ass_render_parent_class parent_class
115 G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);
117 static GstCaps *gst_ass_render_get_videosink_caps (GstPad * pad,
118 GstAssRender * render, GstCaps * filter);
119 static GstCaps *gst_ass_render_get_src_caps (GstPad * pad,
120 GstAssRender * render, GstCaps * filter);
122 static gboolean gst_ass_render_setcaps_video (GstPad * pad,
123 GstAssRender * render, GstCaps * caps);
124 static gboolean gst_ass_render_setcaps_text (GstPad * pad,
125 GstAssRender * render, GstCaps * caps);
127 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
128 GstObject * parent, GstBuffer * buf);
129 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad,
130 GstObject * parent, GstBuffer * buf);
132 static gboolean gst_ass_render_event_video (GstPad * pad, GstObject * parent,
134 static gboolean gst_ass_render_event_text (GstPad * pad, GstObject * parent,
136 static gboolean gst_ass_render_event_src (GstPad * pad, GstObject * parent,
139 static gboolean gst_ass_render_query_video (GstPad * pad, GstObject * parent,
141 static gboolean gst_ass_render_query_src (GstPad * pad, GstObject * parent,
144 /* initialize the plugin's class */
/* Installs the GObject properties (enable, embeddedfonts, wait-text),
 * hooks up the state-change vfunc, registers the three static pad
 * templates and sets the element metadata. */
146 gst_ass_render_class_init (GstAssRenderClass * klass)
148 GObjectClass *gobject_class = (GObjectClass *) klass;
149 GstElementClass *gstelement_class = (GstElementClass *) klass;
151 gobject_class->set_property = gst_ass_render_set_property;
152 gobject_class->get_property = gst_ass_render_get_property;
153 gobject_class->finalize = gst_ass_render_finalize;
155 g_object_class_install_property (gobject_class, PROP_ENABLE,
156 g_param_spec_boolean ("enable", "Enable",
157 "Enable rendering of subtitles", TRUE,
158 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
160 g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
161 g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
162 "Extract and use fonts embedded in the stream", TRUE,
163 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* NOTE(review): the pspec default below is TRUE, but _init() sets
 * render->wait_text = FALSE — one of the two defaults is wrong;
 * confirm the intended default and make them agree. */
165 g_object_class_install_property (gobject_class, PROP_WAIT_TEXT,
166 g_param_spec_boolean ("wait-text", "Wait Text",
167 "Whether to wait for subtitles", TRUE,
168 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
170 gstelement_class->change_state =
171 GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
173 gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
174 gst_element_class_add_static_pad_template (gstelement_class,
175 &video_sink_factory);
176 gst_element_class_add_static_pad_template (gstelement_class,
179 gst_element_class_set_static_metadata (gstelement_class, "ASS/SSA Render",
180 "Mixer/Video/Overlay/Subtitle",
181 "Renders ASS/SSA subtitles with libass",
182 "Benjamin Schmitz <vortex@wolpzone.de>, "
183 "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
/* libass message callback: formats the libass log message and forwards it
 * to the gst_ass_render_lib_debug category at a severity matching the
 * libass level.  The level-dispatch branches (if/else on 'level') are not
 * visible in this listing — presumably lower numeric libass levels map to
 * more severe GStreamer categories; confirm against the full source. */
187 _libass_message_cb (gint level, const gchar * fmt, va_list args,
190 gchar *message = g_strdup_vprintf (fmt, args);
193 GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
195 GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
197 GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
199 GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
201 GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
/* Instance init: creates the src/video_sink/text_sink pads from the static
 * templates, installs chain/event/query functions, initializes the state
 * flags, segments and locks, and brings up the libass library + renderer. */
207 gst_ass_render_init (GstAssRender * render)
209 GST_DEBUG_OBJECT (render, "init");
211 render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
212 render->video_sinkpad =
213 gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
214 render->text_sinkpad =
215 gst_pad_new_from_static_template (&text_sink_factory, "text_sink");
217 gst_pad_set_chain_function (render->video_sinkpad,
218 GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
219 gst_pad_set_chain_function (render->text_sinkpad,
220 GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));
222 gst_pad_set_event_function (render->video_sinkpad,
223 GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
224 gst_pad_set_event_function (render->text_sinkpad,
225 GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
226 gst_pad_set_event_function (render->srcpad,
227 GST_DEBUG_FUNCPTR (gst_ass_render_event_src));
229 gst_pad_set_query_function (render->srcpad,
230 GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
231 gst_pad_set_query_function (render->video_sinkpad,
232 GST_DEBUG_FUNCPTR (gst_ass_render_query_video));
/* Let allocation queries pass through to downstream so upstream can
 * negotiate the overlay-composition meta directly. */
234 GST_PAD_SET_PROXY_ALLOCATION (render->video_sinkpad);
236 gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
237 gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
238 gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);
240 gst_video_info_init (&render->info);
242 g_mutex_init (&render->lock);
243 g_cond_init (&render->cond);
245 render->renderer_init_ok = FALSE;
246 render->track_init_ok = FALSE;
247 render->enable = TRUE;
248 render->embeddedfonts = TRUE;
/* NOTE(review): FALSE here contradicts the TRUE default in the "wait-text"
 * property spec in class_init — confirm which default is intended. */
249 render->wait_text = FALSE;
251 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
252 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
254 g_mutex_init (&render->ass_mutex);
255 render->ass_library = ass_library_init ();
256 ass_set_message_cb (render->ass_library, _libass_message_cb, render);
257 ass_set_extract_fonts (render->ass_library, 1);
259 render->ass_renderer = ass_renderer_init (render->ass_library);
260 if (!render->ass_renderer) {
/* Renderer creation failing is considered unrecoverable here. */
261 GST_WARNING_OBJECT (render, "cannot create renderer instance");
262 g_assert_not_reached ();
265 render->ass_track = NULL;
267 GST_DEBUG_OBJECT (render, "init complete");
/* GObject finalize: releases the locks/condition and tears down the libass
 * track, renderer and library (guarded NULL checks), then chains up.
 * No locking is needed here since no other thread may hold a reference
 * during finalization. */
271 gst_ass_render_finalize (GObject * object)
273 GstAssRender *render = GST_ASS_RENDER (object);
275 g_mutex_clear (&render->lock);
276 g_cond_clear (&render->cond);
278 if (render->ass_track) {
279 ass_free_track (render->ass_track);
282 if (render->ass_renderer) {
283 ass_renderer_done (render->ass_renderer);
286 if (render->ass_library) {
287 ass_library_done (render->ass_library);
290 g_mutex_clear (&render->ass_mutex);
292 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Drops the cached overlay composition (if any) so the next frame forces a
 * re-render of the subtitle overlay. */
296 gst_ass_render_reset_composition (GstAssRender * render)
298 if (render->composition) {
299 gst_video_overlay_composition_unref (render->composition);
300 render->composition = NULL;
/* GObject property setter; all writes happen under the element lock.
 * Changing "embeddedfonts" additionally pushes the new value into the
 * libass library under ass_mutex. */
305 gst_ass_render_set_property (GObject * object, guint prop_id,
306 const GValue * value, GParamSpec * pspec)
308 GstAssRender *render = GST_ASS_RENDER (object);
310 GST_ASS_RENDER_LOCK (render);
313 render->enable = g_value_get_boolean (value);
315 case PROP_EMBEDDEDFONTS:
316 render->embeddedfonts = g_value_get_boolean (value);
317 g_mutex_lock (&render->ass_mutex);
318 ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
319 g_mutex_unlock (&render->ass_mutex);
322 render->wait_text = g_value_get_boolean (value);
325 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
328 GST_ASS_RENDER_UNLOCK (render);
/* GObject property getter; reads the boolean flags under the element lock
 * so they are consistent with concurrent streaming-thread updates. */
332 gst_ass_render_get_property (GObject * object, guint prop_id,
333 GValue * value, GParamSpec * pspec)
335 GstAssRender *render = GST_ASS_RENDER (object);
337 GST_ASS_RENDER_LOCK (render);
340 g_value_set_boolean (value, render->enable);
342 case PROP_EMBEDDEDFONTS:
343 g_value_set_boolean (value, render->embeddedfonts);
346 g_value_set_boolean (value, render->wait_text);
349 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
352 GST_ASS_RENDER_UNLOCK (render);
355 /* Called with lock held */
/* Drains and unrefs every queued subtitle buffer, then broadcasts on the
 * condition so a text-chain thread blocked waiting for the queue to empty
 * can continue (or notice flushing/EOS). */
357 gst_ass_render_pop_text (GstAssRender * render)
359 while (render->subtitle_pending) {
360 GST_DEBUG_OBJECT (render, "releasing text buffer %p",
361 render->subtitle_pending->data);
362 gst_buffer_unref (render->subtitle_pending->data);
363 render->subtitle_pending =
364 g_slist_delete_link (render->subtitle_pending,
365 render->subtitle_pending);
368 /* Let the text task know we used that buffer */
369 GST_ASS_RENDER_BROADCAST (render);
372 static GstStateChangeReturn
/* State-change vfunc.  Going PAUSED->READY: first (before chaining up) set
 * the flushing flags and wake any waiting text thread; afterwards free the
 * ass track and cached composition under ass_mutex.  Going READY->PAUSED:
 * reset flushing/EOS flags and both segments for a fresh stream. */
373 gst_ass_render_change_state (GstElement * element, GstStateChange transition)
375 GstAssRender *render = GST_ASS_RENDER (element);
376 GstStateChangeReturn ret;
378 switch (transition) {
379 case GST_STATE_CHANGE_PAUSED_TO_READY:
380 GST_ASS_RENDER_LOCK (render);
381 render->subtitle_flushing = TRUE;
382 render->video_flushing = TRUE;
383 gst_ass_render_pop_text (render);
384 GST_ASS_RENDER_UNLOCK (render);
390 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
391 if (ret == GST_STATE_CHANGE_FAILURE)
394 switch (transition) {
395 case GST_STATE_CHANGE_PAUSED_TO_READY:
396 g_mutex_lock (&render->ass_mutex);
397 if (render->ass_track)
398 ass_free_track (render->ass_track);
399 render->ass_track = NULL;
400 render->track_init_ok = FALSE;
401 render->renderer_init_ok = FALSE;
402 gst_ass_render_reset_composition (render);
403 g_mutex_unlock (&render->ass_mutex);
405 case GST_STATE_CHANGE_READY_TO_PAUSED:
406 GST_ASS_RENDER_LOCK (render);
407 render->subtitle_flushing = FALSE;
408 render->video_flushing = FALSE;
409 render->video_eos = FALSE;
410 render->subtitle_eos = FALSE;
411 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
412 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
413 GST_ASS_RENDER_UNLOCK (render);
/* Src-pad query handler: answers CAPS queries via
 * gst_ass_render_get_src_caps(); everything else goes to the default
 * handler. */
424 gst_ass_render_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
426 gboolean res = FALSE;
428 switch (GST_QUERY_TYPE (query)) {
431 GstCaps *filter, *caps;
433 gst_query_parse_caps (query, &filter);
434 caps = gst_ass_render_get_src_caps (pad, (GstAssRender *) parent, filter);
435 gst_query_set_caps_result (query, caps);
436 gst_caps_unref (caps);
441 res = gst_pad_query_default (pad, parent, query);
/* Src-pad event handler: forwards upstream events to the video sink pad,
 * and additionally to the text sink pad once a subtitle track has been
 * initialized (track_init_ok).  The event ref is split accordingly. */
449 gst_ass_render_event_src (GstPad * pad, GstObject * parent, GstEvent * event)
451 GstAssRender *render = GST_ASS_RENDER (parent);
454 GST_DEBUG_OBJECT (render, "received src event %" GST_PTR_FORMAT, event);
456 /* FIXME: why not just always push it on text pad? */
457 if (render->track_init_ok) {
458 ret = gst_pad_push_event (render->video_sinkpad, gst_event_ref (event));
459 gst_pad_push_event (render->text_sinkpad, event);
461 ret = gst_pad_push_event (render->video_sinkpad, event);
468 * gst_ass_render_add_feature_and_intersect:
470 * Creates a new #GstCaps containing the (given caps +
471 * given caps feature) + (given caps intersected by the
/* given filter).  Caller owns the returned caps. */
474 * Returns: the new #GstCaps
477 gst_ass_render_add_feature_and_intersect (GstCaps * caps,
478 const gchar * feature, GstCaps * filter)
483 new_caps = gst_caps_copy (caps);
/* Tag every non-ANY structure of the copy with the requested feature
 * (e.g. the overlay-composition meta). */
485 caps_size = gst_caps_get_size (new_caps);
486 for (i = 0; i < caps_size; i++) {
487 GstCapsFeatures *features = gst_caps_get_features (new_caps, i);
488 if (!gst_caps_features_is_any (features)) {
489 gst_caps_features_add (features, feature);
/* Then append the plain (feature-less) caps limited to the filter. */
493 gst_caps_append (new_caps, gst_caps_intersect_full (caps,
494 filter, GST_CAPS_INTERSECT_FIRST));
500 * gst_ass_render_intersect_by_feature:
502 * Creates a new #GstCaps based on the following filtering rule.
504 * For each individual caps contained in given caps, if the
505 * caps uses the given caps feature, keep a version of the caps
506 * with the feature and an another one without. Otherwise, intersect
507 * the caps with the given filter.
509 * Returns: the new #GstCaps
512 gst_ass_render_intersect_by_feature (GstCaps * caps,
513 const gchar * feature, GstCaps * filter)
518 new_caps = gst_caps_new_empty ();
520 caps_size = gst_caps_get_size (caps);
521 for (i = 0; i < caps_size; i++) {
522 GstStructure *caps_structure = gst_caps_get_structure (caps, i);
/* simple_caps takes ownership of the copied features below. */
523 GstCapsFeatures *caps_features =
524 gst_caps_features_copy (gst_caps_get_features (caps, i));
525 GstCaps *filtered_caps;
526 GstCaps *simple_caps =
527 gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
528 gst_caps_set_features (simple_caps, 0, caps_features);
530 if (gst_caps_features_contains (caps_features, feature)) {
/* Keep the featured version, then strip the feature for the plain copy. */
531 gst_caps_append (new_caps, gst_caps_copy (simple_caps));
533 gst_caps_features_remove (caps_features, feature);
534 filtered_caps = gst_caps_ref (simple_caps);
/* No feature: restrict this structure to what we can blend in software. */
536 filtered_caps = gst_caps_intersect_full (simple_caps, filter,
537 GST_CAPS_INTERSECT_FIRST);
540 gst_caps_unref (simple_caps);
541 gst_caps_append (new_caps, filtered_caps);
/* Computes the caps the video sink pad can accept: queries the src-pad
 * peer (optionally through a filter extended with the overlay-composition
 * feature), then splits/filters the result by the software-blending caps.
 * Falls back to the pad template caps when there is no peer. */
548 gst_ass_render_get_videosink_caps (GstPad * pad, GstAssRender * render,
551 GstPad *srcpad = render->srcpad;
552 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
555 /* filter caps + composition feature + filter caps
556 * filtered by the software caps. */
557 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
558 assrender_filter = gst_ass_render_add_feature_and_intersect (filter,
559 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
560 gst_caps_unref (sw_caps);
562 GST_DEBUG_OBJECT (render, "assrender filter %" GST_PTR_FORMAT,
566 peer_caps = gst_pad_peer_query_caps (srcpad, assrender_filter);
568 if (assrender_filter)
569 gst_caps_unref (assrender_filter);
573 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
575 if (gst_caps_is_any (peer_caps)) {
577 /* if peer returns ANY caps, return filtered src pad template caps */
578 caps = gst_caps_copy (gst_pad_get_pad_template_caps (srcpad));
581 /* duplicate caps which contain the composition into one version with
582 * the meta and one without. Filter the other caps by the software caps */
583 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
584 caps = gst_ass_render_intersect_by_feature (peer_caps,
585 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
586 gst_caps_unref (sw_caps);
589 gst_caps_unref (peer_caps);
592 /* no peer, our padtemplate is enough then */
593 caps = gst_pad_get_pad_template_caps (pad);
/* Finally apply the caller-supplied filter, if any. */
597 GstCaps *intersection = gst_caps_intersect_full (filter, caps,
598 GST_CAPS_INTERSECT_FIRST);
599 gst_caps_unref (caps);
603 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
/* Mirror of get_videosink_caps for the src pad: queries the video-sink
 * peer (upstream), adds the overlay-composition feature to the result and
 * limits the plain variants to the software-blending caps.  Falls back to
 * the pad template caps when there is no peer. */
609 gst_ass_render_get_src_caps (GstPad * pad, GstAssRender * render,
612 GstPad *sinkpad = render->video_sinkpad;
613 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
616 /* duplicate filter caps which contain the composition into one version
617 * with the meta and one without. Filter the other caps by the software
619 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
621 gst_ass_render_intersect_by_feature (filter,
622 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
623 gst_caps_unref (sw_caps);
626 peer_caps = gst_pad_peer_query_caps (sinkpad, assrender_filter);
628 if (assrender_filter)
629 gst_caps_unref (assrender_filter);
633 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
635 if (gst_caps_is_any (peer_caps)) {
637 /* if peer returns ANY caps, return filtered sink pad template caps */
638 caps = gst_caps_copy (gst_pad_get_pad_template_caps (sinkpad));
642 /* return upstream caps + composition feature + upstream caps
643 * filtered by the software caps. */
644 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
645 caps = gst_ass_render_add_feature_and_intersect (peer_caps,
646 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
647 gst_caps_unref (sw_caps);
650 gst_caps_unref (peer_caps);
653 /* no peer, our padtemplate is enough then */
654 caps = gst_pad_get_pad_template_caps (pad);
/* Apply the caller-supplied filter last. */
658 GstCaps *intersection;
661 gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
662 gst_caps_unref (caps);
666 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
/* Blits the linked list of libass images into a zeroed BGRA buffer with
 * premultiplied alpha.  'data' is the destination pixel buffer of
 * width x height with the given stride; x_off/y_off shift all images.
 * Each ASS_Image is an 8-bit coverage bitmap tinted by image->color
 * (0xRRGGBBAA, AA = transparency). */
672 blit_bgra_premultiplied (GstAssRender * render, ASS_Image * ass_image,
673 guint8 * data, gint width, gint height, gint stride, gint x_off, gint y_off)
676 gint alpha, r, g, b, k;
684 memset (data, 0, stride * height);
687 dst_x = ass_image->dst_x + x_off;
688 dst_y = ass_image->dst_y + y_off;
/* Clamp against the right/bottom edges; NOTE(review): no clamp against
 * negative dst_x/dst_y is visible here — if the offsets can go negative
 * this would write before 'data'; confirm callers guarantee >= 0. */
690 w = MIN (ass_image->w, width - dst_x);
691 h = MIN (ass_image->h, height - dst_y);
692 if (w <= 0 || h <= 0)
695 alpha = 255 - (ass_image->color & 0xff);
699 r = ((ass_image->color) >> 24) & 0xff;
700 g = ((ass_image->color) >> 16) & 0xff;
701 b = ((ass_image->color) >> 8) & 0xff;
703 src = ass_image->bitmap;
704 dst = data + dst_y * stride + dst_x * 4;
706 src_skip = ass_image->stride - w;
707 dst_skip = stride - w * 4;
709 for (y = 0; y < h; y++) {
710 for (x = 0; x < w; x++) {
/* k = effective coverage after applying the image's global alpha. */
712 k = src[0] * alpha / 255;
715 dst[2] = (k * r) / 255;
716 dst[1] = (k * g) / 255;
717 dst[0] = (k * b) / 255;
/* General case: "over" compositing with premultiplied destination. */
719 dst[3] = k + (255 - k) * dst[3] / 255;
720 dst[2] = (k * r + (255 - k) * dst[2]) / 255;
721 dst[1] = (k * g + (255 - k) * dst[1]) / 255;
722 dst[0] = (k * b + (255 - k) * dst[0]) / 255;
733 ass_image = ass_image->next;
735 GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
/* Returns whether the given caps are a subset of the software-blendable
 * formats (ASSRENDER_CAPS), i.e. whether we can blit subtitles ourselves
 * instead of attaching a composition meta. */
739 gst_ass_render_can_handle_caps (GstCaps * incaps)
741 static GstStaticCaps static_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
745 caps = gst_static_caps_get (&static_caps);
746 ret = gst_caps_is_subset (incaps, caps);
747 gst_caps_unref (caps);
/* Recomputes ass_frame_width/height: the largest rectangle with the
 * video's aspect ratio that fits inside the negotiated window size
 * (letterbox/pillarbox fit). */
753 gst_ass_render_update_render_size (GstAssRender * render)
755 gdouble video_aspect = (gdouble) render->info.width /
756 (gdouble) render->info.height;
757 gdouble window_aspect = (gdouble) render->window_width /
758 (gdouble) render->window_height;
760 /* render at the window size, with the video aspect ratio */
761 if (video_aspect >= window_aspect) {
/* Video is wider than the window: width-limited. */
762 render->ass_frame_width = render->window_width;
763 render->ass_frame_height = render->window_width / video_aspect;
/* Otherwise height-limited. */
765 render->ass_frame_width = render->window_height * video_aspect;
766 render->ass_frame_height = render->window_height;
/* (Re)negotiates the src caps and decides between attaching a
 * GstVideoOverlayCompositionMeta to buffers versus blending in software,
 * then (re)configures the libass renderer for the chosen render size.
 * 'caps' may be NULL, in which case the current video-sink caps are used. */
771 gst_ass_render_negotiate (GstAssRender * render, GstCaps * caps)
773 gboolean upstream_has_meta = FALSE;
774 gboolean caps_has_meta = FALSE;
775 gboolean alloc_has_meta = FALSE;
776 gboolean attach = FALSE;
780 GstCaps *overlay_caps;
784 GST_DEBUG_OBJECT (render, "performing negotiation");
786 /* Clear cached composition */
787 gst_ass_render_reset_composition (render);
789 /* Clear any pending reconfigure flag */
790 gst_pad_check_reconfigure (render->srcpad);
793 caps = gst_pad_get_current_caps (render->video_sinkpad);
797 if (!caps || gst_caps_is_empty (caps))
800 /* Check if upstream caps have meta */
801 if ((f = gst_caps_get_features (caps, 0))) {
802 upstream_has_meta = gst_caps_features_contains (f,
803 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
806 /* Initialize dimensions */
807 width = render->info.width;
808 height = render->info.height;
810 if (upstream_has_meta) {
811 overlay_caps = gst_caps_ref (caps);
815 /* BaseTransform requires caps for the allocation query to work */
816 overlay_caps = gst_caps_copy (caps);
817 f = gst_caps_get_features (overlay_caps, 0);
818 gst_caps_features_add (f,
819 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
821 /* Then check if downstream accepts overlay composition in caps */
822 /* FIXME: We should probably check if downstream *prefers* the
823 * overlay meta, and only enforce usage of it if we can't handle
824 * the format ourselves and thus would have to drop the overlays.
825 * Otherwise we should prefer what downstream wants here.
827 peercaps = gst_pad_peer_query_caps (render->srcpad, NULL);
828 caps_has_meta = gst_caps_can_intersect (peercaps, overlay_caps);
829 gst_caps_unref (peercaps);
831 GST_DEBUG ("caps have overlay meta %d", caps_has_meta);
834 if (upstream_has_meta || caps_has_meta) {
835 /* Send caps immediately, it's needed by GstBaseTransform to get a reply
836 * from allocation query */
837 ret = gst_pad_set_caps (render->srcpad, overlay_caps);
839 /* First check if the allocation meta has composition */
840 query = gst_query_new_allocation (overlay_caps, FALSE);
842 if (!gst_pad_peer_query (render->srcpad, query)) {
843 /* no problem, we use the query defaults */
844 GST_DEBUG_OBJECT (render, "ALLOCATION query failed");
846 /* In case we were flushing, mark reconfigure and fail this method,
847 * will make it retry */
848 if (render->video_flushing)
852 alloc_has_meta = gst_query_find_allocation_meta (query,
853 GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);
855 GST_DEBUG ("sink alloc has overlay meta %d", alloc_has_meta);
857 if (alloc_has_meta) {
858 const GstStructure *params;
/* NOTE(review): "¶ms" below looks like a mis-encoded "&params"
 * (HTML-entity corruption) — restore the address-of operator. */
860 gst_query_parse_nth_allocation_meta (query, alloc_index, ¶ms);
/* Downstream can advertise its window size in the meta params; use it
 * as the render size when present. */
862 if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
863 "height", G_TYPE_UINT, &height, NULL)) {
864 GST_DEBUG ("received window size: %dx%d", width, height);
865 g_assert (width != 0 && height != 0);
870 gst_query_unref (query);
873 /* Update render size if needed */
874 render->window_width = width;
875 render->window_height = height;
876 gst_ass_render_update_render_size (render);
878 /* For backward compatibility, we will prefer blitting if downstream
879 * allocation does not support the meta. In other case we will prefer
880 * attaching, and will fail the negotiation in the unlikely case we are
881 * forced to blit, but format isn't supported. */
883 if (upstream_has_meta) {
885 } else if (caps_has_meta) {
886 if (alloc_has_meta) {
889 /* Don't attach unless we cannot handle the format */
890 attach = !gst_ass_render_can_handle_caps (caps);
893 ret = gst_ass_render_can_handle_caps (caps);
896 /* If we attach, then pick the overlay caps */
898 GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, overlay_caps);
899 /* Caps were already sent */
901 GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, caps);
902 ret = gst_pad_set_caps (render->srcpad, caps);
905 render->attach_compo_to_buffer = attach;
908 GST_DEBUG_OBJECT (render, "negotiation failed, schedule reconfigure");
909 gst_pad_mark_reconfigure (render->srcpad);
911 g_mutex_lock (&render->ass_mutex);
912 ass_set_frame_size (render->ass_renderer,
913 render->ass_frame_width, render->ass_frame_height);
914 ass_set_storage_size (render->ass_renderer,
915 render->info.width, render->info.height);
916 ass_set_pixel_aspect (render->ass_renderer,
917 (gdouble) render->info.par_n / (gdouble) render->info.par_d);
918 ass_set_font_scale (render->ass_renderer, 1.0);
919 ass_set_hinting (render->ass_renderer, ASS_HINTING_LIGHT);
/* NOTE(review): the second ass_set_fonts() call fully overrides the
 * first (per libass API, each call replaces the default font config),
 * so the "Arial" line below is dead — drop one of the two calls. */
921 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
922 ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
923 ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
924 ass_set_use_margins (render->ass_renderer, 0);
925 g_mutex_unlock (&render->ass_mutex);
927 render->renderer_init_ok = TRUE;
929 GST_DEBUG_OBJECT (render, "ass renderer setup complete");
932 gst_caps_unref (overlay_caps);
933 gst_caps_unref (caps);
936 gst_pad_mark_reconfigure (render->srcpad);
/* error path: release caps and schedule a retry */
943 gst_caps_unref (caps);
944 gst_pad_mark_reconfigure (render->srcpad);
/* Video-sink CAPS handler: parses the caps into render->info, runs
 * negotiation, and rejects formats we can neither blend in software nor
 * pass through with an attached composition meta. */
950 gst_ass_render_setcaps_video (GstPad * pad, GstAssRender * render,
956 if (!gst_video_info_from_caps (&info, caps))
961 ret = gst_ass_render_negotiate (render, caps);
963 GST_ASS_RENDER_LOCK (render);
965 if (!render->attach_compo_to_buffer && !gst_ass_render_can_handle_caps (caps)) {
966 GST_DEBUG_OBJECT (render, "unsupported caps %" GST_PTR_FORMAT, caps);
969 GST_ASS_RENDER_UNLOCK (render);
/* error path for unparsable caps */
976 GST_ERROR_OBJECT (render, "could not parse caps");
/* Text-sink CAPS handler: creates the ass track and, when the caps carry
 * "codec_data" (the SSA/ASS script header from the demuxer), feeds it to
 * libass via ass_process_codec_private().  All libass access is under
 * ass_mutex. */
982 gst_ass_render_setcaps_text (GstPad * pad, GstAssRender * render,
985 GstStructure *structure;
989 gboolean ret = FALSE;
991 structure = gst_caps_get_structure (caps, 0);
993 GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,
996 value = gst_structure_get_value (structure, "codec_data");
998 g_mutex_lock (&render->ass_mutex);
1000 priv = gst_value_get_buffer (value);
/* NOTE(review): returning here leaves ass_mutex locked — this early
 * return path appears to leak the lock; confirm against full source. */
1001 g_return_val_if_fail (priv != NULL, FALSE);
1003 gst_buffer_map (priv, &map, GST_MAP_READ);
1005 if (!render->ass_track)
1006 render->ass_track = ass_new_track (render->ass_library);
1008 ass_process_codec_private (render->ass_track, (char *) map.data, map.size);
1010 gst_buffer_unmap (priv, &map);
1012 GST_DEBUG_OBJECT (render, "ass track created");
1014 render->track_init_ok = TRUE;
1017 } else if (!render->ass_track) {
/* No codec_data: start with an empty track; events arrive via chain. */
1018 render->ass_track = ass_new_track (render->ass_library);
1020 render->track_init_ok = TRUE;
1024 g_mutex_unlock (&render->ass_mutex);
/* Feeds one subtitle buffer to libass.  running_time/duration are
 * converted from GstClockTime (ns) to the millisecond timestamps
 * ass_process_chunk() expects. */
1031 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
1032 GstClockTime running_time, GstClockTime duration)
1035 gdouble pts_start, pts_end;
1037 pts_start = running_time;
1038 pts_start /= GST_MSECOND;
1040 pts_end /= GST_MSECOND;
1042 GST_DEBUG_OBJECT (render,
1043 "Processing subtitles with running time %" GST_TIME_FORMAT
1044 " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
1045 GST_TIME_ARGS (duration));
1047 gst_buffer_map (buffer, &map, GST_MAP_READ);
1049 g_mutex_lock (&render->ass_mutex);
1050 ass_process_chunk (render->ass_track, (gchar *) map.data, map.size,
1051 pts_start, pts_end);
1052 g_mutex_unlock (&render->ass_mutex);
1054 gst_buffer_unmap (buffer, &map);
1057 static GstVideoOverlayComposition *
/* Converts the list of libass images into a single
 * GstVideoOverlayComposition: computes the bounding box of all images,
 * blits them into one premultiplied BGRA buffer, and wraps it in a
 * rectangle scaled from ass-frame coordinates back to video coordinates.
 * Returns NULL on allocation/mapping failure (error paths partly outside
 * this view). */
1058 gst_ass_render_composite_overlay (GstAssRender * render, ASS_Image * images)
1060 GstVideoOverlayComposition *composition;
1061 GstVideoOverlayRectangle *rectangle;
1062 GstVideoMeta *vmeta;
1070 gdouble hscale, vscale;
1078 /* find bounding box of all images, to limit the overlay rectangle size */
1079 for (image = images; image; image = image->next) {
1080 if (min_x > image->dst_x)
1081 min_x = image->dst_x;
1082 if (min_y > image->dst_y)
1083 min_y = image->dst_y;
1084 if (max_x < image->dst_x + image->w)
1085 max_x = image->dst_x + image->w;
1086 if (max_y < image->dst_y + image->h)
1087 max_y = image->dst_y + image->h;
/* Never exceed the ass render frame size. */
1090 width = MIN (max_x - min_x, render->ass_frame_width);
1091 height = MIN (max_y - min_y, render->ass_frame_height);
1093 GST_DEBUG_OBJECT (render, "render overlay rectangle %dx%d%+d%+d",
1094 width, height, min_x, min_y);
/* 4 bytes per pixel (BGRA). */
1096 buffer = gst_buffer_new_and_alloc (4 * width * height);
1098 GST_ERROR_OBJECT (render, "Failed to allocate overlay buffer");
1102 vmeta = gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
1103 GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, width, height);
1105 if (!gst_video_meta_map (vmeta, 0, &map, &data, &stride, GST_MAP_READWRITE)) {
1106 GST_ERROR_OBJECT (render, "Failed to map overlay buffer");
1107 gst_buffer_unref (buffer);
/* Blit relative to the bounding box origin (offsets not visible here,
 * presumably -min_x/-min_y — confirm against full source). */
1111 blit_bgra_premultiplied (render, images, data, width, height, stride,
1113 gst_video_meta_unmap (vmeta, 0, &map);
/* Scale from ass-frame coordinates back to video pixel coordinates. */
1115 hscale = (gdouble) render->info.width / (gdouble) render->ass_frame_width;
1116 vscale = (gdouble) render->info.height / (gdouble) render->ass_frame_height;
1118 rectangle = gst_video_overlay_rectangle_new_raw (buffer,
1119 hscale * min_x, vscale * min_y, hscale * width, vscale * height,
1120 GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
1122 gst_buffer_unref (buffer);
1124 composition = gst_video_overlay_composition_new (rectangle);
1125 gst_video_overlay_rectangle_unref (rectangle);
/* Pushes a video frame downstream with the cached composition applied:
 * either attaches it as an overlay-composition meta (downstream renders
 * it) or blends it into the pixels in place.  Takes ownership of
 * video_frame; no-composition and map-failure paths push unmodified. */
1131 gst_ass_render_push_frame (GstAssRender * render, GstBuffer * video_frame)
1133 GstVideoFrame frame;
1135 if (!render->composition)
1138 video_frame = gst_buffer_make_writable (video_frame);
1140 if (render->attach_compo_to_buffer) {
1141 gst_buffer_add_video_overlay_composition_meta (video_frame,
1142 render->composition);
1146 if (!gst_video_frame_map (&frame, &render->info, video_frame,
1147 GST_MAP_READWRITE)) {
1148 GST_WARNING_OBJECT (render, "failed to map video frame for blending");
1152 gst_video_overlay_composition_blend (render->composition, &frame);
1153 gst_video_frame_unmap (&frame);
1156 return gst_pad_push (render->srcpad, video_frame);
/* gst_ass_render_chain_video:
 * Video sink-pad chain function.  Clips the incoming buffer to the video
 * segment, waits (if configured) for matching subtitle buffers, renders the
 * ASS track via libass for the frame's running time and pushes the frame
 * (with overlay) downstream.  Takes ownership of @buffer on all paths.
 * NOTE(review): this listing elides some lines (braces, else branches,
 * labels); the elided control flow is inferred from the visible goto
 * targets. */
1159 static GstFlowReturn
1160 gst_ass_render_chain_video (GstPad * pad, GstObject * parent,
1163 GstAssRender *render = GST_ASS_RENDER (parent);
1164 GstFlowReturn ret = GST_FLOW_OK;
1165 gboolean in_seg = FALSE;
1166 guint64 start, stop, clip_start = 0, clip_stop = 0;
1167 ASS_Image *ass_image;
/* Renegotiate output caps if downstream requested reconfiguration. */
1170 if (gst_pad_check_reconfigure (render->srcpad)) {
1171 if (!gst_ass_render_negotiate (render, NULL)) {
1172 gst_pad_mark_reconfigure (render->srcpad);
1173 if (GST_PAD_IS_FLUSHING (render->srcpad))
1176 goto not_negotiated;
1180 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1181 goto missing_timestamp;
1183 /* ignore buffers that are outside of the current segment */
1184 start = GST_BUFFER_TIMESTAMP (buffer);
1186 if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
1187 stop = GST_CLOCK_TIME_NONE;
1189 stop = start + GST_BUFFER_DURATION (buffer);
1192 /* segment_clip() will adjust start unconditionally to segment_start if
1193 * no stop time is provided, so handle this ourselves */
1194 if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
1195 goto out_of_segment;
1198 gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
1199 &clip_start, &clip_stop);
1202 goto out_of_segment;
1204 /* if the buffer is only partially in the segment, fix up stamps */
1205 if (clip_start != start || (stop != -1 && clip_stop != stop)) {
1206 GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
1207 buffer = gst_buffer_make_writable (buffer);
1208 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1210 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1213 /* now, after we've done the clipping, fix up end time if there's no
1214 * duration (we only use those estimated values internally though, we
1215 * don't want to set bogus values on the buffer itself) */
1217 if (render->info.fps_n && render->info.fps_d) {
1218 GST_DEBUG_OBJECT (render, "estimating duration based on framerate");
1220 start + gst_util_uint64_scale_int (GST_SECOND, render->info.fps_d,
1221 render->info.fps_n);
1223 GST_WARNING_OBJECT (render, "no duration, assuming minimal duration");
1224 stop = start + 1; /* we need to assume some interval */
/* Serialize against the text chain / flush / EOS state. */
1230 GST_ASS_RENDER_LOCK (render);
1232 if (render->video_flushing)
1235 if (render->video_eos)
1238 if (render->renderer_init_ok && render->track_init_ok && render->enable) {
1239 /* Text pad linked, check if we have a text buffer queued */
1240 if (render->subtitle_pending) {
1241 GSList *subtitle_pending = render->subtitle_pending;
1242 GstClockTime text_start = GST_CLOCK_TIME_NONE;
1243 GstClockTime text_end = GST_CLOCK_TIME_NONE;
1244 GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
1245 GstClockTime text_running_time_end = GST_CLOCK_TIME_NONE;
1246 GstClockTime vid_running_time, vid_running_time_end;
1251 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1253 vid_running_time_end =
1254 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1257 GST_LOG_OBJECT (render, "V : %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1258 GST_TIME_ARGS (vid_running_time),
1259 GST_TIME_ARGS (vid_running_time_end));
1261 if (subtitle_pending == NULL)
1262 GST_LOG_OBJECT (render, "T : no pending subtitles");
1264 while (subtitle_pending != NULL) {
1267 /* if the text buffer isn't stamped right, pop it off the
1268 * queue and display it for the current video frame only */
1269 if (!GST_BUFFER_TIMESTAMP_IS_VALID (subtitle_pending->data) ||
1270 !GST_BUFFER_DURATION_IS_VALID (subtitle_pending->data)) {
1271 GSList *bad = subtitle_pending;
1272 GST_WARNING_OBJECT (render,
1273 "Got text buffer with invalid timestamp or duration");
1274 gst_buffer_unref (bad->data);
/* FIX: advance the cursor past the bad node and delete THAT node.
 * Previously this did 'bad = subtitle_pending->next;' and then deleted
 * 'bad', i.e. the wrong (next) link: the already-unreffed node stayed
 * in the list with a dangling data pointer and the cursor never moved.
 * This now matches the pop pattern used at the other two sites below. */
1275 subtitle_pending = bad->next;
1276 render->subtitle_pending =
1277 g_slist_delete_link (render->subtitle_pending, bad);
1278 GST_ASS_RENDER_BROADCAST (render);
1282 text_start = GST_BUFFER_TIMESTAMP (subtitle_pending->data);
1283 text_end = text_start + GST_BUFFER_DURATION (subtitle_pending->data);
1285 /* If timestamp and duration are valid */
1287 gst_segment_to_running_time (&render->subtitle_segment,
1288 GST_FORMAT_TIME, text_start);
1289 text_running_time_end =
1290 gst_segment_to_running_time (&render->subtitle_segment,
1291 GST_FORMAT_TIME, text_end);
1293 GST_LOG_OBJECT (render, "T%u: %" GST_TIME_FORMAT " - "
1294 "%" GST_TIME_FORMAT, n, GST_TIME_ARGS (text_running_time),
1295 GST_TIME_ARGS (text_running_time_end));
/* Subtitle ends before this frame starts: drop it. */
1298 if (text_running_time_end <= vid_running_time) {
1299 GSList *old = subtitle_pending;
1300 GST_DEBUG_OBJECT (render, "text buffer too old, popping");
1301 gst_buffer_unref (old->data);
1302 subtitle_pending = old->next;
1303 render->subtitle_pending =
1304 g_slist_delete_link (render->subtitle_pending, old);
1305 GST_ASS_RENDER_BROADCAST (render);
/* Feed the subtitle buffer to the ASS track (plain-text path only). */
1309 if (render->need_process) {
1310 GST_DEBUG_OBJECT (render, "process text buffer");
1311 gst_ass_render_process_text (render, subtitle_pending->data,
1312 text_running_time, text_running_time_end - text_running_time);
1315 subtitle_pending = subtitle_pending->next;
1318 if (render->need_process) {
1319 render->need_process = FALSE;
/* Drop the lock while calling into libass / blending. */
1322 GST_ASS_RENDER_UNLOCK (render);
1324 /* libass needs timestamps in ms */
1325 timestamp = vid_running_time / GST_MSECOND;
1327 g_mutex_lock (&render->ass_mutex);
1328 ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
1329 timestamp, &changed);
1330 g_mutex_unlock (&render->ass_mutex);
/* Overlay disappeared or changed: drop the cached composition. */
1332 if ((!ass_image || changed) && render->composition) {
1333 GST_DEBUG_OBJECT (render, "release overlay (changed %d)", changed);
1334 gst_ass_render_reset_composition (render);
1337 if (ass_image != NULL) {
1338 if (!render->composition)
1339 render->composition = gst_ass_render_composite_overlay (render,
1342 GST_DEBUG_OBJECT (render, "nothing to render right now");
1345 /* Push the video frame */
1346 ret = gst_ass_render_push_frame (render, buffer);
/* Pop subtitle buffers that will be fully past once this frame is done. */
1348 subtitle_pending = render->subtitle_pending;
1349 while (subtitle_pending != NULL) {
1351 text_start = GST_BUFFER_TIMESTAMP (subtitle_pending->data);
1352 text_end = text_start + GST_BUFFER_DURATION (subtitle_pending->data);
1354 text_running_time_end =
/* NOTE(review): this converts subtitle timestamps through the VIDEO
 * segment, while the loop above uses subtitle_segment — looks
 * inconsistent; confirm against upstream. */
1355 gst_segment_to_running_time (&render->video_segment,
1356 GST_FORMAT_TIME, text_end);
1358 if (text_running_time_end <= vid_running_time_end) {
1359 GSList *old = subtitle_pending;
1360 GST_DEBUG_OBJECT (render, "finished text buffer, popping");
1361 GST_ASS_RENDER_LOCK (render);
1362 gst_buffer_unref (old->data);
1363 subtitle_pending = old->next;
1364 render->subtitle_pending =
1365 g_slist_delete_link (render->subtitle_pending, old);
1366 GST_ASS_RENDER_BROADCAST (render);
1367 GST_ASS_RENDER_UNLOCK (render);
1368 render->need_process = TRUE;
1369 if (g_slist_length (render->subtitle_pending) == 0) {
1370 render->need_process = FALSE;
1373 subtitle_pending = subtitle_pending->next;
/* No subtitle queued: decide whether to block waiting for one. */
1377 gboolean wait_for_text_buf = TRUE;
1379 if (render->subtitle_eos)
1380 wait_for_text_buf = FALSE;
1382 if (!render->wait_text)
1383 wait_for_text_buf = FALSE;
1385 /* Text pad linked, but no text buffer available - what now? */
1386 if (render->subtitle_segment.format == GST_FORMAT_TIME) {
1387 GstClockTime text_start_running_time, text_last_stop_running_time;
1388 GstClockTime vid_running_time;
1391 gst_segment_to_running_time (&render->video_segment,
1392 GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
1393 text_start_running_time =
1394 gst_segment_to_running_time (&render->subtitle_segment,
1395 GST_FORMAT_TIME, render->subtitle_segment.start);
1396 text_last_stop_running_time =
1397 gst_segment_to_running_time (&render->subtitle_segment,
1398 GST_FORMAT_TIME, render->subtitle_segment.position);
/* Video is still ahead of any subtitle we could receive: no point
 * blocking this frame. */
1400 if ((GST_CLOCK_TIME_IS_VALID (text_start_running_time) &&
1401 vid_running_time < text_start_running_time) ||
1402 (GST_CLOCK_TIME_IS_VALID (text_last_stop_running_time) &&
1403 vid_running_time < text_last_stop_running_time)) {
1404 wait_for_text_buf = FALSE;
1408 if (wait_for_text_buf) {
1409 GST_DEBUG_OBJECT (render, "no text buffer, need to wait for one");
1410 GST_ASS_RENDER_WAIT (render);
1411 GST_DEBUG_OBJECT (render, "resuming");
1412 GST_ASS_RENDER_UNLOCK (render);
1413 goto wait_for_text_buf;
1415 GST_ASS_RENDER_UNLOCK (render);
1416 GST_LOG_OBJECT (render, "no need to wait for a text buffer");
1417 ret = gst_pad_push (render->srcpad, buffer);
/* Rendering disabled or renderer/track not initialized: passthrough. */
1421 GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");
1423 GST_ASS_RENDER_UNLOCK (render);
1424 ret = gst_pad_push (render->srcpad, buffer);
1428 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p ret=%d", buffer, ret);
1430 /* Update last_stop */
1431 render->video_segment.position = clip_start;
/* Error/skip paths: each one releases the buffer (and the lock where
 * it is held) before returning. */
1437 GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
1438 gst_buffer_unref (buffer);
1443 GST_ASS_RENDER_UNLOCK (render);
1444 GST_DEBUG_OBJECT (render, "not negotiated");
1445 gst_buffer_unref (buffer);
1446 return GST_FLOW_NOT_NEGOTIATED;
1450 GST_ASS_RENDER_UNLOCK (render);
1451 GST_DEBUG_OBJECT (render, "flushing, discarding buffer");
1452 gst_buffer_unref (buffer);
1453 return GST_FLOW_FLUSHING;
1457 GST_ASS_RENDER_UNLOCK (render);
1458 GST_DEBUG_OBJECT (render, "eos, discarding buffer");
1459 gst_buffer_unref (buffer);
1460 return GST_FLOW_EOS;
1464 GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
1465 gst_buffer_unref (buffer);
/* gst_ass_render_chain_text:
 * Text sink-pad chain function.  Clips the subtitle buffer to the subtitle
 * segment, queues a reference on render->subtitle_pending for the video
 * chain to consume, and wakes up a possibly waiting video chain.
 * Always unrefs the caller's @buffer reference before returning. */
1470 static GstFlowReturn
1471 gst_ass_render_chain_text (GstPad * pad, GstObject * parent, GstBuffer * buffer)
1473 GstFlowReturn ret = GST_FLOW_OK;
1474 GstAssRender *render = GST_ASS_RENDER (parent);
1475 gboolean in_seg = FALSE;
1476 guint64 clip_start = 0, clip_stop = 0;
1478 GST_DEBUG_OBJECT (render, "entering chain for buffer %p", buffer);
1480 GST_ASS_RENDER_LOCK (render);
/* Flushing: reject the buffer with FLUSHING. */
1482 if (render->subtitle_flushing) {
1483 GST_ASS_RENDER_UNLOCK (render);
1484 ret = GST_FLOW_FLUSHING;
1485 GST_LOG_OBJECT (render, "text flushing");
/* Already saw EOS on the text pad: drop further buffers. */
1489 if (render->subtitle_eos) {
1490 GST_ASS_RENDER_UNLOCK (render);
1492 GST_LOG_OBJECT (render, "text EOS");
/* Clip the buffer against the subtitle segment. */
1496 if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
1499 if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
1500 stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
1502 stop = GST_CLOCK_TIME_NONE;
1504 in_seg = gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME,
1505 GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
1511 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1512 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1513 else if (GST_BUFFER_DURATION_IS_VALID (buffer))
1514 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
/* Track the subtitle stream position for the wait logic in chain_video. */
1516 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1517 render->subtitle_segment.position = clip_start;
1519 GST_DEBUG_OBJECT (render,
1520 "New buffer arrived for timestamp %" GST_TIME_FORMAT,
1521 GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
/* Queue a ref for the video chain; need_process tells it to (re)feed
 * pending buffers into the ASS track. */
1522 render->subtitle_pending = g_slist_append (render->subtitle_pending,
1523 gst_buffer_ref (buffer));
1524 render->need_process = TRUE;
1526 /* in case the video chain is waiting for a text buffer, wake it up */
1527 GST_ASS_RENDER_BROADCAST (render);
1530 GST_ASS_RENDER_UNLOCK (render);
1533 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p", buffer);
1535 gst_buffer_unref (buffer);
/* gst_ass_render_handle_tag_sample:
 * Inspect one attachment sample from a TAG event and, if it looks like a
 * font (matching mimetype or filename extension), register its data with
 * the libass library instance so embedded fonts can be used. */
1540 gst_ass_render_handle_tag_sample (GstAssRender * render, GstSample * sample)
1542 static const gchar *mimetypes[] = {
1543 "application/x-font-ttf",
1544 "application/x-font-otf",
1545 "application/x-truetype-font"
1547 static const gchar *extensions[] = {
1553 const GstStructure *structure;
1554 gboolean valid_mimetype, valid_extension;
1556 const gchar *filename;
1558 buf = gst_sample_get_buffer (sample);
1559 structure = gst_sample_get_info (sample);
/* Both the font data and the describing structure are required. */
1561 if (!buf || !structure)
1564 valid_mimetype = FALSE;
1565 valid_extension = FALSE;
/* First try to identify the attachment by its mimetype. */
1567 for (i = 0; i < G_N_ELEMENTS (mimetypes); i++) {
1568 if (gst_structure_has_name (structure, mimetypes[i])) {
1569 valid_mimetype = TRUE;
1574 filename = gst_structure_get_string (structure, "filename");
/* Fall back to matching the last 4 characters of the filename.
 * NOTE(review): 'filename + len - 4' underflows for names shorter than
 * 4 chars — confirm the elided lines guard filename/len before this. */
1578 if (!valid_mimetype) {
1579 guint len = strlen (filename);
1580 const gchar *extension = filename + len - 4;
1581 for (i = 0; i < G_N_ELEMENTS (extensions); i++) {
1582 if (g_ascii_strcasecmp (extension, extensions[i]) == 0) {
1583 valid_extension = TRUE;
/* Hand the font bytes to libass; ass_mutex serializes access to the
 * shared ASS_Library instance. */
1589 if (valid_mimetype || valid_extension) {
1592 g_mutex_lock (&render->ass_mutex);
1593 gst_buffer_map (buf, &map, GST_MAP_READ);
1594 ass_add_font (render->ass_library, (gchar *) filename,
1595 (gchar *) map.data, map.size);
1596 gst_buffer_unmap (buf, &map);
1597 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1598 g_mutex_unlock (&render->ass_mutex);
/* gst_ass_render_handle_tags:
 * Walk all GST_TAG_ATTACHMENT samples in @taglist (if embedded fonts are
 * enabled) and feed each one to gst_ass_render_handle_tag_sample(). */
1603 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1610 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1611 if (tag_size > 0 && render->embeddedfonts) {
1615 GST_DEBUG_OBJECT (render, "TAG event has attachments");
1617 for (index = 0; index < tag_size; index++) {
/* Each successfully fetched sample is consumed here (unref'd). */
1618 if (gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, index,
1620 gst_ass_render_handle_tag_sample (render, sample);
1621 gst_sample_unref (sample);
/* gst_ass_render_event_video:
 * Event handler for the video sink pad: applies caps, tracks the TIME
 * segment, scans TAG events for embedded fonts, and maintains the
 * flushing/EOS flags used by the chain functions. */
1628 gst_ass_render_event_video (GstPad * pad, GstObject * parent, GstEvent * event)
1630 gboolean ret = FALSE;
1631 GstAssRender *render = GST_ASS_RENDER (parent);
1633 GST_DEBUG_OBJECT (pad, "received video event %" GST_PTR_FORMAT, event);
1635 switch (GST_EVENT_TYPE (event)) {
1636 case GST_EVENT_CAPS:
1640 gst_event_parse_caps (event, &caps);
/* CAPS is consumed here (not forwarded via the default handler). */
1641 ret = gst_ass_render_setcaps_video (pad, render, caps);
1642 gst_event_unref (event);
1645 case GST_EVENT_SEGMENT:
1649 GST_DEBUG_OBJECT (render, "received new segment");
1651 gst_event_copy_segment (event, &segment);
/* Only TIME segments are accepted; others are warned about and dropped. */
1653 if (segment.format == GST_FORMAT_TIME) {
1654 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1655 &render->video_segment);
1657 render->video_segment = segment;
1659 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
1660 &render->video_segment);
1661 ret = gst_pad_event_default (pad, parent, event);
1663 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1664 ("received non-TIME newsegment event on video input"));
1666 gst_event_unref (event);
1672 GstTagList *taglist = NULL;
1674 /* tag events may contain attachments which might be fonts */
1675 GST_DEBUG_OBJECT (render, "got TAG event");
1677 gst_event_parse_tag (event, &taglist);
1678 gst_ass_render_handle_tags (render, taglist);
1679 ret = gst_pad_event_default (pad, parent, event);
/* EOS / flush handling below updates state under the render lock before
 * forwarding the event downstream. */
1683 GST_ASS_RENDER_LOCK (render);
1684 GST_INFO_OBJECT (render, "video EOS");
1685 render->video_eos = TRUE;
1686 GST_ASS_RENDER_UNLOCK (render);
1687 ret = gst_pad_event_default (pad, parent, event);
1689 case GST_EVENT_FLUSH_START:
1690 GST_ASS_RENDER_LOCK (render);
1691 GST_INFO_OBJECT (render, "video flush start");
1692 render->video_flushing = TRUE;
/* Wake the video chain in case it is blocked waiting for text. */
1693 GST_ASS_RENDER_BROADCAST (render);
1694 GST_ASS_RENDER_UNLOCK (render);
1695 ret = gst_pad_event_default (pad, parent, event);
1697 case GST_EVENT_FLUSH_STOP:
1698 GST_ASS_RENDER_LOCK (render);
1699 GST_INFO_OBJECT (render, "video flush stop");
1700 render->video_flushing = FALSE;
1701 render->video_eos = FALSE;
1702 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
1703 GST_ASS_RENDER_UNLOCK (render);
1704 ret = gst_pad_event_default (pad, parent, event);
1707 ret = gst_pad_event_default (pad, parent, event);
/* gst_ass_render_query_video:
 * Query handler for the video sink pad.  Answers CAPS queries from the
 * element's own caps logic; everything else goes to the default handler. */
1715 gst_ass_render_query_video (GstPad * pad, GstObject * parent, GstQuery * query)
1717 gboolean res = FALSE;
1719 switch (GST_QUERY_TYPE (query)) {
1720 case GST_QUERY_CAPS:
1722 GstCaps *filter, *caps;
1724 gst_query_parse_caps (query, &filter);
1726 gst_ass_render_get_videosink_caps (pad, (GstAssRender *) parent,
/* The returned caps are set on the query and our ref released. */
1728 gst_query_set_caps_result (query, caps);
1729 gst_caps_unref (caps);
1734 res = gst_pad_query_default (pad, parent, query);
/* gst_ass_render_event_text:
 * Event handler for the subtitle sink pad: applies text caps, tracks the
 * subtitle TIME segment, handles GAP/EOS/flush state and wakes up the video
 * chain whenever it might be blocked waiting on subtitle data. */
1742 gst_ass_render_event_text (GstPad * pad, GstObject * parent, GstEvent * event)
1744 gboolean ret = FALSE;
1745 GstAssRender *render = GST_ASS_RENDER (parent);
1747 GST_DEBUG_OBJECT (pad, "received text event %" GST_PTR_FORMAT, event);
1749 switch (GST_EVENT_TYPE (event)) {
1750 case GST_EVENT_CAPS:
1754 gst_event_parse_caps (event, &caps);
/* CAPS is consumed here (not forwarded via the default handler). */
1755 ret = gst_ass_render_setcaps_text (pad, render, caps);
1756 gst_event_unref (event);
1759 case GST_EVENT_SEGMENT:
/* A new segment implicitly clears any previous subtitle EOS state. */
1763 GST_ASS_RENDER_LOCK (render);
1764 render->subtitle_eos = FALSE;
1765 GST_ASS_RENDER_UNLOCK (render);
1767 gst_event_copy_segment (event, &segment);
1769 GST_ASS_RENDER_LOCK (render);
/* Only TIME segments are accepted; others are warned about and dropped. */
1770 if (segment.format == GST_FORMAT_TIME) {
1771 GST_DEBUG_OBJECT (render, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
1772 &render->subtitle_segment);
1774 render->subtitle_segment = segment;
1776 GST_DEBUG_OBJECT (render,
1777 "TEXT SEGMENT after: %" GST_SEGMENT_FORMAT,
1778 &render->subtitle_segment);
1780 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1781 ("received non-TIME newsegment event on subtitle input"));
1784 gst_event_unref (event);
1787 /* wake up the video chain, it might be waiting for a text buffer or
1788 * a text segment update */
1789 GST_ASS_RENDER_BROADCAST (render);
1790 GST_ASS_RENDER_UNLOCK (render);
1793 case GST_EVENT_GAP:{
1794 GstClockTime start, duration;
1796 gst_event_parse_gap (event, &start, &duration);
1797 if (GST_CLOCK_TIME_IS_VALID (duration))
1799 /* we do not expect another buffer until after gap,
1800 * so that is our position now */
1801 GST_ASS_RENDER_LOCK (render);
1802 render->subtitle_segment.position = start;
1804 /* wake up the video chain, it might be waiting for a text buffer or
1805 * a text segment update */
1806 GST_ASS_RENDER_BROADCAST (render);
1807 GST_ASS_RENDER_UNLOCK (render);
1809 gst_event_unref (event);
1813 case GST_EVENT_FLUSH_STOP:
/* Drop all events libass has queued; ass_mutex guards the track. */
1814 g_mutex_lock (&render->ass_mutex);
1815 if (render->ass_track) {
1816 ass_flush_events (render->ass_track);
1818 g_mutex_unlock (&render->ass_mutex);
1819 GST_ASS_RENDER_LOCK (render);
1820 GST_INFO_OBJECT (render, "text flush stop");
1821 render->subtitle_flushing = FALSE;
1822 render->subtitle_eos = FALSE;
/* Clear any queued subtitle buffers and reset the subtitle segment. */
1823 gst_ass_render_pop_text (render);
1824 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
1825 GST_ASS_RENDER_UNLOCK (render);
1826 gst_event_unref (event);
1829 case GST_EVENT_FLUSH_START:
1830 GST_DEBUG_OBJECT (render, "text flush start");
1831 GST_ASS_RENDER_LOCK (render);
1832 render->subtitle_flushing = TRUE;
1833 GST_ASS_RENDER_BROADCAST (render);
1834 GST_ASS_RENDER_UNLOCK (render);
1835 gst_event_unref (event);
1839 GST_ASS_RENDER_LOCK (render);
1840 render->subtitle_eos = TRUE;
1841 GST_INFO_OBJECT (render, "text EOS");
1842 /* wake up the video chain, it might be waiting for a text buffer or
1843 * a text segment update */
1844 GST_ASS_RENDER_BROADCAST (render);
1845 GST_ASS_RENDER_UNLOCK (render);
1846 gst_event_unref (event);
1851 GstTagList *taglist = NULL;
1853 /* tag events may contain attachments which might be fonts */
1854 GST_DEBUG_OBJECT (render, "got TAG event");
1856 gst_event_parse_tag (event, &taglist);
1857 gst_ass_render_handle_tags (render, taglist);
1858 ret = gst_pad_event_default (pad, parent, event);
1862 ret = gst_pad_event_default (pad, parent, event);
/* plugin_init:
 * Plugin entry point: set up the debug categories and register the
 * assrender element with primary rank. */
1870 plugin_init (GstPlugin * plugin)
1872 GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender",
1873 0, "ASS/SSA subtitle renderer");
1874 GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",
1875 0, "ASS/SSA subtitle renderer library");
1877 return gst_element_register (plugin, "assrender",
1878 GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER);
/* Standard GStreamer plugin descriptor; ties plugin_init to the plugin
 * metadata (name, description, version, license, package, origin). */
1881 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1884 "ASS/SSA subtitle renderer",
1885 plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)