2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:element-assrender
24 * Renders timestamped SSA/ASS subtitles on top of a video stream.
27 * <title>Example launch line</title>
29 * gst-launch -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mp3parse ! mad ! audioconvert ! autoaudiosink d. ! queue ! ffdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
38 #include <gst/video/gstvideometa.h>
40 #include "gstassrender.h"
/* Two debug categories: one for the element's own messages and one used to
 * forward log output coming from libass itself. */
44 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
45 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
46 #define GST_CAT_DEFAULT gst_ass_render_debug
48 /* Filter signals and props */
/* Raw video formats the software blending path can handle. */
62 /* FIXME: video-blend.c doesn't support formats with more than 8 bit per
63 * component (which get unpacked into ARGB64 or AYUV64) yet, such as:
64 * v210, v216, UYVP, GRAY16_LE, GRAY16_BE */
65 #define FORMATS "{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, \
66 I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, \
67 NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }"
/* Caps for software blending only ... */
69 #define ASSRENDER_CAPS GST_VIDEO_CAPS_MAKE(FORMATS)
/* ... plus any caps/features at all, usable when downstream supports the
 * overlay-composition meta and does the blending itself. */
71 #define ASSRENDER_ALL_CAPS ASSRENDER_CAPS ";" \
72 GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
74 static GstStaticCaps sw_template_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
/* Pad templates: one video src, one video sink, one text (ASS/SSA) sink. */
76 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
79 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
82 static GstStaticPadTemplate video_sink_factory =
83 GST_STATIC_PAD_TEMPLATE ("video_sink",
86 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
89 static GstStaticPadTemplate text_sink_factory =
90 GST_STATIC_PAD_TEMPLATE ("text_sink",
93 GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
/* Convenience wrappers around the element's mutex/cond that serialize the
 * video and subtitle streaming threads. */
96 #define GST_ASS_RENDER_GET_LOCK(ass) (&GST_ASS_RENDER (ass)->lock)
97 #define GST_ASS_RENDER_GET_COND(ass) (&GST_ASS_RENDER (ass)->cond)
98 #define GST_ASS_RENDER_LOCK(ass) (g_mutex_lock (GST_ASS_RENDER_GET_LOCK (ass)))
99 #define GST_ASS_RENDER_UNLOCK(ass) (g_mutex_unlock (GST_ASS_RENDER_GET_LOCK (ass)))
100 #define GST_ASS_RENDER_WAIT(ass) (g_cond_wait (GST_ASS_RENDER_GET_COND (ass), GST_ASS_RENDER_GET_LOCK (ass)))
101 #define GST_ASS_RENDER_SIGNAL(ass) (g_cond_signal (GST_ASS_RENDER_GET_COND (ass)))
102 #define GST_ASS_RENDER_BROADCAST(ass)(g_cond_broadcast (GST_ASS_RENDER_GET_COND (ass)))
/* Forward declarations for the GObject/GstElement vfuncs and the pad
 * chain/event/query/setcaps functions implemented further below. */
104 static void gst_ass_render_set_property (GObject * object, guint prop_id,
105 const GValue * value, GParamSpec * pspec);
106 static void gst_ass_render_get_property (GObject * object, guint prop_id,
107 GValue * value, GParamSpec * pspec);
109 static void gst_ass_render_finalize (GObject * object);
111 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
112 GstStateChange transition);
/* Register GstAssRender as a plain GstElement subclass. */
114 #define gst_ass_render_parent_class parent_class
115 G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);
117 static GstCaps *gst_ass_render_get_videosink_caps (GstPad * pad,
118 GstAssRender * render, GstCaps * filter);
119 static GstCaps *gst_ass_render_get_src_caps (GstPad * pad,
120 GstAssRender * render, GstCaps * filter);
122 static gboolean gst_ass_render_setcaps_video (GstPad * pad,
123 GstAssRender * render, GstCaps * caps);
124 static gboolean gst_ass_render_setcaps_text (GstPad * pad,
125 GstAssRender * render, GstCaps * caps);
127 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
128 GstObject * parent, GstBuffer * buf);
129 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad,
130 GstObject * parent, GstBuffer * buf);
132 static gboolean gst_ass_render_event_video (GstPad * pad, GstObject * parent,
134 static gboolean gst_ass_render_event_text (GstPad * pad, GstObject * parent,
136 static gboolean gst_ass_render_event_src (GstPad * pad, GstObject * parent,
139 static gboolean gst_ass_render_query_video (GstPad * pad, GstObject * parent,
141 static gboolean gst_ass_render_query_src (GstPad * pad, GstObject * parent,
144 /* initialize the plugin's class */
/* Installs property handlers, the three boolean properties (enable,
 * embeddedfonts, wait-text — all defaulting to TRUE here), the state-change
 * vfunc, the three pad templates, and the element metadata. */
146 gst_ass_render_class_init (GstAssRenderClass * klass)
148 GObjectClass *gobject_class = (GObjectClass *) klass;
149 GstElementClass *gstelement_class = (GstElementClass *) klass;
151 gobject_class->set_property = gst_ass_render_set_property;
152 gobject_class->get_property = gst_ass_render_get_property;
153 gobject_class->finalize = gst_ass_render_finalize;
155 g_object_class_install_property (gobject_class, PROP_ENABLE,
156 g_param_spec_boolean ("enable", "Enable",
157 "Enable rendering of subtitles", TRUE,
158 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
160 g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
161 g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
162 "Extract and use fonts embedded in the stream", TRUE,
163 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* NOTE(review): the pspec default for "wait-text" is TRUE here, but
 * gst_ass_render_init() sets render->wait_text = FALSE — the two defaults
 * disagree; confirm which one is intended. */
165 g_object_class_install_property (gobject_class, PROP_WAIT_TEXT,
166 g_param_spec_boolean ("wait-text", "Wait Text",
167 "Whether to wait for subtitles", TRUE,
168 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
170 gstelement_class->change_state =
171 GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
173 gst_element_class_add_pad_template (gstelement_class,
174 gst_static_pad_template_get (&src_factory));
175 gst_element_class_add_pad_template (gstelement_class,
176 gst_static_pad_template_get (&video_sink_factory));
177 gst_element_class_add_pad_template (gstelement_class,
178 gst_static_pad_template_get (&text_sink_factory));
180 gst_element_class_set_static_metadata (gstelement_class, "ASS/SSA Render",
181 "Mixer/Video/Overlay/Subtitle",
182 "Renders ASS/SSA subtitles with libass",
183 "Benjamin Schmitz <vortex@wolpzone.de>, "
184 "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
/* libass >= 0.9.7 message callback: formats the libass message and forwards
 * it to the gst_ass_render_lib_debug category at a GStreamer level matching
 * the libass level (error/warning/info/debug/log — the dispatching
 * switch/case lines are not visible in this extract). */
187 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
189 _libass_message_cb (gint level, const gchar * fmt, va_list args,
192 gchar *message = g_strdup_vprintf (fmt, args);
195 GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
197 GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
199 GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
201 GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
203 GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
/* Instance init: creates the three pads with their chain/event/query
 * functions, initializes locks, segments and default flags, and sets up the
 * libass library + renderer instances. */
210 gst_ass_render_init (GstAssRender * render)
212 GST_DEBUG_OBJECT (render, "init");
214 render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
215 render->video_sinkpad =
216 gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
217 render->text_sinkpad =
218 gst_pad_new_from_static_template (&text_sink_factory, "text_sink");
220 gst_pad_set_chain_function (render->video_sinkpad,
221 GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
222 gst_pad_set_chain_function (render->text_sinkpad,
223 GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));
225 gst_pad_set_event_function (render->video_sinkpad,
226 GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
227 gst_pad_set_event_function (render->text_sinkpad,
228 GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
229 gst_pad_set_event_function (render->srcpad,
230 GST_DEBUG_FUNCPTR (gst_ass_render_event_src));
232 gst_pad_set_query_function (render->srcpad,
233 GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
234 gst_pad_set_query_function (render->video_sinkpad,
235 GST_DEBUG_FUNCPTR (gst_ass_render_query_video));
/* Pass allocation queries from the src pad through to upstream. */
237 GST_PAD_SET_PROXY_ALLOCATION (render->video_sinkpad);
239 gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
240 gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
241 gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);
243 gst_video_info_init (&render->info);
245 g_mutex_init (&render->lock);
246 g_cond_init (&render->cond);
248 render->renderer_init_ok = FALSE;
249 render->track_init_ok = FALSE;
250 render->enable = TRUE;
251 render->embeddedfonts = TRUE;
/* NOTE(review): FALSE here vs. TRUE default in the property pspec. */
252 render->wait_text = FALSE;
254 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
255 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
/* ass_mutex guards all libass objects (library, renderer, track). */
257 g_mutex_init (&render->ass_mutex);
258 render->ass_library = ass_library_init ();
259 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
260 ass_set_message_cb (render->ass_library, _libass_message_cb, render);
262 ass_set_extract_fonts (render->ass_library, 1);
264 render->ass_renderer = ass_renderer_init (render->ass_library);
265 if (!render->ass_renderer) {
266 GST_WARNING_OBJECT (render, "cannot create renderer instance");
/* Renderer creation failure is treated as fatal at init time. */
267 g_assert_not_reached ();
270 render->ass_track = NULL;
272 GST_DEBUG_OBJECT (render, "init complete");
/* GObject finalize: releases locks, the ass track/renderer/library (in that
 * order), then chains up to the parent class. */
276 gst_ass_render_finalize (GObject * object)
278 GstAssRender *render = GST_ASS_RENDER (object);
280 g_mutex_clear (&render->lock);
281 g_cond_clear (&render->cond);
283 if (render->ass_track) {
284 ass_free_track (render->ass_track);
287 if (render->ass_renderer) {
288 ass_renderer_done (render->ass_renderer);
291 if (render->ass_library) {
292 ass_library_done (render->ass_library);
295 g_mutex_clear (&render->ass_mutex);
297 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Property setter; takes the render lock so streaming threads see consistent
 * values. "embeddedfonts" is additionally pushed into libass under ass_mutex. */
301 gst_ass_render_set_property (GObject * object, guint prop_id,
302 const GValue * value, GParamSpec * pspec)
304 GstAssRender *render = GST_ASS_RENDER (object);
306 GST_ASS_RENDER_LOCK (render);
309 render->enable = g_value_get_boolean (value);
311 case PROP_EMBEDDEDFONTS:
312 render->embeddedfonts = g_value_get_boolean (value);
313 g_mutex_lock (&render->ass_mutex);
314 ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
315 g_mutex_unlock (&render->ass_mutex);
318 render->wait_text = g_value_get_boolean (value);
321 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
324 GST_ASS_RENDER_UNLOCK (render);
/* Property getter; mirrors the setter, reading each flag under the render
 * lock. */
328 gst_ass_render_get_property (GObject * object, guint prop_id,
329 GValue * value, GParamSpec * pspec)
331 GstAssRender *render = GST_ASS_RENDER (object);
333 GST_ASS_RENDER_LOCK (render);
336 g_value_set_boolean (value, render->enable);
338 case PROP_EMBEDDEDFONTS:
339 g_value_set_boolean (value, render->embeddedfonts);
342 g_value_set_boolean (value, render->wait_text);
345 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
348 GST_ASS_RENDER_UNLOCK (render);
351 /* Called with lock held */
/* Drops the currently queued subtitle buffer (if any) and broadcasts on the
 * cond so a text chain blocked waiting for the buffer to be consumed can
 * continue. */
353 gst_ass_render_pop_text (GstAssRender * render)
355 if (render->subtitle_pending) {
356 GST_DEBUG_OBJECT (render, "releasing text buffer %p",
357 render->subtitle_pending);
358 gst_buffer_unref (render->subtitle_pending);
359 render->subtitle_pending = NULL;
362 /* Let the text task know we used that buffer */
363 GST_ASS_RENDER_BROADCAST (render);
/* State-change vfunc.  Downward (PAUSED->READY): before chaining up, set the
 * flushing flags and pop any pending text so blocked chains wake up; after
 * chaining up, free the ass track and composition and reset the init flags.
 * Upward (READY->PAUSED): clear flushing/EOS flags and reset both segments. */
366 static GstStateChangeReturn
367 gst_ass_render_change_state (GstElement * element, GstStateChange transition)
369 GstAssRender *render = GST_ASS_RENDER (element);
370 GstStateChangeReturn ret;
372 switch (transition) {
373 case GST_STATE_CHANGE_PAUSED_TO_READY:
374 GST_ASS_RENDER_LOCK (render);
375 render->subtitle_flushing = TRUE;
376 render->video_flushing = TRUE;
377 gst_ass_render_pop_text (render);
378 GST_ASS_RENDER_UNLOCK (render);
384 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
385 if (ret == GST_STATE_CHANGE_FAILURE)
388 switch (transition) {
389 case GST_STATE_CHANGE_PAUSED_TO_READY:
390 g_mutex_lock (&render->ass_mutex);
391 if (render->ass_track)
392 ass_free_track (render->ass_track);
393 render->ass_track = NULL;
394 if (render->composition) {
395 gst_video_overlay_composition_unref (render->composition);
396 render->composition = NULL;
398 render->track_init_ok = FALSE;
399 render->renderer_init_ok = FALSE;
400 g_mutex_unlock (&render->ass_mutex);
402 case GST_STATE_CHANGE_READY_TO_PAUSED:
403 GST_ASS_RENDER_LOCK (render);
404 render->subtitle_flushing = FALSE;
405 render->video_flushing = FALSE;
406 render->video_eos = FALSE;
407 render->subtitle_eos = FALSE;
408 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
409 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
410 GST_ASS_RENDER_UNLOCK (render);
/* Src pad query handler: answers CAPS queries via
 * gst_ass_render_get_src_caps(); everything else goes to the default
 * handler. */
421 gst_ass_render_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
423 gboolean res = FALSE;
425 switch (GST_QUERY_TYPE (query)) {
428 GstCaps *filter, *caps;
430 gst_query_parse_caps (query, &filter);
431 caps = gst_ass_render_get_src_caps (pad, (GstAssRender *) parent, filter);
432 gst_query_set_caps_result (query, caps);
433 gst_caps_unref (caps);
438 res = gst_pad_query_default (pad, parent, query);
/* Src pad event handler.  For SEEK: if no subtitle track exists, just push
 * the seek upstream on the video pad; otherwise mark both streams flushing
 * (flushing downstream first for flushing seeks), pop any pending text, and
 * forward the seek to both sink pads.  Other events are forwarded to the
 * video sink pad (and the text sink pad too when a track exists). */
446 gst_ass_render_event_src (GstPad * pad, GstObject * parent, GstEvent * event)
448 GstAssRender *render = GST_ASS_RENDER (parent);
449 gboolean ret = FALSE;
451 GST_DEBUG_OBJECT (render, "received src event %" GST_PTR_FORMAT, event);
453 switch (GST_EVENT_TYPE (event)) {
454 case GST_EVENT_SEEK:{
457 if (!render->track_init_ok) {
458 GST_DEBUG_OBJECT (render, "seek received, pushing upstream");
459 ret = gst_pad_push_event (render->video_sinkpad, event);
463 GST_DEBUG_OBJECT (render, "seek received, driving from here");
465 gst_event_parse_seek (event, NULL, NULL, &flags, NULL, NULL, NULL, NULL);
467 /* Flush downstream, only for flushing seek */
468 if (flags & GST_SEEK_FLAG_FLUSH)
469 gst_pad_push_event (render->srcpad, gst_event_new_flush_start ());
471 /* Mark subtitle as flushing, unblocks chains */
472 GST_ASS_RENDER_LOCK (render);
473 render->subtitle_flushing = TRUE;
474 render->video_flushing = TRUE;
475 gst_ass_render_pop_text (render);
476 GST_ASS_RENDER_UNLOCK (render);
478 /* Seek on each sink pad */
479 gst_event_ref (event);
480 ret = gst_pad_push_event (render->video_sinkpad, event);
482 ret = gst_pad_push_event (render->text_sinkpad, event);
484 gst_event_unref (event);
489 if (render->track_init_ok) {
490 gst_event_ref (event);
491 ret = gst_pad_push_event (render->video_sinkpad, event);
492 gst_pad_push_event (render->text_sinkpad, event);
494 ret = gst_pad_push_event (render->video_sinkpad, event);
503 * gst_ass_render_add_feature_and_intersect:
505 * Creates a new #GstCaps containing the (given caps +
506 * given caps feature) + (given caps intersected by the
509 * Returns: the new #GstCaps
/* Caller owns the returned caps; the input caps are not modified. */
512 gst_ass_render_add_feature_and_intersect (GstCaps * caps,
513 const gchar * feature, GstCaps * filter)
518 new_caps = gst_caps_copy (caps);
/* Add the feature to every non-ANY structure of the copy... */
520 caps_size = gst_caps_get_size (new_caps);
521 for (i = 0; i < caps_size; i++) {
522 GstCapsFeatures *features = gst_caps_get_features (new_caps, i);
523 if (!gst_caps_features_is_any (features)) {
524 gst_caps_features_add (features, feature);
/* ...then append the original caps intersected with the filter. */
528 gst_caps_append (new_caps, gst_caps_intersect_full (caps,
529 filter, GST_CAPS_INTERSECT_FIRST));
535 * gst_ass_render_intersect_by_feature:
537 * Creates a new #GstCaps based on the following filtering rule.
539 * For each individual caps contained in given caps, if the
540 * caps uses the given caps feature, keep a version of the caps
541 * with the feature and an another one without. Otherwise, intersect
542 * the caps with the given filter.
544 * Returns: the new #GstCaps
/* Caller owns the returned caps. */
547 gst_ass_render_intersect_by_feature (GstCaps * caps,
548 const gchar * feature, GstCaps * filter)
553 new_caps = gst_caps_new_empty ();
555 caps_size = gst_caps_get_size (caps);
556 for (i = 0; i < caps_size; i++) {
557 GstStructure *caps_structure = gst_caps_get_structure (caps, i);
558 GstCapsFeatures *caps_features =
559 gst_caps_features_copy (gst_caps_get_features (caps, i));
560 GstCaps *filtered_caps;
/* Single-structure caps owning a copy of structure i and its features. */
561 GstCaps *simple_caps =
562 gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
563 gst_caps_set_features (simple_caps, 0, caps_features);
565 if (gst_caps_features_contains (caps_features, feature)) {
/* Keep the with-feature version, then a feature-stripped copy too. */
566 gst_caps_append (new_caps, gst_caps_copy (simple_caps));
568 gst_caps_features_remove (caps_features, feature);
569 filtered_caps = gst_caps_ref (simple_caps);
/* No feature: only the filter-intersected version survives. */
571 filtered_caps = gst_caps_intersect_full (simple_caps, filter,
572 GST_CAPS_INTERSECT_FIRST);
575 gst_caps_unref (simple_caps);
576 gst_caps_append (new_caps, filtered_caps);
/* Computes the caps the video sink pad can accept by querying the src pad's
 * peer: the downstream filter gains the overlay-composition feature (plus a
 * software-caps-intersected variant), the peer result is split by feature,
 * and the result is finally intersected with the incoming filter.  Returns a
 * new caps reference owned by the caller. */
583 gst_ass_render_get_videosink_caps (GstPad * pad, GstAssRender * render,
586 GstPad *srcpad = render->srcpad;
587 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
590 /* filter caps + composition feature + filter caps
591 * filtered by the software caps. */
592 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
593 assrender_filter = gst_ass_render_add_feature_and_intersect (filter,
594 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
595 gst_caps_unref (sw_caps);
597 GST_DEBUG_OBJECT (render, "assrender filter %" GST_PTR_FORMAT,
601 peer_caps = gst_pad_peer_query_caps (srcpad, assrender_filter);
603 if (assrender_filter)
604 gst_caps_unref (assrender_filter);
608 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
610 if (gst_caps_is_any (peer_caps)) {
612 /* if peer returns ANY caps, return filtered src pad template caps */
613 caps = gst_caps_copy (gst_pad_get_pad_template_caps (srcpad));
616 /* duplicate caps which contains the composition into one version with
617 * the meta and one without. Filter the other caps by the software caps */
618 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
619 caps = gst_ass_render_intersect_by_feature (peer_caps,
620 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
621 gst_caps_unref (sw_caps);
624 gst_caps_unref (peer_caps);
627 /* no peer, our padtemplate is enough then */
628 caps = gst_pad_get_pad_template_caps (pad);
/* Finally honour the caller's filter, if one was given. */
632 GstCaps *intersection = gst_caps_intersect_full (filter, caps,
633 GST_CAPS_INTERSECT_FIRST);
634 gst_caps_unref (caps);
638 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
/* Mirror of get_videosink_caps for the src pad: queries the video sink's
 * peer (upstream), splitting the filter by the overlay-composition feature
 * and adding the feature to the peer result, then intersects with the
 * caller's filter.  Returns a new caps reference owned by the caller. */
644 gst_ass_render_get_src_caps (GstPad * pad, GstAssRender * render,
647 GstPad *sinkpad = render->video_sinkpad;
648 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
651 /* duplicate filter caps which contains the composition into one version
652 * with the meta and one without. Filter the other caps by the software
654 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
656 gst_ass_render_intersect_by_feature (filter,
657 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
658 gst_caps_unref (sw_caps);
661 peer_caps = gst_pad_peer_query_caps (sinkpad, assrender_filter);
663 if (assrender_filter)
664 gst_caps_unref (assrender_filter);
668 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
670 if (gst_caps_is_any (peer_caps)) {
672 /* if peer returns ANY caps, return filtered sink pad template caps */
673 caps = gst_caps_copy (gst_pad_get_pad_template_caps (sinkpad));
677 /* return upstream caps + composition feature + upstream caps
678 * filtered by the software caps. */
679 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
680 caps = gst_ass_render_add_feature_and_intersect (peer_caps,
681 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
682 gst_caps_unref (sw_caps);
685 gst_caps_unref (peer_caps);
688 /* no peer, our padtemplate is enough then */
689 caps = gst_pad_get_pad_template_caps (pad);
/* Finally honour the caller's filter, if one was given. */
693 GstCaps *intersection;
696 gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
697 gst_caps_unref (caps);
701 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
/* Blits the linked list of 8-bit libass alpha bitmaps into a BGRA buffer
 * with premultiplied alpha.  `data` is the destination (stride bytes per
 * row, `width` x `height` pixels); (x_off, y_off) shifts libass coordinates
 * into the destination.  The destination is zeroed first, and each image is
 * clipped against the buffer bounds before blending. */
707 blit_bgra_premultiplied (GstAssRender * render, ASS_Image * ass_image,
708 guint8 * data, gint width, gint height, gint stride, gint x_off, gint y_off)
711 gint alpha, r, g, b, k;
719 memset (data, 0, stride * height);
722 dst_x = ass_image->dst_x + x_off;
723 dst_y = ass_image->dst_y + y_off;
/* Image entirely outside the buffer: skip it. */
725 if (dst_y >= height || dst_x >= width)
/* libass colour is RGBA packed in a 32-bit int; its 'alpha' byte is
 * transparency, so opacity = 255 - a. */
728 alpha = 255 - (ass_image->color & 0xff);
729 r = ((ass_image->color) >> 24) & 0xff;
730 g = ((ass_image->color) >> 16) & 0xff;
731 b = ((ass_image->color) >> 8) & 0xff;
732 src = ass_image->bitmap;
733 dst = data + dst_y * stride + dst_x * 4;
/* Clip the blit rectangle to the destination. */
735 w = MIN (ass_image->w, width - dst_x);
736 h = MIN (ass_image->h, height - dst_y);
737 src_skip = ass_image->stride - w;
738 dst_skip = stride - w * 4;
740 for (y = 0; y < h; y++) {
741 for (x = 0; x < w; x++) {
/* k = effective per-pixel opacity (bitmap coverage * colour alpha). */
742 k = src[0] * alpha / 255;
/* Premultiplied store for an empty destination pixel... */
745 dst[2] = (k * r) / 255;
746 dst[1] = (k * g) / 255;
747 dst[0] = (k * b) / 255;
/* ...or standard premultiplied over-compositing otherwise. */
749 dst[3] = k + (255 - k) * dst[3] / 255;
750 dst[2] = (k * r + (255 - k) * dst[2]) / 255;
751 dst[1] = (k * g + (255 - k) * dst[1]) / 255;
752 dst[0] = (k * b + (255 - k) * dst[0]) / 255;
762 ass_image = ass_image->next;
764 GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
/* TRUE if `incaps` is a subset of the software-blending caps, i.e. this
 * element can blend the overlay itself for that format. */
768 gst_ass_render_can_handle_caps (GstCaps * incaps)
770 static GstStaticCaps static_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
774 caps = gst_static_caps_get (&static_caps);
775 ret = gst_caps_is_subset (incaps, caps);
776 gst_caps_unref (caps);
/* Video sink setcaps.  Negotiates whether downstream takes the
 * overlay-composition meta (trying caps with the meta added, falling back to
 * the original otherwise), sets the src caps, checks via an allocation query
 * whether the composition meta should be attached to buffers, and finally
 * configures the libass renderer (frame size, aspect ratio, fonts, margins)
 * under ass_mutex. */
782 gst_ass_render_setcaps_video (GstPad * pad, GstAssRender * render,
786 gboolean ret = FALSE;
787 gint par_n = 1, par_d = 1;
790 gboolean attach = FALSE;
791 gboolean caps_has_meta = TRUE;
793 GstCaps *original_caps = caps;
795 if (!gst_video_info_from_caps (&info, caps))
801 /* Try to use the overlay meta if possible */
802 f = gst_caps_get_features (caps, 0);
804 /* if the caps doesn't have the overlay meta, we query if downstream
805 * accepts it before trying the version without the meta
806 * If upstream already is using the meta then we can only use it */
808 || !gst_caps_features_contains (f,
809 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION)) {
810 GstCaps *overlay_caps;
812 /* In this case we added the meta, but we can work without it
813 * so preserve the original caps so we can use it as a fallback */
814 overlay_caps = gst_caps_copy (caps);
816 f = gst_caps_get_features (overlay_caps, 0);
817 gst_caps_features_add (f,
818 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
820 ret = gst_pad_peer_query_accept_caps (render->srcpad, overlay_caps);
821 GST_DEBUG_OBJECT (render, "Downstream accepts the overlay meta: %d", ret);
823 gst_caps_unref (caps);
827 /* fallback to the original */
828 gst_caps_unref (overlay_caps);
829 caps_has_meta = FALSE;
833 GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, caps);
834 ret = gst_pad_set_caps (render->srcpad, caps);
835 gst_caps_unref (caps);
840 render->width = info.width;
841 render->height = info.height;
/* Ask downstream whether it actually supports the composition meta in the
 * allocation query, not just on caps. */
843 query = gst_query_new_allocation (caps, FALSE);
844 if (caps_has_meta && gst_pad_peer_query (render->srcpad, query)) {
845 if (gst_query_find_allocation_meta (query,
846 GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL))
849 gst_query_unref (query);
851 render->attach_compo_to_buffer = attach;
855 /* Some elements (fakesink) claim to accept the meta on caps but won't
856 put it in the allocation query result, this leads below
857 check to fail. Prevent this by removing the meta from caps */
858 caps = original_caps;
859 ret = gst_pad_set_caps (render->srcpad, caps);
/* Without meta attachment we must blend ourselves, so the format must be
 * one the software path handles. */
863 if (!gst_ass_render_can_handle_caps (caps))
864 goto unsupported_caps;
867 g_mutex_lock (&render->ass_mutex);
868 ass_set_frame_size (render->ass_renderer, render->width, render->height);
/* NOTE(review): par_n/par_d remain at their 1/1 defaults in the visible
 * code; upstream reads them from the video info's pixel-aspect-ratio —
 * confirm the assignment is just missing from this extract. */
870 dar = (((gdouble) par_n) * ((gdouble) render->width))
871 / (((gdouble) par_d) * ((gdouble) render->height));
872 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
873 ass_set_aspect_ratio (render->ass_renderer, dar);
875 ass_set_aspect_ratio (render->ass_renderer,
876 dar, ((gdouble) render->width) / ((gdouble) render->height));
878 ass_set_font_scale (render->ass_renderer, 1.0);
879 ass_set_hinting (render->ass_renderer, ASS_HINTING_LIGHT);
881 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
882 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif");
883 ass_set_fonts (render->ass_renderer, NULL, "Sans");
885 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
886 ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
888 ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
889 ass_set_use_margins (render->ass_renderer, 0);
890 g_mutex_unlock (&render->ass_mutex);
892 render->renderer_init_ok = TRUE;
894 GST_DEBUG_OBJECT (render, "ass renderer setup complete");
/* Error paths (labels not visible in this extract). */
903 GST_ERROR_OBJECT (render, "Can't parse caps: %" GST_PTR_FORMAT, caps);
909 GST_ERROR_OBJECT (render, "Unsupported caps: %" GST_PTR_FORMAT, caps);
/* Text sink setcaps: creates the ass track (if needed) and, when the caps
 * carry a "codec_data" buffer, feeds it to libass as the codec-private
 * section (styles, script info).  All libass access under ass_mutex. */
916 gst_ass_render_setcaps_text (GstPad * pad, GstAssRender * render,
919 GstStructure *structure;
923 gboolean ret = FALSE;
925 structure = gst_caps_get_structure (caps, 0);
927 GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,
930 value = gst_structure_get_value (structure, "codec_data");
932 g_mutex_lock (&render->ass_mutex);
934 priv = gst_value_get_buffer (value);
/* NOTE(review): returning here with FALSE leaves ass_mutex locked —
 * confirm against the full source whether this early return is reachable. */
935 g_return_val_if_fail (priv != NULL, FALSE);
937 gst_buffer_map (priv, &map, GST_MAP_READ);
939 if (!render->ass_track)
940 render->ass_track = ass_new_track (render->ass_library);
942 ass_process_codec_private (render->ass_track, (char *) map.data, map.size);
944 gst_buffer_unmap (priv, &map);
946 GST_DEBUG_OBJECT (render, "ass track created");
948 render->track_init_ok = TRUE;
951 } else if (!render->ass_track) {
/* No codec_data: start with an empty track. */
952 render->ass_track = ass_new_track (render->ass_library);
954 render->track_init_ok = TRUE;
958 g_mutex_unlock (&render->ass_mutex);
/* Feeds one subtitle buffer to libass.  running_time/duration are converted
 * to the millisecond timestamps libass expects before calling
 * ass_process_chunk() under ass_mutex. */
965 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
966 GstClockTime running_time, GstClockTime duration)
969 gdouble pts_start, pts_end;
971 pts_start = running_time;
972 pts_start /= GST_MSECOND;
974 pts_end /= GST_MSECOND;
976 GST_DEBUG_OBJECT (render,
977 "Processing subtitles with running time %" GST_TIME_FORMAT
978 " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
979 GST_TIME_ARGS (duration));
981 gst_buffer_map (buffer, &map, GST_MAP_READ);
983 g_mutex_lock (&render->ass_mutex);
984 ass_process_chunk (render->ass_track, (gchar *) map.data, map.size,
986 g_mutex_unlock (&render->ass_mutex);
988 gst_buffer_unmap (buffer, &map);
/* Packs the libass image list into one GstVideoOverlayComposition: computes
 * the bounding box of all images (clamped to the video size), allocates one
 * BGRA buffer for it, blits the images premultiplied, and wraps the buffer
 * in a single overlay rectangle/composition.  Returns NULL on allocation or
 * map failure; caller owns the returned composition. */
991 static GstVideoOverlayComposition *
992 gst_ass_render_composite_overlay (GstAssRender * render, ASS_Image * images)
994 GstVideoOverlayComposition *composition;
995 GstVideoOverlayRectangle *rectangle;
1011 /* find bounding box of all images, to limit the overlay rectangle size */
1012 for (image = images; image; image = image->next) {
1013 if (min_x > image->dst_x)
1014 min_x = image->dst_x;
1015 if (min_y > image->dst_y)
1016 min_y = image->dst_y;
1017 if (max_x < image->dst_x + image->w)
1018 max_x = image->dst_x + image->w;
1019 if (max_y < image->dst_y + image->h)
1020 max_y = image->dst_y + image->h;
1023 width = MIN (max_x - min_x, render->width);
1024 height = MIN (max_y - min_y, render->height);
1026 GST_DEBUG_OBJECT (render, "render overlay rectangle %dx%d%+d%+d",
1027 width, height, min_x, min_y);
/* 4 bytes per pixel (BGRA). */
1029 buffer = gst_buffer_new_and_alloc (4 * width * height);
1031 GST_ERROR_OBJECT (render, "Failed to allocate overlay buffer");
1035 vmeta = gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
1036 GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, width, height);
1038 if (!gst_video_meta_map (vmeta, 0, &map, &data, &stride, GST_MAP_READWRITE)) {
1039 GST_ERROR_OBJECT (render, "Failed to map overlay buffer");
1040 gst_buffer_unref (buffer);
/* Blit with negative offsets so images land relative to the bbox origin. */
1044 blit_bgra_premultiplied (render, images, data, width, height, stride,
1046 gst_video_meta_unmap (vmeta, 0, &map);
1048 rectangle = gst_video_overlay_rectangle_new_raw (buffer, min_x, min_y,
1049 width, height, GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
1051 gst_buffer_unref (buffer);
1053 composition = gst_video_overlay_composition_new (rectangle);
1054 gst_video_overlay_rectangle_unref (rectangle);
/* Pushes a video frame downstream, overlaying the current composition:
 * either attached as an overlay-composition meta (when downstream supports
 * it) or software-blended into the (writable) frame.  Takes ownership of
 * video_frame and returns the flow result of gst_pad_push(). */
1060 gst_ass_render_push_frame (GstAssRender * render, GstBuffer * video_frame)
1062 GstVideoFrame frame;
/* Nothing to overlay: push the frame through untouched. */
1064 if (!render->composition)
1067 video_frame = gst_buffer_make_writable (video_frame);
1069 if (render->attach_compo_to_buffer) {
1070 gst_buffer_add_video_overlay_composition_meta (video_frame,
1071 render->composition);
1075 if (!gst_video_frame_map (&frame, &render->info, video_frame,
1076 GST_MAP_READWRITE)) {
1077 GST_WARNING_OBJECT (render, "failed to map video frame for blending");
1081 gst_video_overlay_composition_blend (render->composition, &frame);
1082 gst_video_frame_unmap (&frame);
1085 return gst_pad_push (render->srcpad, video_frame);
1088 static GstFlowReturn
1089 gst_ass_render_chain_video (GstPad * pad, GstObject * parent,
1092 GstAssRender *render = GST_ASS_RENDER (parent);
1093 GstFlowReturn ret = GST_FLOW_OK;
1094 gboolean in_seg = FALSE;
1095 guint64 start, stop, clip_start = 0, clip_stop = 0;
1096 ASS_Image *ass_image;
1098 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1099 goto missing_timestamp;
1101 /* ignore buffers that are outside of the current segment */
1102 start = GST_BUFFER_TIMESTAMP (buffer);
1104 if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
1105 stop = GST_CLOCK_TIME_NONE;
1107 stop = start + GST_BUFFER_DURATION (buffer);
1110 /* segment_clip() will adjust start unconditionally to segment_start if
1111 * no stop time is provided, so handle this ourselves */
1112 if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
1113 goto out_of_segment;
1116 gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
1117 &clip_start, &clip_stop);
1120 goto out_of_segment;
1122 /* if the buffer is only partially in the segment, fix up stamps */
1123 if (clip_start != start || (stop != -1 && clip_stop != stop)) {
1124 GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
1125 buffer = gst_buffer_make_writable (buffer);
1126 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1128 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1131 /* now, after we've done the clipping, fix up end time if there's no
1132 * duration (we only use those estimated values internally though, we
1133 * don't want to set bogus values on the buffer itself) */
1135 if (render->info.fps_n && render->info.fps_d) {
1136 GST_DEBUG_OBJECT (render, "estimating duration based on framerate");
1138 start + gst_util_uint64_scale_int (GST_SECOND, render->info.fps_d,
1139 render->info.fps_n);
1141 GST_WARNING_OBJECT (render, "no duration, assuming minimal duration");
1142 stop = start + 1; /* we need to assume some interval */
1148 GST_ASS_RENDER_LOCK (render);
1150 if (render->video_flushing)
1153 if (render->video_eos)
1156 if (render->renderer_init_ok && render->track_init_ok && render->enable) {
1157 /* Text pad linked, check if we have a text buffer queued */
1158 if (render->subtitle_pending) {
1159 GstClockTime text_start = GST_CLOCK_TIME_NONE;
1160 GstClockTime text_end = GST_CLOCK_TIME_NONE;
1161 GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
1162 GstClockTime text_running_time_end = GST_CLOCK_TIME_NONE;
1163 GstClockTime vid_running_time, vid_running_time_end;
1167 /* if the text buffer isn't stamped right, pop it off the
1168 * queue and display it for the current video frame only */
1169 if (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending) ||
1170 !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending)) {
1171 GST_WARNING_OBJECT (render,
1172 "Got text buffer with invalid timestamp or duration");
1173 gst_ass_render_pop_text (render);
1174 GST_ASS_RENDER_UNLOCK (render);
1175 goto wait_for_text_buf;
1178 text_start = GST_BUFFER_TIMESTAMP (render->subtitle_pending);
1179 text_end = text_start + GST_BUFFER_DURATION (render->subtitle_pending);
1182 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1184 vid_running_time_end =
1185 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1188 /* If timestamp and duration are valid */
1190 gst_segment_to_running_time (&render->video_segment,
1191 GST_FORMAT_TIME, text_start);
1192 text_running_time_end =
1193 gst_segment_to_running_time (&render->video_segment,
1194 GST_FORMAT_TIME, text_end);
1196 GST_LOG_OBJECT (render, "T: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1197 GST_TIME_ARGS (text_running_time),
1198 GST_TIME_ARGS (text_running_time_end));
1199 GST_LOG_OBJECT (render, "V: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1200 GST_TIME_ARGS (vid_running_time),
1201 GST_TIME_ARGS (vid_running_time_end));
1204 if (text_running_time_end <= vid_running_time) {
1205 GST_DEBUG_OBJECT (render, "text buffer too old, popping");
1206 gst_ass_render_pop_text (render);
1207 GST_ASS_RENDER_UNLOCK (render);
1208 goto wait_for_text_buf;
1211 if (render->need_process) {
1212 GST_DEBUG_OBJECT (render, "process text buffer");
1213 gst_ass_render_process_text (render, render->subtitle_pending,
1214 text_running_time, text_running_time_end - text_running_time);
1215 render->need_process = FALSE;
1218 GST_ASS_RENDER_UNLOCK (render);
1220 /* libass needs timestamps in ms */
1221 timestamp = vid_running_time / GST_MSECOND;
1223 g_mutex_lock (&render->ass_mutex);
1224 ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
1225 timestamp, &changed);
1226 g_mutex_unlock (&render->ass_mutex);
1228 if ((!ass_image || changed) && render->composition) {
1229 GST_DEBUG_OBJECT (render, "release overlay (changed %d)", changed);
1230 gst_video_overlay_composition_unref (render->composition);
1231 render->composition = NULL;
1234 if (ass_image != NULL) {
1235 if (!render->composition)
1236 render->composition = gst_ass_render_composite_overlay (render,
1239 GST_DEBUG_OBJECT (render, "nothing to render right now");
1242 /* Push the video frame */
1243 ret = gst_ass_render_push_frame (render, buffer);
1245 if (text_running_time_end <= vid_running_time_end) {
1246 GST_ASS_RENDER_LOCK (render);
1247 gst_ass_render_pop_text (render);
1248 GST_ASS_RENDER_UNLOCK (render);
1251 gboolean wait_for_text_buf = TRUE;
1253 if (render->subtitle_eos)
1254 wait_for_text_buf = FALSE;
1256 if (!render->wait_text)
1257 wait_for_text_buf = FALSE;
1259 /* Text pad linked, but no text buffer available - what now? */
1260 if (render->subtitle_segment.format == GST_FORMAT_TIME) {
1261 GstClockTime text_start_running_time, text_last_stop_running_time;
1262 GstClockTime vid_running_time;
1265 gst_segment_to_running_time (&render->video_segment,
1266 GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
1267 text_start_running_time =
1268 gst_segment_to_running_time (&render->subtitle_segment,
1269 GST_FORMAT_TIME, render->subtitle_segment.start);
1270 text_last_stop_running_time =
1271 gst_segment_to_running_time (&render->subtitle_segment,
1272 GST_FORMAT_TIME, render->subtitle_segment.position);
1274 if ((GST_CLOCK_TIME_IS_VALID (text_start_running_time) &&
1275 vid_running_time < text_start_running_time) ||
1276 (GST_CLOCK_TIME_IS_VALID (text_last_stop_running_time) &&
1277 vid_running_time < text_last_stop_running_time)) {
1278 wait_for_text_buf = FALSE;
1282 if (wait_for_text_buf) {
1283 GST_DEBUG_OBJECT (render, "no text buffer, need to wait for one");
1284 GST_ASS_RENDER_WAIT (render);
1285 GST_DEBUG_OBJECT (render, "resuming");
1286 GST_ASS_RENDER_UNLOCK (render);
1287 goto wait_for_text_buf;
1289 GST_ASS_RENDER_UNLOCK (render);
1290 GST_LOG_OBJECT (render, "no need to wait for a text buffer");
1291 ret = gst_pad_push (render->srcpad, buffer);
1295 GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");
1297 GST_ASS_RENDER_UNLOCK (render);
1298 ret = gst_pad_push (render->srcpad, buffer);
1302 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p ret=%d", buffer, ret);
1304 /* Update last_stop */
1305 render->video_segment.position = clip_start;
1311 GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
1312 gst_buffer_unref (buffer);
1317 GST_ASS_RENDER_UNLOCK (render);
1318 GST_DEBUG_OBJECT (render, "flushing, discarding buffer");
1319 gst_buffer_unref (buffer);
1320 return GST_FLOW_FLUSHING;
1324 GST_ASS_RENDER_UNLOCK (render);
1325 GST_DEBUG_OBJECT (render, "eos, discarding buffer");
1326 gst_buffer_unref (buffer);
1327 return GST_FLOW_EOS;
1331 GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
1332 gst_buffer_unref (buffer);
1337 static GstFlowReturn
1338 gst_ass_render_chain_text (GstPad * pad, GstObject * parent, GstBuffer * buffer)
1340 GstFlowReturn ret = GST_FLOW_OK;
1341 GstAssRender *render = GST_ASS_RENDER (parent);
1342 gboolean in_seg = FALSE;
1343 guint64 clip_start = 0, clip_stop = 0;
1345 GST_DEBUG_OBJECT (render, "entering chain for buffer %p", buffer);
1347 GST_ASS_RENDER_LOCK (render);
1349 if (render->subtitle_flushing) {
1350 GST_ASS_RENDER_UNLOCK (render);
1351 ret = GST_FLOW_FLUSHING;
1352 GST_LOG_OBJECT (render, "text flushing");
1356 if (render->subtitle_eos) {
1357 GST_ASS_RENDER_UNLOCK (render);
1359 GST_LOG_OBJECT (render, "text EOS");
1363 if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
1366 if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
1367 stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
1369 stop = GST_CLOCK_TIME_NONE;
1371 in_seg = gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME,
1372 GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
1378 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1379 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1380 else if (GST_BUFFER_DURATION_IS_VALID (buffer))
1381 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1383 if (render->subtitle_pending
1384 && (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending)
1385 || !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending))) {
1386 gst_buffer_unref (render->subtitle_pending);
1387 render->subtitle_pending = NULL;
1388 GST_ASS_RENDER_BROADCAST (render);
1390 /* Wait for the previous buffer to go away */
1391 while (render->subtitle_pending != NULL) {
1392 GST_DEBUG ("Pad %s:%s has a buffer queued, waiting",
1393 GST_DEBUG_PAD_NAME (pad));
1394 GST_ASS_RENDER_WAIT (render);
1395 GST_DEBUG ("Pad %s:%s resuming", GST_DEBUG_PAD_NAME (pad));
1396 if (render->subtitle_flushing) {
1397 GST_ASS_RENDER_UNLOCK (render);
1398 ret = GST_FLOW_FLUSHING;
1404 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1405 render->subtitle_segment.position = clip_start;
1407 GST_DEBUG_OBJECT (render,
1408 "New buffer arrived for timestamp %" GST_TIME_FORMAT,
1409 GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
1410 render->subtitle_pending = gst_buffer_ref (buffer);
1411 render->need_process = TRUE;
1413 /* in case the video chain is waiting for a text buffer, wake it up */
1414 GST_ASS_RENDER_BROADCAST (render);
1417 GST_ASS_RENDER_UNLOCK (render);
1420 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p", buffer);
1422 gst_buffer_unref (buffer);
1427 gst_ass_render_handle_tag_sample (GstAssRender * render, GstSample * sample)
1429 static const gchar *mimetypes[] = {
1430 "application/x-font-ttf",
1431 "application/x-font-otf",
1432 "application/x-truetype-font"
1434 static const gchar *extensions[] = {
1440 const GstStructure *structure;
1441 gboolean valid_mimetype, valid_extension;
1443 const gchar *filename;
1445 buf = gst_sample_get_buffer (sample);
1446 structure = gst_sample_get_info (sample);
1448 if (!buf || !structure)
1451 valid_mimetype = FALSE;
1452 valid_extension = FALSE;
1454 for (i = 0; i < G_N_ELEMENTS (mimetypes); i++) {
1455 if (gst_structure_has_name (structure, mimetypes[i])) {
1456 valid_mimetype = TRUE;
1461 filename = gst_structure_get_string (structure, "filename");
1465 if (!valid_mimetype) {
1466 guint len = strlen (filename);
1467 const gchar *extension = filename + len - 4;
1468 for (i = 0; i < G_N_ELEMENTS (extensions); i++) {
1469 if (g_ascii_strcasecmp (extension, extensions[i]) == 0) {
1470 valid_extension = TRUE;
1476 if (valid_mimetype || valid_extension) {
1479 g_mutex_lock (&render->ass_mutex);
1480 gst_buffer_map (buf, &map, GST_MAP_READ);
1481 ass_add_font (render->ass_library, (gchar *) filename,
1482 (gchar *) map.data, map.size);
1483 gst_buffer_unmap (buf, &map);
1484 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1485 g_mutex_unlock (&render->ass_mutex);
1490 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1497 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1498 if (tag_size > 0 && render->embeddedfonts) {
1502 GST_DEBUG_OBJECT (render, "TAG event has attachments");
1504 for (index = 0; index < tag_size; index++) {
1505 if (gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, index,
1507 gst_ass_render_handle_tag_sample (render, sample);
1508 gst_sample_unref (sample);
1515 gst_ass_render_event_video (GstPad * pad, GstObject * parent, GstEvent * event)
1517 gboolean ret = FALSE;
1518 GstAssRender *render = GST_ASS_RENDER (parent);
1520 GST_DEBUG_OBJECT (pad, "received video event %" GST_PTR_FORMAT, event);
1522 switch (GST_EVENT_TYPE (event)) {
1523 case GST_EVENT_CAPS:
1527 gst_event_parse_caps (event, &caps);
1528 ret = gst_ass_render_setcaps_video (pad, render, caps);
1529 gst_event_unref (event);
1532 case GST_EVENT_SEGMENT:
1536 GST_DEBUG_OBJECT (render, "received new segment");
1538 gst_event_copy_segment (event, &segment);
1540 if (segment.format == GST_FORMAT_TIME) {
1541 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1542 &render->video_segment);
1544 render->video_segment = segment;
1546 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
1547 &render->video_segment);
1548 ret = gst_pad_event_default (pad, parent, event);
1550 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1551 ("received non-TIME newsegment event on video input"));
1553 gst_event_unref (event);
1559 GstTagList *taglist = NULL;
1561 /* tag events may contain attachments which might be fonts */
1562 GST_DEBUG_OBJECT (render, "got TAG event");
1564 gst_event_parse_tag (event, &taglist);
1565 gst_ass_render_handle_tags (render, taglist);
1566 ret = gst_pad_event_default (pad, parent, event);
1570 GST_ASS_RENDER_LOCK (render);
1571 GST_INFO_OBJECT (render, "video EOS");
1572 render->video_eos = TRUE;
1573 GST_ASS_RENDER_UNLOCK (render);
1574 ret = gst_pad_event_default (pad, parent, event);
1576 case GST_EVENT_FLUSH_START:
1577 GST_ASS_RENDER_LOCK (render);
1578 GST_INFO_OBJECT (render, "video flush start");
1579 render->video_flushing = TRUE;
1580 GST_ASS_RENDER_BROADCAST (render);
1581 GST_ASS_RENDER_UNLOCK (render);
1582 ret = gst_pad_event_default (pad, parent, event);
1584 case GST_EVENT_FLUSH_STOP:
1585 GST_ASS_RENDER_LOCK (render);
1586 GST_INFO_OBJECT (render, "video flush stop");
1587 render->video_flushing = FALSE;
1588 render->video_eos = FALSE;
1589 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
1590 GST_ASS_RENDER_UNLOCK (render);
1591 ret = gst_pad_event_default (pad, parent, event);
1594 ret = gst_pad_event_default (pad, parent, event);
1602 gst_ass_render_query_video (GstPad * pad, GstObject * parent, GstQuery * query)
1604 gboolean res = FALSE;
1606 switch (GST_QUERY_TYPE (query)) {
1607 case GST_QUERY_CAPS:
1609 GstCaps *filter, *caps;
1611 gst_query_parse_caps (query, &filter);
1613 gst_ass_render_get_videosink_caps (pad, (GstAssRender *) parent,
1615 gst_query_set_caps_result (query, caps);
1616 gst_caps_unref (caps);
1621 res = gst_pad_query_default (pad, parent, query);
1629 gst_ass_render_event_text (GstPad * pad, GstObject * parent, GstEvent * event)
1632 gboolean ret = FALSE;
1633 GstAssRender *render = GST_ASS_RENDER (parent);
1635 GST_DEBUG_OBJECT (pad, "received text event %" GST_PTR_FORMAT, event);
1637 switch (GST_EVENT_TYPE (event)) {
1638 case GST_EVENT_CAPS:
1642 gst_event_parse_caps (event, &caps);
1643 ret = gst_ass_render_setcaps_text (pad, render, caps);
1644 gst_event_unref (event);
1647 case GST_EVENT_SEGMENT:
1651 GST_ASS_RENDER_LOCK (render);
1652 render->subtitle_eos = FALSE;
1653 GST_ASS_RENDER_UNLOCK (render);
1655 gst_event_copy_segment (event, &segment);
1657 GST_ASS_RENDER_LOCK (render);
1658 if (segment.format == GST_FORMAT_TIME) {
1659 GST_DEBUG_OBJECT (render, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
1660 &render->subtitle_segment);
1662 render->subtitle_segment = segment;
1664 GST_DEBUG_OBJECT (render,
1665 "TEXT SEGMENT after: %" GST_SEGMENT_FORMAT,
1666 &render->subtitle_segment);
1668 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1669 ("received non-TIME newsegment event on subtitle input"));
1672 gst_event_unref (event);
1675 /* wake up the video chain, it might be waiting for a text buffer or
1676 * a text segment update */
1677 GST_ASS_RENDER_BROADCAST (render);
1678 GST_ASS_RENDER_UNLOCK (render);
1681 case GST_EVENT_GAP:{
1682 GstClockTime start, duration;
1684 gst_event_parse_gap (event, &start, &duration);
1685 if (GST_CLOCK_TIME_IS_VALID (duration))
1687 /* we do not expect another buffer until after gap,
1688 * so that is our position now */
1689 GST_ASS_RENDER_LOCK (render);
1690 render->subtitle_segment.position = start;
1692 /* wake up the video chain, it might be waiting for a text buffer or
1693 * a text segment update */
1694 GST_ASS_RENDER_BROADCAST (render);
1695 GST_ASS_RENDER_UNLOCK (render);
1697 gst_event_unref (event);
1701 case GST_EVENT_FLUSH_STOP:
1702 GST_ASS_RENDER_LOCK (render);
1703 GST_INFO_OBJECT (render, "text flush stop");
1704 render->subtitle_flushing = FALSE;
1705 render->subtitle_eos = FALSE;
1706 gst_ass_render_pop_text (render);
1707 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
1708 GST_ASS_RENDER_UNLOCK (render);
1709 gst_event_unref (event);
1712 case GST_EVENT_FLUSH_START:
1713 GST_DEBUG_OBJECT (render, "text flush start");
1714 g_mutex_lock (&render->ass_mutex);
1715 if (render->ass_track) {
1716 /* delete any events on the ass_track */
1717 for (i = 0; i < render->ass_track->n_events; i++) {
1718 GST_DEBUG_OBJECT (render, "deleted event with eid %i", i);
1719 ass_free_event (render->ass_track, i);
1721 render->ass_track->n_events = 0;
1722 GST_DEBUG_OBJECT (render, "done flushing");
1724 g_mutex_unlock (&render->ass_mutex);
1725 GST_ASS_RENDER_LOCK (render);
1726 render->subtitle_flushing = TRUE;
1727 GST_ASS_RENDER_BROADCAST (render);
1728 GST_ASS_RENDER_UNLOCK (render);
1729 gst_event_unref (event);
1733 GST_ASS_RENDER_LOCK (render);
1734 render->subtitle_eos = TRUE;
1735 GST_INFO_OBJECT (render, "text EOS");
1736 /* wake up the video chain, it might be waiting for a text buffer or
1737 * a text segment update */
1738 GST_ASS_RENDER_BROADCAST (render);
1739 GST_ASS_RENDER_UNLOCK (render);
1740 gst_event_unref (event);
1745 GstTagList *taglist = NULL;
1747 /* tag events may contain attachments which might be fonts */
1748 GST_DEBUG_OBJECT (render, "got TAG event");
1750 gst_event_parse_tag (event, &taglist);
1751 gst_ass_render_handle_tags (render, taglist);
1752 ret = gst_pad_event_default (pad, parent, event);
1756 ret = gst_pad_event_default (pad, parent, event);
1764 plugin_init (GstPlugin * plugin)
1766 GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender",
1767 0, "ASS/SSA subtitle renderer");
1768 GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",
1769 0, "ASS/SSA subtitle renderer library");
1771 return gst_element_register (plugin, "assrender",
1772 GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER);
1775 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1778 "ASS/SSA subtitle renderer",
1779 plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)