2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:element-assrender
24 * Renders timestamped SSA/ASS subtitles on top of a video stream.
27 * <title>Example launch line</title>
29 * gst-launch -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mp3parse ! mad ! audioconvert ! autoaudiosink d. ! queue ! ffdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
34 /* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
35 * with newer GLib versions (>= 2.31.0) */
36 #define GLIB_DISABLE_DEPRECATION_WARNINGS
42 #include <gst/video/gstvideometa.h>
44 #include "gstassrender.h"
/* Debug categories: one for the element's own logging and a separate one for
 * messages forwarded from libass (see _libass_message_cb). */
48 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
49 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
50 #define GST_CAT_DEFAULT gst_ass_render_debug
52 /* Filter signals and props */
66 /* FIXME: video-blend.c doesn't support formats with more than 8 bit per
67 * component (which get unpacked into ARGB64 or AYUV64) yet, such as:
68 * v210, v216, UYVP, GRAY16_LE, GRAY16_BE */
/* Raw video formats the software blending path can handle directly. */
69 #define FORMATS "{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, \
70 I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, \
71 NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }"
73 #define ASSRENDER_CAPS GST_VIDEO_CAPS_MAKE(FORMATS)
/* Software caps, plus any format when the peer supports the
 * overlay-composition meta (downstream does the blending itself). */
75 #define ASSRENDER_ALL_CAPS ASSRENDER_CAPS ";" \
76 GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
/* Caps we can blend in software; used to filter negotiation below. */
78 static GstStaticCaps sw_template_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
80 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
83 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
86 static GstStaticPadTemplate video_sink_factory =
87 GST_STATIC_PAD_TEMPLATE ("video_sink",
90 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
93 static GstStaticPadTemplate text_sink_factory =
94 GST_STATIC_PAD_TEMPLATE ("text_sink",
97 GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
/* Convenience wrappers around the render object's mutex/cond, used to
 * serialize the video and text chains and to block/wake the text chain. */
100 #define GST_ASS_RENDER_GET_LOCK(ass) (&GST_ASS_RENDER (ass)->lock)
101 #define GST_ASS_RENDER_GET_COND(ass) (&GST_ASS_RENDER (ass)->cond)
102 #define GST_ASS_RENDER_LOCK(ass) (g_mutex_lock (GST_ASS_RENDER_GET_LOCK (ass)))
103 #define GST_ASS_RENDER_UNLOCK(ass) (g_mutex_unlock (GST_ASS_RENDER_GET_LOCK (ass)))
104 #define GST_ASS_RENDER_WAIT(ass) (g_cond_wait (GST_ASS_RENDER_GET_COND (ass), GST_ASS_RENDER_GET_LOCK (ass)))
105 #define GST_ASS_RENDER_SIGNAL(ass) (g_cond_signal (GST_ASS_RENDER_GET_COND (ass)))
106 #define GST_ASS_RENDER_BROADCAST(ass)(g_cond_broadcast (GST_ASS_RENDER_GET_COND (ass)))
/* Forward declarations for GObject vfuncs and pad functions (chain, event,
 * query, setcaps) implemented further down in this file. */
108 static void gst_ass_render_set_property (GObject * object, guint prop_id,
109 const GValue * value, GParamSpec * pspec);
110 static void gst_ass_render_get_property (GObject * object, guint prop_id,
111 GValue * value, GParamSpec * pspec);
113 static void gst_ass_render_finalize (GObject * object);
115 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
116 GstStateChange transition);
118 #define gst_ass_render_parent_class parent_class
119 G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);
121 static GstCaps *gst_ass_render_get_videosink_caps (GstPad * pad,
123 static GstCaps *gst_ass_render_get_src_caps (GstPad * pad, GstCaps * filter);
125 static gboolean gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps);
126 static gboolean gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps);
128 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
129 GstObject * parent, GstBuffer * buf);
130 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad,
131 GstObject * parent, GstBuffer * buf);
133 static gboolean gst_ass_render_event_video (GstPad * pad, GstObject * parent,
135 static gboolean gst_ass_render_event_text (GstPad * pad, GstObject * parent,
137 static gboolean gst_ass_render_event_src (GstPad * pad, GstObject * parent,
140 static gboolean gst_ass_render_query_video (GstPad * pad, GstObject * parent,
142 static gboolean gst_ass_render_query_src (GstPad * pad, GstObject * parent,
145 /* initialize the plugin's class: installs the three boolean properties,
146 * hooks up the state-change vfunc, registers the pad templates and sets
147 * the element metadata. */
147 gst_ass_render_class_init (GstAssRenderClass * klass)
149 GObjectClass *gobject_class = (GObjectClass *) klass;
150 GstElementClass *gstelement_class = (GstElementClass *) klass;
152 gobject_class->set_property = gst_ass_render_set_property;
153 gobject_class->get_property = gst_ass_render_get_property;
154 gobject_class->finalize = gst_ass_render_finalize;
156 g_object_class_install_property (gobject_class, PROP_ENABLE,
157 g_param_spec_boolean ("enable", "Enable",
158 "Enable rendering of subtitles", TRUE,
159 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
161 g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
162 g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
163 "Extract and use fonts embedded in the stream", TRUE,
164 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
166 g_object_class_install_property (gobject_class, PROP_WAIT_TEXT,
167 g_param_spec_boolean ("wait-text", "Wait Text",
/* NOTE(review): the pspec default here is TRUE, but gst_ass_render_init()
 * sets render->wait_text = FALSE — confirm which default is intended. */
168 "Whether to wait for subtitles", TRUE,
169 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
171 gstelement_class->change_state =
172 GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
174 gst_element_class_add_pad_template (gstelement_class,
175 gst_static_pad_template_get (&src_factory));
176 gst_element_class_add_pad_template (gstelement_class,
177 gst_static_pad_template_get (&video_sink_factory));
178 gst_element_class_add_pad_template (gstelement_class,
179 gst_static_pad_template_get (&text_sink_factory));
181 gst_element_class_set_static_metadata (gstelement_class, "ASS/SSA Render",
182 "Mixer/Video/Overlay/Subtitle",
183 "Renders ASS/SSA subtitles with libass",
184 "Benjamin Schmitz <vortex@wolpzone.de>, "
185 "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
188 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
/* Callback registered with ass_set_message_cb(): formats the libass message
 * and routes it into the dedicated gst_ass_render_lib_debug category.
 * NOTE(review): the level-dispatch conditions are not visible in this
 * excerpt; presumably increasing libass `level` maps to the progressively
 * less severe ERROR/WARNING/INFO/DEBUG/LOG calls below — confirm thresholds. */
190 _libass_message_cb (gint level, const gchar * fmt, va_list args,
193 gchar *message = g_strdup_vprintf (fmt, args);
196 GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
198 GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
200 GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
202 GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
204 GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
/* Instance init: creates the three pads with their chain/event/query
 * functions, initializes locks, segments and defaults, and sets up the
 * libass library and renderer instances. */
211 gst_ass_render_init (GstAssRender * render)
213 GST_DEBUG_OBJECT (render, "init");
215 render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
216 render->video_sinkpad =
217 gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
218 render->text_sinkpad =
219 gst_pad_new_from_static_template (&text_sink_factory, "text_sink");
221 gst_pad_set_chain_function (render->video_sinkpad,
222 GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
223 gst_pad_set_chain_function (render->text_sinkpad,
224 GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));
226 gst_pad_set_event_function (render->video_sinkpad,
227 GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
228 gst_pad_set_event_function (render->text_sinkpad,
229 GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
230 gst_pad_set_event_function (render->srcpad,
231 GST_DEBUG_FUNCPTR (gst_ass_render_event_src));
233 gst_pad_set_query_function (render->srcpad,
234 GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
235 gst_pad_set_query_function (render->video_sinkpad,
236 GST_DEBUG_FUNCPTR (gst_ass_render_query_video));
/* Forward allocation queries from the src pad through the video sink pad. */
238 GST_PAD_SET_PROXY_ALLOCATION (render->video_sinkpad);
240 gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
241 gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
242 gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);
244 gst_video_info_init (&render->info);
246 g_mutex_init (&render->lock);
247 g_cond_init (&render->cond);
249 render->renderer_init_ok = FALSE;
250 render->track_init_ok = FALSE;
251 render->enable = TRUE;
252 render->embeddedfonts = TRUE;
/* NOTE(review): FALSE here disagrees with the wait-text pspec default (TRUE)
 * installed in class_init — confirm intended default. */
253 render->wait_text = FALSE;
255 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
256 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
/* ass_mutex guards all libass library/renderer/track accesses. */
258 g_mutex_init (&render->ass_mutex);
259 render->ass_library = ass_library_init ();
260 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
261 ass_set_message_cb (render->ass_library, _libass_message_cb, render);
263 ass_set_extract_fonts (render->ass_library, 1);
265 render->ass_renderer = ass_renderer_init (render->ass_library);
266 if (!render->ass_renderer) {
267 GST_WARNING_OBJECT (render, "cannot create renderer instance");
268 g_assert_not_reached ();
271 render->ass_track = NULL;
273 GST_DEBUG_OBJECT (render, "init complete");
/* GObject finalize: releases locks, the libass track/renderer/library
 * (in that order), then chains up to the parent class. */
277 gst_ass_render_finalize (GObject * object)
279 GstAssRender *render = GST_ASS_RENDER (object);
281 g_mutex_clear (&render->lock);
282 g_cond_clear (&render->cond);
284 if (render->ass_track) {
285 ass_free_track (render->ass_track);
288 if (render->ass_renderer) {
289 ass_renderer_done (render->ass_renderer);
292 if (render->ass_library) {
293 ass_library_done (render->ass_library);
296 g_mutex_clear (&render->ass_mutex);
298 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Property setter; all writes happen under the render lock. Changing
 * embeddedfonts is additionally pushed into libass under ass_mutex. */
302 gst_ass_render_set_property (GObject * object, guint prop_id,
303 const GValue * value, GParamSpec * pspec)
305 GstAssRender *render = GST_ASS_RENDER (object);
307 GST_ASS_RENDER_LOCK (render);
310 render->enable = g_value_get_boolean (value);
312 case PROP_EMBEDDEDFONTS:
313 render->embeddedfonts = g_value_get_boolean (value);
314 g_mutex_lock (&render->ass_mutex);
315 ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
316 g_mutex_unlock (&render->ass_mutex);
319 render->wait_text = g_value_get_boolean (value);
322 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
325 GST_ASS_RENDER_UNLOCK (render);
/* Property getter; reads are taken under the render lock for consistency
 * with the setter. */
329 gst_ass_render_get_property (GObject * object, guint prop_id,
330 GValue * value, GParamSpec * pspec)
332 GstAssRender *render = GST_ASS_RENDER (object);
334 GST_ASS_RENDER_LOCK (render);
337 g_value_set_boolean (value, render->enable);
339 case PROP_EMBEDDEDFONTS:
340 g_value_set_boolean (value, render->embeddedfonts);
343 g_value_set_boolean (value, render->wait_text);
346 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
349 GST_ASS_RENDER_UNLOCK (render);
352 /* Called with lock held. Drops the queued subtitle buffer (if any) and
353 * broadcasts on the condition so a text chain blocked in
354 * GST_ASS_RENDER_WAIT can make progress. */
354 gst_ass_render_pop_text (GstAssRender * render)
356 if (render->subtitle_pending) {
357 GST_DEBUG_OBJECT (render, "releasing text buffer %p",
358 render->subtitle_pending);
359 gst_buffer_unref (render->subtitle_pending);
360 render->subtitle_pending = NULL;
363 /* Let the text task know we used that buffer */
364 GST_ASS_RENDER_BROADCAST (render);
367 static GstStateChangeReturn
/* State change: on PAUSED->READY, flush both chains *before* chaining up so
 * blocked chain functions unblock; after chaining up, free libass state and
 * the cached composition. READY->PAUSED resets flush/EOS flags and segments. */
368 gst_ass_render_change_state (GstElement * element, GstStateChange transition)
370 GstAssRender *render = GST_ASS_RENDER (element);
371 GstStateChangeReturn ret;
373 switch (transition) {
374 case GST_STATE_CHANGE_PAUSED_TO_READY:
375 GST_ASS_RENDER_LOCK (render);
376 render->subtitle_flushing = TRUE;
377 render->video_flushing = TRUE;
378 gst_ass_render_pop_text (render);
379 GST_ASS_RENDER_UNLOCK (render);
385 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
386 if (ret == GST_STATE_CHANGE_FAILURE)
389 switch (transition) {
390 case GST_STATE_CHANGE_PAUSED_TO_READY:
391 g_mutex_lock (&render->ass_mutex);
392 if (render->ass_track)
393 ass_free_track (render->ass_track);
394 render->ass_track = NULL;
395 if (render->composition) {
396 gst_video_overlay_composition_unref (render->composition);
397 render->composition = NULL;
399 render->track_init_ok = FALSE;
400 render->renderer_init_ok = FALSE;
401 g_mutex_unlock (&render->ass_mutex);
403 case GST_STATE_CHANGE_READY_TO_PAUSED:
404 GST_ASS_RENDER_LOCK (render);
405 render->subtitle_flushing = FALSE;
406 render->video_flushing = FALSE;
407 render->video_eos = FALSE;
408 render->subtitle_eos = FALSE;
409 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
410 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
411 GST_ASS_RENDER_UNLOCK (render);
/* Src pad query handler: answers CAPS queries via
 * gst_ass_render_get_src_caps(), defers everything else to the default. */
422 gst_ass_render_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
424 gboolean res = FALSE;
426 switch (GST_QUERY_TYPE (query)) {
429 GstCaps *filter, *caps;
431 gst_query_parse_caps (query, &filter);
432 caps = gst_ass_render_get_src_caps (pad, filter);
433 gst_query_set_caps_result (query, caps);
434 gst_caps_unref (caps);
439 res = gst_pad_query_default (pad, parent, query);
/* Src pad event handler. Seeks are handled specially: if a subtitle track
 * exists, flush both chains, then push the seek to both sink pads (ref'd
 * once because two pushes consume two refs); otherwise just forward
 * upstream via the video sink pad. */
447 gst_ass_render_event_src (GstPad * pad, GstObject * parent, GstEvent * event)
449 GstAssRender *render = GST_ASS_RENDER (parent);
450 gboolean ret = FALSE;
452 GST_DEBUG_OBJECT (render, "received src event %" GST_PTR_FORMAT, event);
454 switch (GST_EVENT_TYPE (event)) {
455 case GST_EVENT_SEEK:{
458 if (!render->track_init_ok) {
459 GST_DEBUG_OBJECT (render, "seek received, pushing upstream");
460 ret = gst_pad_push_event (render->video_sinkpad, event);
464 GST_DEBUG_OBJECT (render, "seek received, driving from here");
466 gst_event_parse_seek (event, NULL, NULL, &flags, NULL, NULL, NULL, NULL);
468 /* Flush downstream, only for flushing seek */
469 if (flags & GST_SEEK_FLAG_FLUSH)
470 gst_pad_push_event (render->srcpad, gst_event_new_flush_start ());
472 /* Mark subtitle as flushing, unblocks chains */
473 GST_ASS_RENDER_LOCK (render);
474 render->subtitle_flushing = TRUE;
475 render->video_flushing = TRUE;
476 gst_ass_render_pop_text (render);
477 GST_ASS_RENDER_UNLOCK (render);
479 /* Seek on each sink pad */
480 gst_event_ref (event);
481 ret = gst_pad_push_event (render->video_sinkpad, event);
483 ret = gst_pad_push_event (render->text_sinkpad, event);
485 gst_event_unref (event);
/* Other events: forward to the text pad too when a track exists, video
 * sink pad only otherwise. */
490 if (render->track_init_ok) {
491 gst_event_ref (event);
492 ret = gst_pad_push_event (render->video_sinkpad, event);
493 gst_pad_push_event (render->text_sinkpad, event);
495 ret = gst_pad_push_event (render->video_sinkpad, event);
504 * gst_ass_render_add_feature_and_intersect:
506 * Creates a new #GstCaps containing the (given caps +
507 * given caps feature) + (given caps intersected by the
510 * Returns: the new #GstCaps; caller owns the returned reference.
513 gst_ass_render_add_feature_and_intersect (GstCaps * caps,
514 const gchar * feature, GstCaps * filter)
519 new_caps = gst_caps_copy (caps);
/* Tag every non-ANY structure of the copy with the requested feature. */
521 caps_size = gst_caps_get_size (new_caps);
522 for (i = 0; i < caps_size; i++) {
523 GstCapsFeatures *features = gst_caps_get_features (new_caps, i);
524 if (!gst_caps_features_is_any (features)) {
525 gst_caps_features_add (features, feature);
/* Append the original caps restricted to the filter (takes ownership of
 * the intersection result). */
529 gst_caps_append (new_caps, gst_caps_intersect_full (caps,
530 filter, GST_CAPS_INTERSECT_FIRST));
536 * gst_ass_render_intersect_by_feature:
538 * Creates a new #GstCaps based on the following filtering rule.
540 * For each individual caps contained in given caps, if the
541 * caps uses the given caps feature, keep a version of the caps
542 * with the feature and another one without. Otherwise, intersect
543 * the caps with the given filter.
545 * Returns: the new #GstCaps; caller owns the returned reference.
548 gst_ass_render_intersect_by_feature (GstCaps * caps,
549 const gchar * feature, GstCaps * filter)
554 new_caps = gst_caps_new_empty ();
556 caps_size = gst_caps_get_size (caps);
557 for (i = 0; i < caps_size; i++) {
558 GstStructure *caps_structure = gst_caps_get_structure (caps, i);
/* Copy the features: simple_caps takes ownership below. */
559 GstCapsFeatures *caps_features =
560 gst_caps_features_copy (gst_caps_get_features (caps, i));
561 GstCaps *filtered_caps;
562 GstCaps *simple_caps =
563 gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
564 gst_caps_set_features (simple_caps, 0, caps_features);
566 if (gst_caps_features_contains (caps_features, feature)) {
/* Keep the featured version, then strip the feature for a plain copy. */
567 gst_caps_append (new_caps, gst_caps_copy (simple_caps));
569 gst_caps_features_remove (caps_features, feature);
570 filtered_caps = gst_caps_ref (simple_caps);
572 filtered_caps = gst_caps_intersect_full (simple_caps, filter,
573 GST_CAPS_INTERSECT_FIRST);
576 gst_caps_unref (simple_caps);
577 gst_caps_append (new_caps, filtered_caps);
/* Computes the caps for the video sink pad by querying the src pad's peer.
 * The filter (if any) is expanded with the overlay-composition meta feature
 * before querying downstream; the peer's answer is then split into
 * with-meta/without-meta variants via intersect_by_feature. */
584 gst_ass_render_get_videosink_caps (GstPad * pad, GstCaps * filter)
586 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
587 GstPad *srcpad = render->srcpad;
588 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
591 /* filter caps + composition feature + filter caps
592 * filtered by the software caps. */
593 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
594 assrender_filter = gst_ass_render_add_feature_and_intersect (filter,
595 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
596 gst_caps_unref (sw_caps);
598 GST_DEBUG_OBJECT (render, "assrender filter %" GST_PTR_FORMAT,
602 peer_caps = gst_pad_peer_query_caps (srcpad, assrender_filter);
604 if (assrender_filter)
605 gst_caps_unref (assrender_filter);
609 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
611 if (gst_caps_is_any (peer_caps)) {
613 /* if peer returns ANY caps, return filtered src pad template caps */
614 caps = gst_caps_copy (gst_pad_get_pad_template_caps (srcpad));
616 GstCaps *intersection = gst_caps_intersect_full (filter, caps,
617 GST_CAPS_INTERSECT_FIRST);
618 gst_caps_unref (caps);
624 /* duplicate caps which contains the composition into one version with
625 * the meta and one without. Filter the other caps by the software caps */
626 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
627 caps = gst_ass_render_intersect_by_feature (peer_caps,
628 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
629 gst_caps_unref (sw_caps);
632 gst_caps_unref (peer_caps);
635 /* no peer, our padtemplate is enough then */
636 caps = gst_pad_get_pad_template_caps (pad);
638 GstCaps *intersection;
641 gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
642 gst_caps_unref (caps);
647 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
649 gst_object_unref (render);
/* Computes the caps for the src pad by querying the video sink pad's peer.
 * Mirror image of get_videosink_caps: the filter is split by the
 * overlay-composition feature before querying upstream, and the peer's
 * answer gets the feature added on the way back. */
655 gst_ass_render_get_src_caps (GstPad * pad, GstCaps * filter)
657 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
658 GstPad *sinkpad = render->video_sinkpad;
659 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
662 /* duplicate filter caps which contains the composition into one version
663 * with the meta and one without. Filter the other caps by the software
665 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
667 gst_ass_render_intersect_by_feature (filter,
668 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
669 gst_caps_unref (sw_caps);
672 peer_caps = gst_pad_peer_query_caps (sinkpad, assrender_filter);
674 if (assrender_filter)
675 gst_caps_unref (assrender_filter);
679 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
681 if (gst_caps_is_any (peer_caps)) {
683 /* if peer returns ANY caps, return filtered sink pad template caps */
684 caps = gst_caps_copy (gst_pad_get_pad_template_caps (sinkpad));
686 GstCaps *intersection = gst_caps_intersect_full (filter, caps,
687 GST_CAPS_INTERSECT_FIRST);
688 gst_caps_unref (caps);
694 /* return upstream caps + composition feature + upstream caps
695 * filtered by the software caps. */
696 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
697 caps = gst_ass_render_add_feature_and_intersect (peer_caps,
698 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
699 gst_caps_unref (sw_caps);
702 gst_caps_unref (peer_caps);
705 /* no peer, our padtemplate is enough then */
706 caps = gst_pad_get_pad_template_caps (pad);
708 GstCaps *intersection;
711 gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
712 gst_caps_unref (caps);
717 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
719 gst_object_unref (render);
/* Blits the linked list of libass images into a premultiplied BGRA buffer
 * of width x height with the given stride. ass_image->color is packed as
 * RGBA with the low byte holding transparency (hence alpha = 255 - low
 * byte); per-pixel coverage comes from the 8-bit bitmap. Pixels are
 * composited over what is already in `data` (cleared to zero first). */
725 blit_bgra_premultiplied (GstAssRender * render, ASS_Image * ass_image,
726 guint8 * data, gint width, gint height, gint stride, gint x_off, gint y_off)
729 gint alpha, r, g, b, k;
737 memset (data, 0, stride * height);
740 dst_x = ass_image->dst_x + x_off;
741 dst_y = ass_image->dst_y + y_off;
/* Skip images that start entirely outside the destination rectangle. */
743 if (dst_y >= height || dst_x >= width)
746 alpha = 255 - (ass_image->color & 0xff);
747 r = ((ass_image->color) >> 24) & 0xff;
748 g = ((ass_image->color) >> 16) & 0xff;
749 b = ((ass_image->color) >> 8) & 0xff;
750 src = ass_image->bitmap;
751 dst = data + dst_y * stride + dst_x * 4;
/* Clip blit dimensions to the destination and precompute row skips. */
753 w = MIN (ass_image->w, width - dst_x);
754 h = MIN (ass_image->h, height - dst_y);
755 src_skip = ass_image->stride - w;
756 dst_skip = stride - w * 4;
758 for (y = 0; y < h; y++) {
759 for (x = 0; x < w; x++) {
/* k = effective source alpha (coverage * color alpha). */
760 k = src[0] * alpha / 255;
763 dst[2] = (k * r) / 255;
764 dst[1] = (k * g) / 255;
765 dst[0] = (k * b) / 255;
/* Standard "over" compositing in premultiplied space. */
767 dst[3] = k + (255 - k) * dst[3] / 255;
768 dst[2] = (k * r + (255 - k) * dst[2]) / 255;
769 dst[1] = (k * g + (255 - k) * dst[1]) / 255;
770 dst[0] = (k * b + (255 - k) * dst[0]) / 255;
780 ass_image = ass_image->next;
782 GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
/* Returns whether incaps is a subset of the formats the software blending
 * path supports (ASSRENDER_CAPS). */
786 gst_ass_render_can_handle_caps (GstCaps * incaps)
788 static GstStaticCaps static_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
792 caps = gst_static_caps_get (&static_caps);
793 ret = gst_caps_is_subset (incaps, caps);
794 gst_caps_unref (caps);
/* Configures the element for new video caps: propagates them downstream,
 * probes via an ALLOCATION query whether the peer accepts the
 * overlay-composition meta (then we only attach, never blend), and sets up
 * the libass renderer (frame size, aspect, fonts, margins). */
800 gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps)
802 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
804 gboolean ret = FALSE;
806 gint par_n = 1, par_d = 1;
808 gboolean attach = FALSE;
810 if (!gst_video_info_from_caps (&info, caps))
815 ret = gst_pad_set_caps (render->srcpad, caps);
819 render->width = info.width;
820 render->height = info.height;
822 query = gst_query_new_allocation (caps, FALSE);
823 if (gst_pad_peer_query (render->srcpad, query)) {
824 if (gst_query_find_allocation_meta (query,
825 GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL))
828 gst_query_unref (query);
830 render->attach_compo_to_buffer = attach;
/* Without meta support downstream we must blend ourselves, which only
 * works for the software formats. */
832 if (!attach && !gst_ass_render_can_handle_caps (caps))
833 goto unsupported_caps;
835 g_mutex_lock (&render->ass_mutex);
836 ass_set_frame_size (render->ass_renderer, render->width, render->height);
/* NOTE(review): par_n/par_d appear to stay at their 1/1 initializers in
 * this excerpt, so the DAR passed to libass ignores the caps PAR —
 * confirm they are meant to come from info.par_n/info.par_d. */
838 dar = (((gdouble) par_n) * ((gdouble) render->width))
839 / (((gdouble) par_d) * ((gdouble) render->height));
840 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
841 ass_set_aspect_ratio (render->ass_renderer, dar);
843 ass_set_aspect_ratio (render->ass_renderer,
844 dar, ((gdouble) render->width) / ((gdouble) render->height));
846 ass_set_font_scale (render->ass_renderer, 1.0);
847 ass_set_hinting (render->ass_renderer, ASS_HINTING_LIGHT);
849 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
850 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif");
851 ass_set_fonts (render->ass_renderer, NULL, "Sans");
853 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
854 ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
856 ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
857 ass_set_use_margins (render->ass_renderer, 0);
858 g_mutex_unlock (&render->ass_mutex);
860 render->renderer_init_ok = TRUE;
862 GST_DEBUG_OBJECT (render, "ass renderer setup complete");
865 gst_object_unref (render);
/* Error paths. */
872 GST_ERROR_OBJECT (render, "Can't parse caps: %" GST_PTR_FORMAT, caps);
878 GST_ERROR_OBJECT (render, "Unsupported caps: %" GST_PTR_FORMAT, caps);
/* Handles caps on the text sink pad: creates the libass track and, if the
 * caps carry a codec_data buffer (ASS/SSA header), feeds it to
 * ass_process_codec_private(). All track access is under ass_mutex. */
885 gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps)
887 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
888 GstStructure *structure;
892 gboolean ret = FALSE;
894 structure = gst_caps_get_structure (caps, 0);
896 GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,
899 value = gst_structure_get_value (structure, "codec_data");
901 g_mutex_lock (&render->ass_mutex);
903 priv = gst_value_get_buffer (value);
/* NOTE(review): returning here leaves ass_mutex locked — confirm against
 * the full source; this early return looks unsafe as shown. */
904 g_return_val_if_fail (priv != NULL, FALSE);
906 gst_buffer_map (priv, &map, GST_MAP_READ);
908 if (!render->ass_track)
909 render->ass_track = ass_new_track (render->ass_library);
911 ass_process_codec_private (render->ass_track, (char *) map.data, map.size);
913 gst_buffer_unmap (priv, &map);
915 GST_DEBUG_OBJECT (render, "ass track created");
917 render->track_init_ok = TRUE;
920 } else if (!render->ass_track) {
921 render->ass_track = ass_new_track (render->ass_library);
923 render->track_init_ok = TRUE;
927 g_mutex_unlock (&render->ass_mutex);
929 gst_object_unref (render);
/* Feeds one subtitle buffer into the libass track. libass expects start
 * time and duration in milliseconds, so running time / duration are scaled
 * by GST_MSECOND. Takes ass_mutex around ass_process_chunk(). */
936 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
937 GstClockTime running_time, GstClockTime duration)
940 gdouble pts_start, pts_end;
942 pts_start = running_time;
943 pts_start /= GST_MSECOND;
945 pts_end /= GST_MSECOND;
947 GST_DEBUG_OBJECT (render,
948 "Processing subtitles with running time %" GST_TIME_FORMAT
949 " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
950 GST_TIME_ARGS (duration));
952 gst_buffer_map (buffer, &map, GST_MAP_READ);
954 g_mutex_lock (&render->ass_mutex);
955 ass_process_chunk (render->ass_track, (gchar *) map.data, map.size,
957 g_mutex_unlock (&render->ass_mutex);
959 gst_buffer_unmap (buffer, &map);
962 static GstVideoOverlayComposition *
/* Builds a GstVideoOverlayComposition from a list of libass images: computes
 * the bounding box of all images, renders them premultiplied-BGRA into one
 * buffer of that size, and wraps it in a single overlay rectangle positioned
 * at (min_x, min_y). Returns NULL on allocation/map failure. */
963 gst_ass_render_composite_overlay (GstAssRender * render, ASS_Image * images)
965 GstVideoOverlayComposition *composition;
966 GstVideoOverlayRectangle *rectangle;
982 /* find bounding box of all images, to limit the overlay rectangle size */
983 for (image = images; image; image = image->next) {
984 if (min_x > image->dst_x)
985 min_x = image->dst_x;
986 if (min_y > image->dst_y)
987 min_y = image->dst_y;
988 if (max_x < image->dst_x + image->w)
989 max_x = image->dst_x + image->w;
990 if (max_y < image->dst_y + image->h)
991 max_y = image->dst_y + image->h;
/* Clamp the rectangle to the video frame dimensions. */
994 width = MIN (max_x - min_x, render->width);
995 height = MIN (max_y - min_y, render->height);
997 GST_DEBUG_OBJECT (render, "render overlay rectangle %dx%d%+d%+d",
998 width, height, min_x, min_y);
1000 buffer = gst_buffer_new_and_alloc (4 * width * height);
1002 GST_ERROR_OBJECT (render, "Failed to allocate overlay buffer");
/* The video meta lets the composition API map the buffer as video. */
1006 vmeta = gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
1007 GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, width, height);
1009 if (!gst_video_meta_map (vmeta, 0, &map, &data, &stride, GST_MAP_READWRITE)) {
1010 GST_ERROR_OBJECT (render, "Failed to map overlay buffer");
1011 gst_buffer_unref (buffer);
/* Shift all images by (-min_x, -min_y) so they land inside the buffer. */
1015 blit_bgra_premultiplied (render, images, data, width, height, stride,
1017 gst_video_meta_unmap (vmeta, 0, &map);
1019 rectangle = gst_video_overlay_rectangle_new_raw (buffer, min_x, min_y,
1020 width, height, GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
/* The rectangle holds its own ref on the buffer; the composition holds
 * one on the rectangle. */
1022 gst_buffer_unref (buffer);
1024 composition = gst_video_overlay_composition_new (rectangle);
1025 gst_video_overlay_rectangle_unref (rectangle);
1030 static GstFlowReturn
1031 gst_ass_render_chain_video (GstPad * pad, GstObject * parent,
1034 GstAssRender *render = GST_ASS_RENDER (parent);
1035 GstFlowReturn ret = GST_FLOW_OK;
1036 gboolean in_seg = FALSE;
1037 guint64 start, stop, clip_start = 0, clip_stop = 0;
1038 ASS_Image *ass_image;
1040 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1041 goto missing_timestamp;
1043 /* ignore buffers that are outside of the current segment */
1044 start = GST_BUFFER_TIMESTAMP (buffer);
1046 if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
1047 stop = GST_CLOCK_TIME_NONE;
1049 stop = start + GST_BUFFER_DURATION (buffer);
1052 /* segment_clip() will adjust start unconditionally to segment_start if
1053 * no stop time is provided, so handle this ourselves */
1054 if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
1055 goto out_of_segment;
1058 gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
1059 &clip_start, &clip_stop);
1062 goto out_of_segment;
1064 /* if the buffer is only partially in the segment, fix up stamps */
1065 if (clip_start != start || (stop != -1 && clip_stop != stop)) {
1066 GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
1067 buffer = gst_buffer_make_writable (buffer);
1068 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1070 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1073 /* now, after we've done the clipping, fix up end time if there's no
1074 * duration (we only use those estimated values internally though, we
1075 * don't want to set bogus values on the buffer itself) */
1077 if (render->info.fps_n && render->info.fps_d) {
1078 GST_DEBUG_OBJECT (render, "estimating duration based on framerate");
1080 start + gst_util_uint64_scale_int (GST_SECOND, render->info.fps_d,
1081 render->info.fps_n);
1083 GST_WARNING_OBJECT (render, "no duration, assuming minimal duration");
1084 stop = start + 1; /* we need to assume some interval */
1090 GST_ASS_RENDER_LOCK (render);
1092 if (render->video_flushing)
1095 if (render->video_eos)
1098 if (render->renderer_init_ok && render->track_init_ok && render->enable) {
1099 /* Text pad linked, check if we have a text buffer queued */
1100 if (render->subtitle_pending) {
1101 GstClockTime text_start = GST_CLOCK_TIME_NONE;
1102 GstClockTime text_end = GST_CLOCK_TIME_NONE;
1103 GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
1104 GstClockTime text_running_time_end = GST_CLOCK_TIME_NONE;
1105 GstClockTime vid_running_time, vid_running_time_end;
1109 /* if the text buffer isn't stamped right, pop it off the
1110 * queue and display it for the current video frame only */
1111 if (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending) ||
1112 !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending)) {
1113 GST_WARNING_OBJECT (render,
1114 "Got text buffer with invalid timestamp or duration");
1115 gst_ass_render_pop_text (render);
1116 GST_ASS_RENDER_UNLOCK (render);
1117 goto wait_for_text_buf;
1120 text_start = GST_BUFFER_TIMESTAMP (render->subtitle_pending);
1121 text_end = text_start + GST_BUFFER_DURATION (render->subtitle_pending);
1124 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1126 vid_running_time_end =
1127 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1130 /* If timestamp and duration are valid */
1132 gst_segment_to_running_time (&render->video_segment,
1133 GST_FORMAT_TIME, text_start);
1134 text_running_time_end =
1135 gst_segment_to_running_time (&render->video_segment,
1136 GST_FORMAT_TIME, text_end);
1138 GST_LOG_OBJECT (render, "T: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1139 GST_TIME_ARGS (text_running_time),
1140 GST_TIME_ARGS (text_running_time_end));
1141 GST_LOG_OBJECT (render, "V: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1142 GST_TIME_ARGS (vid_running_time),
1143 GST_TIME_ARGS (vid_running_time_end));
1146 if (text_running_time_end <= vid_running_time) {
1147 GST_DEBUG_OBJECT (render, "text buffer too old, popping");
1148 gst_ass_render_pop_text (render);
1149 GST_ASS_RENDER_UNLOCK (render);
1150 goto wait_for_text_buf;
1153 if (render->need_process) {
1154 GST_DEBUG_OBJECT (render, "process text buffer");
1155 gst_ass_render_process_text (render, render->subtitle_pending,
1156 text_running_time, text_running_time_end - text_running_time);
1157 render->need_process = FALSE;
1160 GST_ASS_RENDER_UNLOCK (render);
1162 /* libass needs timestamps in ms */
1163 timestamp = vid_running_time / GST_MSECOND;
1165 g_mutex_lock (&render->ass_mutex);
1166 ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
1167 timestamp, &changed);
1168 g_mutex_unlock (&render->ass_mutex);
1170 if ((!ass_image || changed) && render->composition) {
1171 GST_DEBUG_OBJECT (render, "release overlay (changed %d)", changed);
1172 gst_video_overlay_composition_unref (render->composition);
1173 render->composition = NULL;
1176 if (ass_image != NULL) {
1177 if (!render->composition)
1178 render->composition = gst_ass_render_composite_overlay (render,
1181 if (render->composition) {
1182 buffer = gst_buffer_make_writable (buffer);
1183 if (render->attach_compo_to_buffer) {
1184 gst_buffer_add_video_overlay_composition_meta (buffer,
1185 render->composition);
1187 GstVideoFrame frame;
1189 gst_video_frame_map (&frame, &render->info, buffer, GST_MAP_WRITE);
1190 gst_video_overlay_composition_blend (render->composition, &frame);
1191 gst_video_frame_unmap (&frame);
1195 GST_DEBUG_OBJECT (render, "nothing to render right now");
1198 /* Push the video frame */
1199 ret = gst_pad_push (render->srcpad, buffer);
1201 if (text_running_time_end <= vid_running_time_end) {
1202 GST_ASS_RENDER_LOCK (render);
1203 gst_ass_render_pop_text (render);
1204 GST_ASS_RENDER_UNLOCK (render);
1207 gboolean wait_for_text_buf = TRUE;
1209 if (render->subtitle_eos)
1210 wait_for_text_buf = FALSE;
1212 if (!render->wait_text)
1213 wait_for_text_buf = FALSE;
1215 /* Text pad linked, but no text buffer available - what now? */
1216 if (render->subtitle_segment.format == GST_FORMAT_TIME) {
1217 GstClockTime text_start_running_time, text_last_stop_running_time;
1218 GstClockTime vid_running_time;
1221 gst_segment_to_running_time (&render->video_segment,
1222 GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
1223 text_start_running_time =
1224 gst_segment_to_running_time (&render->subtitle_segment,
1225 GST_FORMAT_TIME, render->subtitle_segment.start);
1226 text_last_stop_running_time =
1227 gst_segment_to_running_time (&render->subtitle_segment,
1228 GST_FORMAT_TIME, render->subtitle_segment.position);
1230 if ((GST_CLOCK_TIME_IS_VALID (text_start_running_time) &&
1231 vid_running_time < text_start_running_time) ||
1232 (GST_CLOCK_TIME_IS_VALID (text_last_stop_running_time) &&
1233 vid_running_time < text_last_stop_running_time)) {
1234 wait_for_text_buf = FALSE;
1238 if (wait_for_text_buf) {
1239 GST_DEBUG_OBJECT (render, "no text buffer, need to wait for one");
1240 GST_ASS_RENDER_WAIT (render);
1241 GST_DEBUG_OBJECT (render, "resuming");
1242 GST_ASS_RENDER_UNLOCK (render);
1243 goto wait_for_text_buf;
1245 GST_ASS_RENDER_UNLOCK (render);
1246 GST_LOG_OBJECT (render, "no need to wait for a text buffer");
1247 ret = gst_pad_push (render->srcpad, buffer);
1251 GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");
1253 GST_ASS_RENDER_UNLOCK (render);
1254 ret = gst_pad_push (render->srcpad, buffer);
1258 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p ret=%d", buffer, ret);
1260 /* Update last_stop */
1261 render->video_segment.position = clip_start;
1267 GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
1268 gst_buffer_unref (buffer);
1273 GST_ASS_RENDER_UNLOCK (render);
1274 GST_DEBUG_OBJECT (render, "flushing, discarding buffer");
1275 gst_buffer_unref (buffer);
1276 return GST_FLOW_FLUSHING;
1280 GST_ASS_RENDER_UNLOCK (render);
1281 GST_DEBUG_OBJECT (render, "eos, discarding buffer");
1282 gst_buffer_unref (buffer);
1283 return GST_FLOW_EOS;
1287 GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
1288 gst_buffer_unref (buffer);
/* Chain function for the subtitle (text) sink pad.
 *
 * Clips the incoming subtitle buffer against the subtitle segment, then
 * queues it in render->subtitle_pending for the video chain to pick up.
 * Only one pending buffer is kept: if one is already queued, this thread
 * blocks on the render condition until the video chain consumes it (or a
 * flush wakes us up). Takes ownership of @buffer (unrefs it at the end;
 * the queue holds its own reference). */
1293 static GstFlowReturn
1294 gst_ass_render_chain_text (GstPad * pad, GstObject * parent, GstBuffer * buffer)
1296 GstFlowReturn ret = GST_FLOW_OK;
1297 GstAssRender *render = GST_ASS_RENDER (parent);
1298 gboolean in_seg = FALSE;
1299 guint64 clip_start = 0, clip_stop = 0;
1301 GST_DEBUG_OBJECT (render, "entering chain for buffer %p", buffer);
1303 GST_ASS_RENDER_LOCK (render);
/* While flushing, drop the buffer immediately with FLUSHING. */
1305 if (render->subtitle_flushing) {
1306 GST_ASS_RENDER_UNLOCK (render);
1307 ret = GST_FLOW_FLUSHING;
1308 GST_LOG_OBJECT (render, "text flushing");
/* After EOS on the subtitle stream, no more text buffers are accepted. */
1312 if (render->subtitle_eos) {
1313 GST_ASS_RENDER_UNLOCK (render);
1315 GST_LOG_OBJECT (render, "text EOS");
/* Clip the buffer to the subtitle segment; a buffer without a duration
 * gets an open-ended stop time. */
1319 if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
1322 if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
1323 stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
1325 stop = GST_CLOCK_TIME_NONE;
1327 in_seg = gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME,
1328 GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
/* Rewrite timestamp/duration to the clipped values. */
1334 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1335 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1336 else if (GST_BUFFER_DURATION_IS_VALID (buffer))
1337 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
/* A pending buffer with invalid timing will never be matched against a
 * video frame: throw it away and wake any waiter so we don't deadlock. */
1339 if (render->subtitle_pending
1340 && (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending)
1341 || !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending))) {
1342 gst_buffer_unref (render->subtitle_pending);
1343 render->subtitle_pending = NULL;
1344 GST_ASS_RENDER_BROADCAST (render);
1346 /* Wait for the previous buffer to go away */
1347 while (render->subtitle_pending != NULL) {
1348 GST_DEBUG ("Pad %s:%s has a buffer queued, waiting",
1349 GST_DEBUG_PAD_NAME (pad));
1350 GST_ASS_RENDER_WAIT (render);
1351 GST_DEBUG ("Pad %s:%s resuming", GST_DEBUG_PAD_NAME (pad));
/* A flush may have started while we slept on the condition. */
1352 if (render->subtitle_flushing) {
1353 GST_ASS_RENDER_UNLOCK (render);
1354 ret = GST_FLOW_FLUSHING;
/* Advance the subtitle segment position to the (clipped) start time. */
1360 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1361 render->subtitle_segment.position = clip_start;
1363 GST_DEBUG_OBJECT (render,
1364 "New buffer arrived for timestamp %" GST_TIME_FORMAT,
1365 GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
/* Queue a ref for the video chain and mark it as not yet fed to libass. */
1366 render->subtitle_pending = gst_buffer_ref (buffer);
1367 render->need_process = TRUE;
1369 /* in case the video chain is waiting for a text buffer, wake it up */
1370 GST_ASS_RENDER_BROADCAST (render);
1373 GST_ASS_RENDER_UNLOCK (render);
1376 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p", buffer);
/* Drop the chain function's own reference; the queue keeps its ref. */
1378 gst_buffer_unref (buffer);
/* Scans a tag list for GST_TAG_ATTACHMENT samples and registers any that
 * look like fonts (by mimetype, or by file extension as a fallback) with
 * the libass library via ass_add_font(), so embedded Matroska fonts can
 * be used for rendering. Only runs when the embeddedfonts property is
 * enabled. ass_library access is serialized with ass_mutex. */
1383 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
/* Mimetypes commonly used for font attachments in Matroska files. */
1385 static const gchar *mimetypes[] = {
1386 "application/x-font-ttf",
1387 "application/x-font-otf",
1388 "application/x-truetype-font"
/* Fallback: recognize fonts by filename extension when the mimetype
 * doesn't match (extension list elided from this view). */
1390 static const gchar *extensions[] = {
1399 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1400 if (tag_size > 0 && render->embeddedfonts) {
1403 const GstStructure *structure;
1404 gboolean valid_mimetype, valid_extension;
1406 const gchar *filename;
1410 GST_DEBUG_OBJECT (render, "TAG event has attachments");
1412 for (index = 0; index < tag_size; index++) {
1413 if (!gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, index,
1416 buf = gst_sample_get_buffer (sample);
1417 structure = gst_sample_get_info (sample);
/* Skip attachments without data or metadata. */
1418 if (!buf || !structure)
1421 valid_mimetype = FALSE;
1422 valid_extension = FALSE;
/* First try to match the attachment's mimetype (structure name). */
1424 for (j = 0; j < G_N_ELEMENTS (mimetypes); j++) {
1425 if (gst_structure_has_name (structure, mimetypes[j])) {
1426 valid_mimetype = TRUE;
1430 filename = gst_structure_get_string (structure, "filename");
/* Mimetype didn't match: compare the last 4 characters of the
 * filename against the known font extensions.
 * NOTE(review): filename + len - 4 points before the string when
 * len < 4 (out-of-bounds read) — confirm filenames are validated
 * upstream, or guard with len >= 4. */
1434 if (!valid_mimetype) {
1435 guint len = strlen (filename);
1436 const gchar *extension = filename + len - 4;
1437 for (j = 0; j < G_N_ELEMENTS (extensions); j++) {
1438 if (g_ascii_strcasecmp (extension, extensions[j]) == 0) {
1439 valid_extension = TRUE;
/* Looks like a font: hand the raw bytes to libass under ass_mutex. */
1445 if (valid_mimetype || valid_extension) {
1446 g_mutex_lock (&render->ass_mutex);
1447 gst_buffer_map (buf, &map, GST_MAP_READ);
1448 ass_add_font (render->ass_library, (gchar *) filename,
1449 (gchar *) map.data, map.size);
1450 gst_buffer_unmap (buf, &map);
1451 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1452 g_mutex_unlock (&render->ass_mutex);
/* Event handler for the video sink pad.
 *
 * Handles CAPS (negotiation), SEGMENT (tracks the TIME video segment),
 * TAG (may carry embedded fonts), EOS and FLUSH_START/STOP (maintain the
 * video_eos / video_flushing flags used by the chain function). All other
 * events are forwarded with gst_pad_event_default(). */
1459 gst_ass_render_event_video (GstPad * pad, GstObject * parent, GstEvent * event)
1461 gboolean ret = FALSE;
1462 GstAssRender *render = GST_ASS_RENDER (parent);
1464 GST_DEBUG_OBJECT (pad, "received video event %" GST_PTR_FORMAT, event);
1466 switch (GST_EVENT_TYPE (event)) {
/* New video caps: reconfigure; the event is consumed here. */
1467 case GST_EVENT_CAPS:
1471 gst_event_parse_caps (event, &caps);
1472 ret = gst_ass_render_setcaps_video (pad, caps);
1473 gst_event_unref (event);
1476 case GST_EVENT_SEGMENT:
1480 GST_DEBUG_OBJECT (render, "received new segment");
1482 gst_event_copy_segment (event, &segment);
/* Only TIME segments are usable for running-time computations. */
1484 if (segment.format == GST_FORMAT_TIME) {
1485 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1486 &render->video_segment);
1488 render->video_segment = segment;
1490 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
1491 &render->video_segment);
1492 ret = gst_pad_event_default (pad, parent, event);
/* Non-TIME segment: warn and swallow the event. */
1494 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1495 ("received non-TIME newsegment event on video input"));
1497 gst_event_unref (event);
1503 GstTagList *taglist = NULL;
1505 /* tag events may contain attachments which might be fonts */
1506 GST_DEBUG_OBJECT (render, "got TAG event");
1508 gst_event_parse_tag (event, &taglist);
1509 gst_ass_render_handle_tags (render, taglist);
1510 ret = gst_pad_event_default (pad, parent, event);
/* EOS: flag it under the render lock so the chain stops accepting data. */
1514 GST_ASS_RENDER_LOCK (render);
1515 GST_INFO_OBJECT (render, "video EOS");
1516 render->video_eos = TRUE;
1517 GST_ASS_RENDER_UNLOCK (render);
1518 ret = gst_pad_event_default (pad, parent, event);
/* Flush start: set the flag and broadcast to unblock any waiting chain. */
1520 case GST_EVENT_FLUSH_START:
1521 GST_ASS_RENDER_LOCK (render);
1522 GST_INFO_OBJECT (render, "video flush start");
1523 render->video_flushing = TRUE;
1524 GST_ASS_RENDER_BROADCAST (render);
1525 GST_ASS_RENDER_UNLOCK (render);
1526 ret = gst_pad_event_default (pad, parent, event);
/* Flush stop: clear state and reset the video segment to a fresh TIME one. */
1528 case GST_EVENT_FLUSH_STOP:
1529 GST_ASS_RENDER_LOCK (render);
1530 GST_INFO_OBJECT (render, "video flush stop");
1531 render->video_flushing = FALSE;
1532 render->video_eos = FALSE;
1533 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
1534 GST_ASS_RENDER_UNLOCK (render);
1535 ret = gst_pad_event_default (pad, parent, event);
1538 ret = gst_pad_event_default (pad, parent, event);
/* Query handler for the video sink pad: answers CAPS queries with the
 * caps this element can accept on its video input (filtered), and
 * forwards everything else to the default handler. */
1546 gst_ass_render_query_video (GstPad * pad, GstObject * parent, GstQuery * query)
1548 gboolean res = FALSE;
1550 switch (GST_QUERY_TYPE (query)) {
1551 case GST_QUERY_CAPS:
1553 GstCaps *filter, *caps;
1555 gst_query_parse_caps (query, &filter);
/* gst_ass_render_get_videosink_caps returns a new ref; set it on the
 * query and drop our reference. */
1556 caps = gst_ass_render_get_videosink_caps (pad, filter);
1557 gst_query_set_caps_result (query, caps);
1558 gst_caps_unref (caps);
1563 res = gst_pad_query_default (pad, parent, query);
/* Event handler for the subtitle (text) sink pad.
 *
 * Tracks the subtitle segment, wakes the (possibly waiting) video chain
 * on segment/GAP/EOS, clears libass state on flush, and feeds TAG
 * attachments (embedded fonts) to the tag handler. Most events here are
 * consumed (unreffed) rather than forwarded, since the text pad has no
 * downstream peer of its own. */
1571 gst_ass_render_event_text (GstPad * pad, GstObject * parent, GstEvent * event)
1574 gboolean ret = FALSE;
1575 GstAssRender *render = GST_ASS_RENDER (parent);
1577 GST_DEBUG_OBJECT (pad, "received text event %" GST_PTR_FORMAT, event);
1579 switch (GST_EVENT_TYPE (event)) {
/* New subtitle caps: reconfigure; the event is consumed here. */
1580 case GST_EVENT_CAPS:
1584 gst_event_parse_caps (event, &caps);
1585 ret = gst_ass_render_setcaps_text (pad, caps);
1586 gst_event_unref (event);
1589 case GST_EVENT_SEGMENT:
/* A new segment implicitly ends any previous EOS condition. */
1593 GST_ASS_RENDER_LOCK (render);
1594 render->subtitle_eos = FALSE;
1595 GST_ASS_RENDER_UNLOCK (render);
1597 gst_event_copy_segment (event, &segment);
1599 GST_ASS_RENDER_LOCK (render);
/* Only TIME segments are usable; others are warned about and dropped. */
1600 if (segment.format == GST_FORMAT_TIME) {
1601 GST_DEBUG_OBJECT (render, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
1602 &render->subtitle_segment);
1604 render->subtitle_segment = segment;
1606 GST_DEBUG_OBJECT (render,
1607 "TEXT SEGMENT after: %" GST_SEGMENT_FORMAT,
1608 &render->subtitle_segment);
1610 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1611 ("received non-TIME newsegment event on subtitle input"));
1614 gst_event_unref (event);
1617 /* wake up the video chain, it might be waiting for a text buffer or
1618 * a text segment update */
1619 GST_ASS_RENDER_BROADCAST (render);
1620 GST_ASS_RENDER_UNLOCK (render);
1623 case GST_EVENT_GAP:{
1624 GstClockTime start, duration;
1626 gst_event_parse_gap (event, &start, &duration);
/* A GAP with a valid duration advances our position past the gap
 * (the adjustment to `start` is elided from this view). */
1627 if (GST_CLOCK_TIME_IS_VALID (duration))
1629 /* we do not expect another buffer until after gap,
1630 * so that is our position now */
1631 GST_ASS_RENDER_LOCK (render);
1632 render->subtitle_segment.position = start;
1634 /* wake up the video chain, it might be waiting for a text buffer or
1635 * a text segment update */
1636 GST_ASS_RENDER_BROADCAST (render);
1637 GST_ASS_RENDER_UNLOCK (render);
/* Flush stop: drop any queued text buffer and reset the subtitle segment. */
1640 case GST_EVENT_FLUSH_STOP:
1641 GST_ASS_RENDER_LOCK (render);
1642 GST_INFO_OBJECT (render, "text flush stop");
1643 render->subtitle_flushing = FALSE;
1644 render->subtitle_eos = FALSE;
1645 gst_ass_render_pop_text (render);
1646 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
1647 GST_ASS_RENDER_UNLOCK (render);
1648 gst_event_unref (event);
/* Flush start: purge all events queued in the libass track, then mark
 * the text pad as flushing and wake any waiters. ass_track access is
 * protected by ass_mutex, separate from the render lock. */
1651 case GST_EVENT_FLUSH_START:
1652 GST_DEBUG_OBJECT (render, "text flush start");
1653 g_mutex_lock (&render->ass_mutex);
1654 if (render->ass_track) {
1655 /* delete any events on the ass_track */
1656 for (i = 0; i < render->ass_track->n_events; i++) {
1657 GST_DEBUG_OBJECT (render, "deleted event with eid %i", i);
1658 ass_free_event (render->ass_track, i);
1660 render->ass_track->n_events = 0;
1661 GST_DEBUG_OBJECT (render, "done flushing");
1663 g_mutex_unlock (&render->ass_mutex);
1664 GST_ASS_RENDER_LOCK (render);
1665 render->subtitle_flushing = TRUE;
1666 GST_ASS_RENDER_BROADCAST (render);
1667 GST_ASS_RENDER_UNLOCK (render);
1668 gst_event_unref (event);
/* Text EOS: flag it so the video chain stops waiting for text buffers. */
1672 GST_ASS_RENDER_LOCK (render);
1673 render->subtitle_eos = TRUE;
1674 GST_INFO_OBJECT (render, "text EOS");
1675 /* wake up the video chain, it might be waiting for a text buffer or
1676 * a text segment update */
1677 GST_ASS_RENDER_BROADCAST (render);
1678 GST_ASS_RENDER_UNLOCK (render);
1679 gst_event_unref (event);
1684 GstTagList *taglist = NULL;
1686 /* tag events may contain attachments which might be fonts */
1687 GST_DEBUG_OBJECT (render, "got TAG event");
1689 gst_event_parse_tag (event, &taglist);
1690 gst_ass_render_handle_tags (render, taglist);
1691 ret = gst_pad_event_default (pad, parent, event);
1695 ret = gst_pad_event_default (pad, parent, event);
/* Plugin entry point: initializes the element and libass-wrapper debug
 * categories and registers the "assrender" element with PRIMARY rank. */
1703 plugin_init (GstPlugin * plugin)
1705 GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender",
1706 0, "ASS/SSA subtitle renderer");
1707 GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",
1708 0, "ASS/SSA subtitle renderer library");
/* Returns TRUE on successful registration, FALSE otherwise. */
1710 return gst_element_register (plugin, "assrender",
1711 GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER);
/* Standard GStreamer plugin descriptor: exports this shared object as a
 * plugin, wiring plugin_init as its entry point. */
1714 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1717 "ASS/SSA subtitle renderer",
1718 plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)