2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:element-assrender
24 * Renders timestamped SSA/ASS subtitles on top of a video stream.
27 * <title>Example launch line</title>
29 * gst-launch -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mp3parse ! mad ! audioconvert ! autoaudiosink d. ! queue ! ffdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
38 #include <gst/video/gstvideometa.h>
40 #include "gstassrender.h"
/* Debug categories: one for the element itself and a second one used to
 * forward log messages coming from libass (see _libass_message_cb). */
44 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
45 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
46 #define GST_CAT_DEFAULT gst_ass_render_debug
48 /* Filter signals and props */
62 /* FIXME: video-blend.c doesn't support formats with more than 8 bit per
63 * component (which get unpacked into ARGB64 or AYUV64) yet, such as:
64 * v210, v216, UYVP, GRAY16_LE, GRAY16_BE */
/* Raw video formats the element can blend subtitles onto in software. */
65 #define FORMATS "{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, \
66 I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, \
67 NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }"
69 #define ASSRENDER_CAPS GST_VIDEO_CAPS_MAKE(FORMATS)
/* Software-blendable caps plus "any format with the overlay-composition
 * feature": when the meta is negotiated, downstream blends and any raw
 * format is acceptable. */
71 #define ASSRENDER_ALL_CAPS ASSRENDER_CAPS ";" \
72 GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
/* Caps used to restrict negotiation when we must blend in software. */
74 static GstStaticCaps sw_template_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
/* Source pad: software formats, or any format carrying the
 * overlay-composition caps feature. */
76 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
79 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
/* Video sink pad: mirrors the source pad caps. */
82 static GstStaticPadTemplate video_sink_factory =
83 GST_STATIC_PAD_TEMPLATE ("video_sink",
86 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
/* Subtitle sink pad: accepts SSA/ASS subtitle streams only. */
89 static GstStaticPadTemplate text_sink_factory =
90 GST_STATIC_PAD_TEMPLATE ("text_sink",
93 GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
/* Accessors for the render lock/cond that serialize the video and text
 * chains: the lock guards subtitle_pending and the flushing/EOS flags,
 * the cond wakes a text chain blocked waiting for its buffer to be
 * consumed (see gst_ass_render_pop_text). */
96 #define GST_ASS_RENDER_GET_LOCK(ass) (&GST_ASS_RENDER (ass)->lock)
97 #define GST_ASS_RENDER_GET_COND(ass) (&GST_ASS_RENDER (ass)->cond)
98 #define GST_ASS_RENDER_LOCK(ass) (g_mutex_lock (GST_ASS_RENDER_GET_LOCK (ass)))
99 #define GST_ASS_RENDER_UNLOCK(ass) (g_mutex_unlock (GST_ASS_RENDER_GET_LOCK (ass)))
100 #define GST_ASS_RENDER_WAIT(ass) (g_cond_wait (GST_ASS_RENDER_GET_COND (ass), GST_ASS_RENDER_GET_LOCK (ass)))
101 #define GST_ASS_RENDER_SIGNAL(ass) (g_cond_signal (GST_ASS_RENDER_GET_COND (ass)))
102 #define GST_ASS_RENDER_BROADCAST(ass)(g_cond_broadcast (GST_ASS_RENDER_GET_COND (ass)))
/* GObject property plumbing. */
104 static void gst_ass_render_set_property (GObject * object, guint prop_id,
105 const GValue * value, GParamSpec * pspec);
106 static void gst_ass_render_get_property (GObject * object, guint prop_id,
107 GValue * value, GParamSpec * pspec);
109 static void gst_ass_render_finalize (GObject * object);
111 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
112 GstStateChange transition);
114 #define gst_ass_render_parent_class parent_class
115 G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);
/* Caps negotiation helpers for the video sink and source pads. */
117 static GstCaps *gst_ass_render_get_videosink_caps (GstPad * pad,
118 GstAssRender * render, GstCaps * filter);
119 static GstCaps *gst_ass_render_get_src_caps (GstPad * pad,
120 GstAssRender * render, GstCaps * filter);
122 static gboolean gst_ass_render_setcaps_video (GstPad * pad,
123 GstAssRender * render, GstCaps * caps);
124 static gboolean gst_ass_render_setcaps_text (GstPad * pad,
125 GstAssRender * render, GstCaps * caps);
/* Data flow entry points for the two sink pads. */
127 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
128 GstObject * parent, GstBuffer * buf);
129 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad,
130 GstObject * parent, GstBuffer * buf);
/* Event and query handlers. */
132 static gboolean gst_ass_render_event_video (GstPad * pad, GstObject * parent,
134 static gboolean gst_ass_render_event_text (GstPad * pad, GstObject * parent,
136 static gboolean gst_ass_render_event_src (GstPad * pad, GstObject * parent,
139 static gboolean gst_ass_render_query_video (GstPad * pad, GstObject * parent,
141 static gboolean gst_ass_render_query_src (GstPad * pad, GstObject * parent,
/* Class setup: installs properties, state-change handler, pad templates
 * and element metadata. */
146 gst_ass_render_class_init (GstAssRenderClass * klass)
148 GObjectClass *gobject_class = (GObjectClass *) klass;
149 GstElementClass *gstelement_class = (GstElementClass *) klass;
151 gobject_class->set_property = gst_ass_render_set_property;
152 gobject_class->get_property = gst_ass_render_get_property;
153 gobject_class->finalize = gst_ass_render_finalize;
155 g_object_class_install_property (gobject_class, PROP_ENABLE,
156 g_param_spec_boolean ("enable", "Enable",
157 "Enable rendering of subtitles", TRUE,
158 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
160 g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
161 g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
162 "Extract and use fonts embedded in the stream", TRUE,
163 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
165 g_object_class_install_property (gobject_class, PROP_WAIT_TEXT,
166 g_param_spec_boolean ("wait-text", "Wait Text",
/* Default is FALSE to match gst_ass_render_init(), which initializes
 * render->wait_text = FALSE; the pspec previously advertised TRUE. */
167 "Whether to wait for subtitles", FALSE,
168 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
170 gstelement_class->change_state =
171 GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
173 gst_element_class_add_pad_template (gstelement_class,
174 gst_static_pad_template_get (&src_factory));
175 gst_element_class_add_pad_template (gstelement_class,
176 gst_static_pad_template_get (&video_sink_factory));
177 gst_element_class_add_pad_template (gstelement_class,
178 gst_static_pad_template_get (&text_sink_factory));
180 gst_element_class_set_static_metadata (gstelement_class, "ASS/SSA Render",
181 "Mixer/Video/Overlay/Subtitle",
182 "Renders ASS/SSA subtitles with libass",
183 "Benjamin Schmitz <vortex@wolpzone.de>, "
184 "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
/* libass logging callback: formats the message once and routes it to the
 * dedicated gst_ass_render_lib_debug category at the GStreamer level
 * matching the libass @level (the level-dispatch branches are not
 * visible in this extract). */
188 _libass_message_cb (gint level, const gchar * fmt, va_list args,
191 gchar *message = g_strdup_vprintf (fmt, args);
194 GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
196 GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
198 GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
200 GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
202 GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
/* Instance setup: creates the three pads, wires their chain/event/query
 * functions, initializes synchronization primitives, default property
 * values and the libass library/renderer. */
208 gst_ass_render_init (GstAssRender * render)
210 GST_DEBUG_OBJECT (render, "init");
/* Create pads from the static templates declared above. */
212 render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
213 render->video_sinkpad =
214 gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
215 render->text_sinkpad =
216 gst_pad_new_from_static_template (&text_sink_factory, "text_sink");
218 gst_pad_set_chain_function (render->video_sinkpad,
219 GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
220 gst_pad_set_chain_function (render->text_sinkpad,
221 GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));
223 gst_pad_set_event_function (render->video_sinkpad,
224 GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
225 gst_pad_set_event_function (render->text_sinkpad,
226 GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
227 gst_pad_set_event_function (render->srcpad,
228 GST_DEBUG_FUNCPTR (gst_ass_render_event_src));
230 gst_pad_set_query_function (render->srcpad,
231 GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
232 gst_pad_set_query_function (render->video_sinkpad,
233 GST_DEBUG_FUNCPTR (gst_ass_render_query_video));
/* Forward allocation queries from the src pad to the video sink pad. */
235 GST_PAD_SET_PROXY_ALLOCATION (render->video_sinkpad);
237 gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
238 gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
239 gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);
241 gst_video_info_init (&render->info);
243 g_mutex_init (&render->lock);
244 g_cond_init (&render->cond);
/* Property defaults; note wait_text defaults to FALSE. */
246 render->renderer_init_ok = FALSE;
247 render->track_init_ok = FALSE;
248 render->enable = TRUE;
249 render->embeddedfonts = TRUE;
250 render->wait_text = FALSE;
252 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
253 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
/* ass_mutex serializes all libass calls; libass itself is not
 * thread-safe across a shared library/renderer/track. */
255 g_mutex_init (&render->ass_mutex);
256 render->ass_library = ass_library_init ();
257 ass_set_message_cb (render->ass_library, _libass_message_cb, render);
258 ass_set_extract_fonts (render->ass_library, 1);
260 render->ass_renderer = ass_renderer_init (render->ass_library);
261 if (!render->ass_renderer) {
262 GST_WARNING_OBJECT (render, "cannot create renderer instance");
/* Renderer creation failure is treated as fatal. */
263 g_assert_not_reached ();
266 render->ass_track = NULL;
268 GST_DEBUG_OBJECT (render, "init complete");
/* Releases all per-instance resources: synchronization primitives and
 * the libass track/renderer/library (each guarded by a NULL check),
 * then chains up to the parent finalize. */
272 gst_ass_render_finalize (GObject * object)
274 GstAssRender *render = GST_ASS_RENDER (object);
276 g_mutex_clear (&render->lock);
277 g_cond_clear (&render->cond);
279 if (render->ass_track) {
280 ass_free_track (render->ass_track);
283 if (render->ass_renderer) {
284 ass_renderer_done (render->ass_renderer);
287 if (render->ass_library) {
288 ass_library_done (render->ass_library);
291 g_mutex_clear (&render->ass_mutex);
293 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Property setter; all fields are written under the render lock, and the
 * libass call is additionally serialized with ass_mutex. */
297 gst_ass_render_set_property (GObject * object, guint prop_id,
298 const GValue * value, GParamSpec * pspec)
300 GstAssRender *render = GST_ASS_RENDER (object);
302 GST_ASS_RENDER_LOCK (render);
305 render->enable = g_value_get_boolean (value);
307 case PROP_EMBEDDEDFONTS:
308 render->embeddedfonts = g_value_get_boolean (value);
/* Propagate immediately to libass so later streams honour it. */
309 g_mutex_lock (&render->ass_mutex);
310 ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
311 g_mutex_unlock (&render->ass_mutex);
314 render->wait_text = g_value_get_boolean (value);
317 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
320 GST_ASS_RENDER_UNLOCK (render);
/* Property getter; reads are taken under the render lock for a
 * consistent snapshot of the boolean flags. */
324 gst_ass_render_get_property (GObject * object, guint prop_id,
325 GValue * value, GParamSpec * pspec)
327 GstAssRender *render = GST_ASS_RENDER (object);
329 GST_ASS_RENDER_LOCK (render);
332 g_value_set_boolean (value, render->enable);
334 case PROP_EMBEDDEDFONTS:
335 g_value_set_boolean (value, render->embeddedfonts);
338 g_value_set_boolean (value, render->wait_text);
341 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
344 GST_ASS_RENDER_UNLOCK (render);
347 /* Called with lock held */
/* Drops the queued subtitle buffer (if any) and broadcasts on the cond
 * so a text chain blocked in GST_ASS_RENDER_WAIT can push the next
 * buffer. */
349 gst_ass_render_pop_text (GstAssRender * render)
351 if (render->subtitle_pending) {
352 GST_DEBUG_OBJECT (render, "releasing text buffer %p",
353 render->subtitle_pending);
354 gst_buffer_unref (render->subtitle_pending);
355 render->subtitle_pending = NULL;
358 /* Let the text task know we used that buffer */
359 GST_ASS_RENDER_BROADCAST (render);
/* State handling: on PAUSED->READY (before chaining up) the chains are
 * unblocked by setting the flushing flags and popping any pending text;
 * after chaining up, libass state and the cached composition are reset.
 * READY->PAUSED re-arms the flags and segments for a new stream. */
362 static GstStateChangeReturn
363 gst_ass_render_change_state (GstElement * element, GstStateChange transition)
365 GstAssRender *render = GST_ASS_RENDER (element);
366 GstStateChangeReturn ret;
368 switch (transition) {
369 case GST_STATE_CHANGE_PAUSED_TO_READY:
370 GST_ASS_RENDER_LOCK (render);
/* Unblock both chain functions before the pads are deactivated. */
371 render->subtitle_flushing = TRUE;
372 render->video_flushing = TRUE;
373 gst_ass_render_pop_text (render);
374 GST_ASS_RENDER_UNLOCK (render);
380 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
381 if (ret == GST_STATE_CHANGE_FAILURE)
384 switch (transition) {
385 case GST_STATE_CHANGE_PAUSED_TO_READY:
/* Downward: drop the subtitle track and cached overlay under
 * the libass lock. */
386 g_mutex_lock (&render->ass_mutex);
387 if (render->ass_track)
388 ass_free_track (render->ass_track);
389 render->ass_track = NULL;
390 if (render->composition) {
391 gst_video_overlay_composition_unref (render->composition);
392 render->composition = NULL;
394 render->track_init_ok = FALSE;
395 render->renderer_init_ok = FALSE;
396 g_mutex_unlock (&render->ass_mutex);
398 case GST_STATE_CHANGE_READY_TO_PAUSED:
/* Upward: clear flushing/EOS state and reset both segments. */
399 GST_ASS_RENDER_LOCK (render);
400 render->subtitle_flushing = FALSE;
401 render->video_flushing = FALSE;
402 render->video_eos = FALSE;
403 render->subtitle_eos = FALSE;
404 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
405 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
406 GST_ASS_RENDER_UNLOCK (render);
/* Source pad query handler: answers CAPS queries via
 * gst_ass_render_get_src_caps, everything else goes to the default
 * handler. */
417 gst_ass_render_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
419 gboolean res = FALSE;
421 switch (GST_QUERY_TYPE (query)) {
424 GstCaps *filter, *caps;
426 gst_query_parse_caps (query, &filter);
427 caps = gst_ass_render_get_src_caps (pad, (GstAssRender *) parent, filter);
428 gst_query_set_caps_result (query, caps);
429 gst_caps_unref (caps);
434 res = gst_pad_query_default (pad, parent, query);
/* Source pad event handler. Seeks are fanned out to both sink pads once
 * a subtitle track exists (otherwise they go to the video pad only);
 * other events are pushed to the video pad, and duplicated to the text
 * pad when a track is present. */
442 gst_ass_render_event_src (GstPad * pad, GstObject * parent, GstEvent * event)
444 GstAssRender *render = GST_ASS_RENDER (parent);
445 gboolean ret = FALSE;
447 GST_DEBUG_OBJECT (render, "received src event %" GST_PTR_FORMAT, event);
449 switch (GST_EVENT_TYPE (event)) {
450 case GST_EVENT_SEEK:{
/* No subtitle track yet: nothing to synchronize, just forward. */
453 if (!render->track_init_ok) {
454 GST_DEBUG_OBJECT (render, "seek received, pushing upstream");
455 ret = gst_pad_push_event (render->video_sinkpad, event);
459 GST_DEBUG_OBJECT (render, "seek received, driving from here");
461 gst_event_parse_seek (event, NULL, NULL, &flags, NULL, NULL, NULL, NULL);
463 /* Flush downstream, only for flushing seek */
464 if (flags & GST_SEEK_FLAG_FLUSH)
465 gst_pad_push_event (render->srcpad, gst_event_new_flush_start ());
467 /* Mark subtitle as flushing, unblocks chains */
468 GST_ASS_RENDER_LOCK (render);
469 render->subtitle_flushing = TRUE;
470 render->video_flushing = TRUE;
471 gst_ass_render_pop_text (render);
472 GST_ASS_RENDER_UNLOCK (render);
474 /* Seek on each sink pad */
475 gst_event_ref (event);
476 ret = gst_pad_push_event (render->video_sinkpad, event);
478 ret = gst_pad_push_event (render->text_sinkpad, event);
480 gst_event_unref (event);
/* Non-seek events: duplicate onto the text pad when a track is
 * active so both streams stay in sync. */
485 if (render->track_init_ok) {
486 gst_event_ref (event);
487 ret = gst_pad_push_event (render->video_sinkpad, event);
488 gst_pad_push_event (render->text_sinkpad, event);
490 ret = gst_pad_push_event (render->video_sinkpad, event);
499 * gst_ass_render_add_feature_and_intersect:
501 * Creates a new #GstCaps containing the (given caps +
502 * given caps feature) + (given caps intersected by the
505 * Returns: the new #GstCaps
508 gst_ass_render_add_feature_and_intersect (GstCaps * caps,
509 const gchar * feature, GstCaps * filter)
/* Copy, then tag every non-ANY structure with @feature. */
514 new_caps = gst_caps_copy (caps);
516 caps_size = gst_caps_get_size (new_caps);
517 for (i = 0; i < caps_size; i++) {
518 GstCapsFeatures *features = gst_caps_get_features (new_caps, i);
519 if (!gst_caps_features_is_any (features)) {
520 gst_caps_features_add (features, feature);
/* Append the original caps restricted by @filter as the fallback. */
524 gst_caps_append (new_caps, gst_caps_intersect_full (caps,
525 filter, GST_CAPS_INTERSECT_FIRST));
531 * gst_ass_render_intersect_by_feature:
533 * Creates a new #GstCaps based on the following filtering rule.
535 * For each individual caps contained in given caps, if the
536 * caps uses the given caps feature, keep a version of the caps
537 * with the feature and an another one without. Otherwise, intersect
538 * the caps with the given filter.
540 * Returns: the new #GstCaps
543 gst_ass_render_intersect_by_feature (GstCaps * caps,
544 const gchar * feature, GstCaps * filter)
549 new_caps = gst_caps_new_empty ();
551 caps_size = gst_caps_get_size (caps);
552 for (i = 0; i < caps_size; i++) {
553 GstStructure *caps_structure = gst_caps_get_structure (caps, i);
/* Work on a single-structure copy so features can be edited
 * without touching the input caps. */
554 GstCapsFeatures *caps_features =
555 gst_caps_features_copy (gst_caps_get_features (caps, i));
556 GstCaps *filtered_caps;
557 GstCaps *simple_caps =
558 gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
559 gst_caps_set_features (simple_caps, 0, caps_features);
561 if (gst_caps_features_contains (caps_features, feature)) {
/* Keep the feature-tagged variant, then a stripped variant. */
562 gst_caps_append (new_caps, gst_caps_copy (simple_caps));
564 gst_caps_features_remove (caps_features, feature);
565 filtered_caps = gst_caps_ref (simple_caps);
/* No feature: restrict by @filter (the software caps). */
567 filtered_caps = gst_caps_intersect_full (simple_caps, filter,
568 GST_CAPS_INTERSECT_FIRST);
571 gst_caps_unref (simple_caps);
572 gst_caps_append (new_caps, filtered_caps);
/* Computes the caps the video sink pad can accept by querying the src
 * pad's peer: the filter is augmented with the overlay-composition
 * feature (plus a software-caps fallback) before the peer query, and
 * ANY/NULL peer results fall back to template caps. */
579 gst_ass_render_get_videosink_caps (GstPad * pad, GstAssRender * render,
582 GstPad *srcpad = render->srcpad;
583 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
586 /* filter caps + composition feature + filter caps
587 * filtered by the software caps. */
588 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
589 assrender_filter = gst_ass_render_add_feature_and_intersect (filter,
590 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
591 gst_caps_unref (sw_caps);
593 GST_DEBUG_OBJECT (render, "assrender filter %" GST_PTR_FORMAT,
597 peer_caps = gst_pad_peer_query_caps (srcpad, assrender_filter);
599 if (assrender_filter)
600 gst_caps_unref (assrender_filter);
604 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
606 if (gst_caps_is_any (peer_caps)) {
608 /* if peer returns ANY caps, return filtered src pad template caps */
609 caps = gst_caps_copy (gst_pad_get_pad_template_caps (srcpad));
612 /* duplicate caps which contains the composition into one version with
613 * the meta and one without. Filter the other caps by the software caps */
614 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
615 caps = gst_ass_render_intersect_by_feature (peer_caps,
616 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
617 gst_caps_unref (sw_caps);
620 gst_caps_unref (peer_caps);
623 /* no peer, our padtemplate is enough then */
624 caps = gst_pad_get_pad_template_caps (pad);
/* Finally apply the caller's filter, if any. */
628 GstCaps *intersection = gst_caps_intersect_full (filter, caps,
629 GST_CAPS_INTERSECT_FIRST);
630 gst_caps_unref (caps);
634 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
/* Mirror of get_videosink_caps for the src pad: queries the video sink
 * pad's peer, this time splitting the filter by the composition feature
 * before the query and adding the feature to the upstream result. */
640 gst_ass_render_get_src_caps (GstPad * pad, GstAssRender * render,
643 GstPad *sinkpad = render->video_sinkpad;
644 GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
647 /* duplicate filter caps which contains the composition into one version
648 * with the meta and one without. Filter the other caps by the software
650 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
652 gst_ass_render_intersect_by_feature (filter,
653 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
654 gst_caps_unref (sw_caps);
657 peer_caps = gst_pad_peer_query_caps (sinkpad, assrender_filter);
659 if (assrender_filter)
660 gst_caps_unref (assrender_filter);
664 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
666 if (gst_caps_is_any (peer_caps)) {
668 /* if peer returns ANY caps, return filtered sink pad template caps */
669 caps = gst_caps_copy (gst_pad_get_pad_template_caps (sinkpad));
673 /* return upstream caps + composition feature + upstream caps
674 * filtered by the software caps. */
675 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
676 caps = gst_ass_render_add_feature_and_intersect (peer_caps,
677 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
678 gst_caps_unref (sw_caps);
681 gst_caps_unref (peer_caps);
684 /* no peer, our padtemplate is enough then */
685 caps = gst_pad_get_pad_template_caps (pad);
/* Apply the caller's filter last. */
689 GstCaps *intersection;
692 gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
693 gst_caps_unref (caps);
697 GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
/* Blits the linked list of libass images into a single premultiplied
 * BGRA buffer of @width x @height at @stride, offset by (@x_off, @y_off).
 * Each source image is a 1bpp alpha bitmap tinted with image->color
 * (RGBA byte order, alpha inverted: 0 = opaque in libass terms). */
703 blit_bgra_premultiplied (GstAssRender * render, ASS_Image * ass_image,
704 guint8 * data, gint width, gint height, gint stride, gint x_off, gint y_off)
707 gint alpha, r, g, b, k;
/* Start from a fully transparent canvas. */
715 memset (data, 0, stride * height);
718 dst_x = ass_image->dst_x + x_off;
719 dst_y = ass_image->dst_y + y_off;
/* Skip images that land entirely outside the canvas. */
721 if (dst_y >= height || dst_x >= width)
/* Decompose the 0xRRGGBBAA libass color; alpha is inverted. */
724 alpha = 255 - (ass_image->color & 0xff);
725 r = ((ass_image->color) >> 24) & 0xff;
726 g = ((ass_image->color) >> 16) & 0xff;
727 b = ((ass_image->color) >> 8) & 0xff;
728 src = ass_image->bitmap;
729 dst = data + dst_y * stride + dst_x * 4;
/* Clip the image to the canvas and derive per-row skips. */
731 w = MIN (ass_image->w, width - dst_x);
732 h = MIN (ass_image->h, height - dst_y);
733 src_skip = ass_image->stride - w;
734 dst_skip = stride - w * 4;
736 for (y = 0; y < h; y++) {
737 for (x = 0; x < w; x++) {
/* k = effective coverage of this pixel (bitmap * alpha). */
738 k = src[0] * alpha / 255;
/* Write premultiplied color directly... */
741 dst[2] = (k * r) / 255;
742 dst[1] = (k * g) / 255;
743 dst[0] = (k * b) / 255;
/* ...or "over"-composite onto what is already there. */
745 dst[3] = k + (255 - k) * dst[3] / 255;
746 dst[2] = (k * r + (255 - k) * dst[2]) / 255;
747 dst[1] = (k * g + (255 - k) * dst[1]) / 255;
748 dst[0] = (k * b + (255 - k) * dst[0]) / 255;
758 ass_image = ass_image->next;
760 GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
/* Returns TRUE iff @incaps falls entirely within the formats the element
 * can blend in software (ASSRENDER_CAPS). */
764 gst_ass_render_can_handle_caps (GstCaps * incaps)
766 static GstStaticCaps static_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
770 caps = gst_static_caps_get (&static_caps);
771 ret = gst_caps_is_subset (incaps, caps);
772 gst_caps_unref (caps);
/* Video caps handler: negotiates the overlay-composition meta with
 * downstream (preferring attach-to-buffer over software blending),
 * then configures the libass renderer for the negotiated geometry. */
778 gst_ass_render_setcaps_video (GstPad * pad, GstAssRender * render,
782 gboolean ret = FALSE;
784 gboolean attach = FALSE;
785 gboolean caps_has_meta = TRUE;
787 GstCaps *original_caps = caps;
789 if (!gst_video_info_from_caps (&info, caps))
795 /* Try to use the overlay meta if possible */
796 f = gst_caps_get_features (caps, 0);
798 /* if the caps doesn't have the overlay meta, we query if downstream
799 * accepts it before trying the version without the meta
800 * If upstream already is using the meta then we can only use it */
802 || !gst_caps_features_contains (f,
803 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION)) {
804 GstCaps *overlay_caps;
806 /* In this case we added the meta, but we can work without it
807 * so preserve the original caps so we can use it as a fallback */
808 overlay_caps = gst_caps_copy (caps);
810 f = gst_caps_get_features (overlay_caps, 0);
811 gst_caps_features_add (f,
812 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
814 ret = gst_pad_peer_query_accept_caps (render->srcpad, overlay_caps);
815 GST_DEBUG_OBJECT (render, "Downstream accepts the overlay meta: %d", ret);
817 gst_caps_unref (caps);
821 /* fallback to the original */
822 gst_caps_unref (overlay_caps);
823 caps_has_meta = FALSE;
827 GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, caps);
828 ret = gst_pad_set_caps (render->srcpad, caps);
829 gst_caps_unref (caps);
834 render->width = info.width;
835 render->height = info.height;
/* Confirm downstream really supports the composition meta via the
 * allocation query, not just accept-caps. */
837 query = gst_query_new_allocation (caps, FALSE);
838 if (caps_has_meta && gst_pad_peer_query (render->srcpad, query)) {
839 if (gst_query_find_allocation_meta (query,
840 GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL))
843 gst_query_unref (query);
845 render->attach_compo_to_buffer = attach;
849 /* Some elements (fakesink) claim to accept the meta on caps but won't
850 put it in the allocation query result, this leads below
851 check to fail. Prevent this by removing the meta from caps */
852 caps = original_caps;
853 ret = gst_pad_set_caps (render->srcpad, caps);
/* Software blending path requires one of the supported formats. */
857 if (!gst_ass_render_can_handle_caps (caps))
858 goto unsupported_caps;
/* Configure libass for the negotiated frame geometry and PAR. */
861 g_mutex_lock (&render->ass_mutex);
862 ass_set_frame_size (render->ass_renderer, render->width, render->height);
863 ass_set_storage_size (render->ass_renderer,
864 render->info.width, render->info.height);
865 ass_set_pixel_aspect (render->ass_renderer,
866 (gdouble) render->info.par_n / (gdouble) render->info.par_d);
867 ass_set_font_scale (render->ass_renderer, 1.0);
868 ass_set_hinting (render->ass_renderer, ASS_HINTING_LIGHT);
/* NOTE(review): the second ass_set_fonts call overrides the first
 * ("Arial"/"sans-serif" -> NULL/"Sans"); a conditional may have been
 * lost in this extract — confirm against the full file. */
870 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
871 ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
872 ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
873 ass_set_use_margins (render->ass_renderer, 0);
874 g_mutex_unlock (&render->ass_mutex);
876 render->renderer_init_ok = TRUE;
878 GST_DEBUG_OBJECT (render, "ass renderer setup complete");
/* Error paths. */
887 GST_ERROR_OBJECT (render, "Can't parse caps: %" GST_PTR_FORMAT, caps);
893 GST_ERROR_OBJECT (render, "Unsupported caps: %" GST_PTR_FORMAT, caps);
/* Text caps handler: creates the libass track and, when the caps carry
 * codec_data (the SSA/ASS script header), feeds it to libass via
 * ass_process_codec_private. All libass access is under ass_mutex. */
900 gst_ass_render_setcaps_text (GstPad * pad, GstAssRender * render,
903 GstStructure *structure;
907 gboolean ret = FALSE;
909 structure = gst_caps_get_structure (caps, 0);
911 GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,
914 value = gst_structure_get_value (structure, "codec_data");
916 g_mutex_lock (&render->ass_mutex);
918 priv = gst_value_get_buffer (value);
919 g_return_val_if_fail (priv != NULL, FALSE);
921 gst_buffer_map (priv, &map, GST_MAP_READ);
923 if (!render->ass_track)
924 render->ass_track = ass_new_track (render->ass_library);
/* Parse the script header (styles, events format, etc.). */
926 ass_process_codec_private (render->ass_track, (char *) map.data, map.size);
928 gst_buffer_unmap (priv, &map);
930 GST_DEBUG_OBJECT (render, "ass track created");
932 render->track_init_ok = TRUE;
/* No codec_data: start with an empty track. */
935 } else if (!render->ass_track) {
936 render->ass_track = ass_new_track (render->ass_library);
938 render->track_init_ok = TRUE;
942 g_mutex_unlock (&render->ass_mutex);
/* Feeds one subtitle buffer to libass. @running_time/@duration are in
 * GstClockTime (ns); libass expects milliseconds, hence the GST_MSECOND
 * division below. */
949 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
950 GstClockTime running_time, GstClockTime duration)
953 gdouble pts_start, pts_end;
955 pts_start = running_time;
956 pts_start /= GST_MSECOND;
958 pts_end /= GST_MSECOND;
960 GST_DEBUG_OBJECT (render,
961 "Processing subtitles with running time %" GST_TIME_FORMAT
962 " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
963 GST_TIME_ARGS (duration));
965 gst_buffer_map (buffer, &map, GST_MAP_READ);
/* Push the raw event text into the track under the libass lock. */
967 g_mutex_lock (&render->ass_mutex);
968 ass_process_chunk (render->ass_track, (gchar *) map.data, map.size,
970 g_mutex_unlock (&render->ass_mutex);
972 gst_buffer_unmap (buffer, &map);
/* Builds a GstVideoOverlayComposition from the libass image list:
 * computes the bounding box of all images, blits them (premultiplied
 * BGRA) into one buffer, and wraps it in a single overlay rectangle
 * positioned at the box origin. Returns NULL on allocation/map failure
 * (error paths not fully visible in this extract). */
975 static GstVideoOverlayComposition *
976 gst_ass_render_composite_overlay (GstAssRender * render, ASS_Image * images)
978 GstVideoOverlayComposition *composition;
979 GstVideoOverlayRectangle *rectangle;
995 /* find bounding box of all images, to limit the overlay rectangle size */
996 for (image = images; image; image = image->next) {
997 if (min_x > image->dst_x)
998 min_x = image->dst_x;
999 if (min_y > image->dst_y)
1000 min_y = image->dst_y;
1001 if (max_x < image->dst_x + image->w)
1002 max_x = image->dst_x + image->w;
1003 if (max_y < image->dst_y + image->h)
1004 max_y = image->dst_y + image->h;
/* Clamp the box to the frame size. */
1007 width = MIN (max_x - min_x, render->width);
1008 height = MIN (max_y - min_y, render->height);
1010 GST_DEBUG_OBJECT (render, "render overlay rectangle %dx%d%+d%+d",
1011 width, height, min_x, min_y);
1013 buffer = gst_buffer_new_and_alloc (4 * width * height);
1015 GST_ERROR_OBJECT (render, "Failed to allocate overlay buffer");
1019 vmeta = gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
1020 GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, width, height);
1022 if (!gst_video_meta_map (vmeta, 0, &map, &data, &stride, GST_MAP_READWRITE)) {
1023 GST_ERROR_OBJECT (render, "Failed to map overlay buffer");
1024 gst_buffer_unref (buffer);
/* Images are blitted relative to the bounding-box origin. */
1028 blit_bgra_premultiplied (render, images, data, width, height, stride,
1030 gst_video_meta_unmap (vmeta, 0, &map);
1032 rectangle = gst_video_overlay_rectangle_new_raw (buffer, min_x, min_y,
1033 width, height, GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
1035 gst_buffer_unref (buffer);
1037 composition = gst_video_overlay_composition_new (rectangle);
1038 gst_video_overlay_rectangle_unref (rectangle);
/* Pushes @video_frame downstream, applying the current composition
 * either as an attached meta (when downstream supports it) or by
 * blending into the frame in software. With no composition the frame
 * passes through untouched. */
1044 gst_ass_render_push_frame (GstAssRender * render, GstBuffer * video_frame)
1046 GstVideoFrame frame;
1048 if (!render->composition)
/* Buffer must be writable both for adding meta and for blending. */
1051 video_frame = gst_buffer_make_writable (video_frame);
1053 if (render->attach_compo_to_buffer) {
1054 gst_buffer_add_video_overlay_composition_meta (video_frame,
1055 render->composition);
1059 if (!gst_video_frame_map (&frame, &render->info, video_frame,
1060 GST_MAP_READWRITE)) {
1061 GST_WARNING_OBJECT (render, "failed to map video frame for blending");
1065 gst_video_overlay_composition_blend (render->composition, &frame);
1066 gst_video_frame_unmap (&frame);
1069 return gst_pad_push (render->srcpad, video_frame);
1072 static GstFlowReturn
1073 gst_ass_render_chain_video (GstPad * pad, GstObject * parent,
1076 GstAssRender *render = GST_ASS_RENDER (parent);
1077 GstFlowReturn ret = GST_FLOW_OK;
1078 gboolean in_seg = FALSE;
1079 guint64 start, stop, clip_start = 0, clip_stop = 0;
1080 ASS_Image *ass_image;
1082 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1083 goto missing_timestamp;
1085 /* ignore buffers that are outside of the current segment */
1086 start = GST_BUFFER_TIMESTAMP (buffer);
1088 if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
1089 stop = GST_CLOCK_TIME_NONE;
1091 stop = start + GST_BUFFER_DURATION (buffer);
1094 /* segment_clip() will adjust start unconditionally to segment_start if
1095 * no stop time is provided, so handle this ourselves */
1096 if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
1097 goto out_of_segment;
1100 gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
1101 &clip_start, &clip_stop);
1104 goto out_of_segment;
1106 /* if the buffer is only partially in the segment, fix up stamps */
1107 if (clip_start != start || (stop != -1 && clip_stop != stop)) {
1108 GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
1109 buffer = gst_buffer_make_writable (buffer);
1110 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1112 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1115 /* now, after we've done the clipping, fix up end time if there's no
1116 * duration (we only use those estimated values internally though, we
1117 * don't want to set bogus values on the buffer itself) */
1119 if (render->info.fps_n && render->info.fps_d) {
1120 GST_DEBUG_OBJECT (render, "estimating duration based on framerate");
1122 start + gst_util_uint64_scale_int (GST_SECOND, render->info.fps_d,
1123 render->info.fps_n);
1125 GST_WARNING_OBJECT (render, "no duration, assuming minimal duration");
1126 stop = start + 1; /* we need to assume some interval */
1132 GST_ASS_RENDER_LOCK (render);
1134 if (render->video_flushing)
1137 if (render->video_eos)
1140 if (render->renderer_init_ok && render->track_init_ok && render->enable) {
1141 /* Text pad linked, check if we have a text buffer queued */
1142 if (render->subtitle_pending) {
1143 GstClockTime text_start = GST_CLOCK_TIME_NONE;
1144 GstClockTime text_end = GST_CLOCK_TIME_NONE;
1145 GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
1146 GstClockTime text_running_time_end = GST_CLOCK_TIME_NONE;
1147 GstClockTime vid_running_time, vid_running_time_end;
1151 /* if the text buffer isn't stamped right, pop it off the
1152 * queue and display it for the current video frame only */
1153 if (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending) ||
1154 !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending)) {
1155 GST_WARNING_OBJECT (render,
1156 "Got text buffer with invalid timestamp or duration");
1157 gst_ass_render_pop_text (render);
1158 GST_ASS_RENDER_UNLOCK (render);
1159 goto wait_for_text_buf;
1162 text_start = GST_BUFFER_TIMESTAMP (render->subtitle_pending);
1163 text_end = text_start + GST_BUFFER_DURATION (render->subtitle_pending);
1166 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1168 vid_running_time_end =
1169 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1172 /* If timestamp and duration are valid */
1174 gst_segment_to_running_time (&render->video_segment,
1175 GST_FORMAT_TIME, text_start);
1176 text_running_time_end =
1177 gst_segment_to_running_time (&render->video_segment,
1178 GST_FORMAT_TIME, text_end);
1180 GST_LOG_OBJECT (render, "T: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1181 GST_TIME_ARGS (text_running_time),
1182 GST_TIME_ARGS (text_running_time_end));
1183 GST_LOG_OBJECT (render, "V: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1184 GST_TIME_ARGS (vid_running_time),
1185 GST_TIME_ARGS (vid_running_time_end));
1188 if (text_running_time_end <= vid_running_time) {
1189 GST_DEBUG_OBJECT (render, "text buffer too old, popping");
1190 gst_ass_render_pop_text (render);
1191 GST_ASS_RENDER_UNLOCK (render);
1192 goto wait_for_text_buf;
1195 if (render->need_process) {
1196 GST_DEBUG_OBJECT (render, "process text buffer");
1197 gst_ass_render_process_text (render, render->subtitle_pending,
1198 text_running_time, text_running_time_end - text_running_time);
1199 render->need_process = FALSE;
1202 GST_ASS_RENDER_UNLOCK (render);
1204 /* libass needs timestamps in ms */
1205 timestamp = vid_running_time / GST_MSECOND;
1207 g_mutex_lock (&render->ass_mutex);
1208 ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
1209 timestamp, &changed);
1210 g_mutex_unlock (&render->ass_mutex);
1212 if ((!ass_image || changed) && render->composition) {
1213 GST_DEBUG_OBJECT (render, "release overlay (changed %d)", changed);
1214 gst_video_overlay_composition_unref (render->composition);
1215 render->composition = NULL;
1218 if (ass_image != NULL) {
1219 if (!render->composition)
1220 render->composition = gst_ass_render_composite_overlay (render,
1223 GST_DEBUG_OBJECT (render, "nothing to render right now");
1226 /* Push the video frame */
1227 ret = gst_ass_render_push_frame (render, buffer);
1229 if (text_running_time_end <= vid_running_time_end) {
1230 GST_ASS_RENDER_LOCK (render);
1231 gst_ass_render_pop_text (render);
1232 GST_ASS_RENDER_UNLOCK (render);
1235 gboolean wait_for_text_buf = TRUE;
1237 if (render->subtitle_eos)
1238 wait_for_text_buf = FALSE;
1240 if (!render->wait_text)
1241 wait_for_text_buf = FALSE;
1243 /* Text pad linked, but no text buffer available - what now? */
1244 if (render->subtitle_segment.format == GST_FORMAT_TIME) {
1245 GstClockTime text_start_running_time, text_last_stop_running_time;
1246 GstClockTime vid_running_time;
1249 gst_segment_to_running_time (&render->video_segment,
1250 GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
1251 text_start_running_time =
1252 gst_segment_to_running_time (&render->subtitle_segment,
1253 GST_FORMAT_TIME, render->subtitle_segment.start);
1254 text_last_stop_running_time =
1255 gst_segment_to_running_time (&render->subtitle_segment,
1256 GST_FORMAT_TIME, render->subtitle_segment.position);
1258 if ((GST_CLOCK_TIME_IS_VALID (text_start_running_time) &&
1259 vid_running_time < text_start_running_time) ||
1260 (GST_CLOCK_TIME_IS_VALID (text_last_stop_running_time) &&
1261 vid_running_time < text_last_stop_running_time)) {
1262 wait_for_text_buf = FALSE;
1266 if (wait_for_text_buf) {
1267 GST_DEBUG_OBJECT (render, "no text buffer, need to wait for one");
1268 GST_ASS_RENDER_WAIT (render);
1269 GST_DEBUG_OBJECT (render, "resuming");
1270 GST_ASS_RENDER_UNLOCK (render);
1271 goto wait_for_text_buf;
1273 GST_ASS_RENDER_UNLOCK (render);
1274 GST_LOG_OBJECT (render, "no need to wait for a text buffer");
1275 ret = gst_pad_push (render->srcpad, buffer);
1279 GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");
1281 GST_ASS_RENDER_UNLOCK (render);
1282 ret = gst_pad_push (render->srcpad, buffer);
1286 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p ret=%d", buffer, ret);
1288 /* Update last_stop */
1289 render->video_segment.position = clip_start;
1295 GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
1296 gst_buffer_unref (buffer);
1301 GST_ASS_RENDER_UNLOCK (render);
1302 GST_DEBUG_OBJECT (render, "flushing, discarding buffer");
1303 gst_buffer_unref (buffer);
1304 return GST_FLOW_FLUSHING;
1308 GST_ASS_RENDER_UNLOCK (render);
1309 GST_DEBUG_OBJECT (render, "eos, discarding buffer");
1310 gst_buffer_unref (buffer);
1311 return GST_FLOW_EOS;
1315 GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
1316 gst_buffer_unref (buffer);
/* Chain function for the subtitle (text) sink pad.  Takes ownership of
 * @buffer.  Clips the buffer against the subtitle segment, then hands it
 * over as render->subtitle_pending for the video chain to consume,
 * blocking under GST_ASS_RENDER_LOCK while a previous validly-timestamped
 * buffer is still queued.  Returns GST_FLOW_FLUSHING when flushing.
 * NOTE(review): several original source lines are elided in this view
 * (braces, goto/early-exit statements); comments describe only what the
 * visible lines establish. */
1321 static GstFlowReturn
1322 gst_ass_render_chain_text (GstPad * pad, GstObject * parent, GstBuffer * buffer)
1324 GstFlowReturn ret = GST_FLOW_OK;
1325 GstAssRender *render = GST_ASS_RENDER (parent);
1326 gboolean in_seg = FALSE;
1327 guint64 clip_start = 0, clip_stop = 0;
1329 GST_DEBUG_OBJECT (render, "entering chain for buffer %p", buffer);
1331 GST_ASS_RENDER_LOCK (render);
/* Bail out early while flushing or after EOS on the text pad. */
1333 if (render->subtitle_flushing) {
1334 GST_ASS_RENDER_UNLOCK (render);
1335 ret = GST_FLOW_FLUSHING;
1336 GST_LOG_OBJECT (render, "text flushing");
1340 if (render->subtitle_eos) {
1341 GST_ASS_RENDER_UNLOCK (render);
1343 GST_LOG_OBJECT (render, "text EOS");
/* Clip the buffer's [timestamp, stop) range to the subtitle segment;
 * a buffer without a duration gets an open-ended stop. */
1347 if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
1350 if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
1351 stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
1353 stop = GST_CLOCK_TIME_NONE;
1355 in_seg = gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME,
1356 GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
/* Rewrite timestamp/duration to the clipped values.
 * NOTE(review): the 'else if' means duration is only updated when the
 * timestamp is *invalid* — presumably both should be updated when valid;
 * elided lines may explain this, verify against the full source. */
1362 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1363 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1364 else if (GST_BUFFER_DURATION_IS_VALID (buffer))
1365 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
/* A pending buffer with bogus timing will never be popped by the video
 * chain on its own; drop it now and wake any waiter. */
1367 if (render->subtitle_pending
1368 && (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending)
1369 || !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending))) {
1370 gst_buffer_unref (render->subtitle_pending);
1371 render->subtitle_pending = NULL;
1372 GST_ASS_RENDER_BROADCAST (render);
1374 /* Wait for the previous buffer to go away */
1375 while (render->subtitle_pending != NULL) {
1376 GST_DEBUG ("Pad %s:%s has a buffer queued, waiting",
1377 GST_DEBUG_PAD_NAME (pad));
1378 GST_ASS_RENDER_WAIT (render);
1379 GST_DEBUG ("Pad %s:%s resuming", GST_DEBUG_PAD_NAME (pad));
/* Flush may have started while we slept on the condition variable. */
1380 if (render->subtitle_flushing) {
1381 GST_ASS_RENDER_UNLOCK (render);
1382 ret = GST_FLOW_FLUSHING;
/* Track how far the subtitle stream has advanced. */
1388 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1389 render->subtitle_segment.position = clip_start;
1391 GST_DEBUG_OBJECT (render,
1392 "New buffer arrived for timestamp %" GST_TIME_FORMAT,
1393 GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
/* Queue a ref for the video chain; need_process tells it to feed the
 * buffer to libass before rendering. */
1394 render->subtitle_pending = gst_buffer_ref (buffer);
1395 render->need_process = TRUE;
1397 /* in case the video chain is waiting for a text buffer, wake it up */
1398 GST_ASS_RENDER_BROADCAST (render);
1401 GST_ASS_RENDER_UNLOCK (render);
1404 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p", buffer);
/* We kept our own ref above (or are discarding the buffer); release the
 * caller's ref in all paths that reach here. */
1406 gst_buffer_unref (buffer);
/* Inspect one GST_TAG_ATTACHMENT sample and, if it looks like a font
 * (by MIME type or by filename extension), register it with libass via
 * ass_add_font() so embedded fonts can be used for rendering.
 * NOTE(review): lines are elided in this view (extension table entries,
 * some declarations and early returns); comments reflect visible code. */
1411 gst_ass_render_handle_tag_sample (GstAssRender * render, GstSample * sample)
/* Font MIME types we accept for attachments. */
1413 static const gchar *mimetypes[] = {
1414 "application/x-font-ttf",
1415 "application/x-font-otf",
1416 "application/x-truetype-font"
/* Fallback: accepted filename extensions (entries elided in this view). */
1418 static const gchar *extensions[] = {
1424 const GstStructure *structure;
1425 gboolean valid_mimetype, valid_extension;
1427 const gchar *filename;
1429 buf = gst_sample_get_buffer (sample);
1430 structure = gst_sample_get_info (sample);
/* Nothing to do without both the font data and its info structure. */
1432 if (!buf || !structure)
1435 valid_mimetype = FALSE;
1436 valid_extension = FALSE;
/* The structure name carries the attachment's MIME type. */
1438 for (i = 0; i < G_N_ELEMENTS (mimetypes); i++) {
1439 if (gst_structure_has_name (structure, mimetypes[i])) {
1440 valid_mimetype = TRUE;
1445 filename = gst_structure_get_string (structure, "filename");
/* MIME type didn't match: fall back to checking the last 4 characters
 * of the filename against the extension table.
 * NOTE(review): 'filename + len - 4' points before the string when
 * len < 4 — undefined behavior unless a guard exists in the elided
 * lines; confirm against the full source. */
1449 if (!valid_mimetype) {
1450 guint len = strlen (filename);
1451 const gchar *extension = filename + len - 4;
1452 for (i = 0; i < G_N_ELEMENTS (extensions); i++) {
1453 if (g_ascii_strcasecmp (extension, extensions[i]) == 0) {
1454 valid_extension = TRUE;
/* Hand the raw font bytes to libass; ass_mutex serializes all access
 * to the shared ass_library/ass_renderer state. */
1460 if (valid_mimetype || valid_extension) {
1463 g_mutex_lock (&render->ass_mutex);
1464 gst_buffer_map (buf, &map, GST_MAP_READ);
1465 ass_add_font (render->ass_library, (gchar *) filename,
1466 (gchar *) map.data, map.size);
1467 gst_buffer_unmap (buf, &map);
1468 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1469 g_mutex_unlock (&render->ass_mutex);
/* Walk all GST_TAG_ATTACHMENT samples in @taglist and feed each to
 * gst_ass_render_handle_tag_sample() so embedded fonts get registered.
 * Only active when the embeddedfonts property is enabled. */
1474 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1481 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1482 if (tag_size > 0 && render->embeddedfonts) {
1486 GST_DEBUG_OBJECT (render, "TAG event has attachments");
1488 for (index = 0; index < tag_size; index++) {
/* Each successfully fetched sample is ours to unref. */
1489 if (gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, index,
1491 gst_ass_render_handle_tag_sample (render, sample);
1492 gst_sample_unref (sample);
/* Event handler for the video sink pad.  CAPS and SEGMENT are consumed
 * here (event unreffed locally); TAG, EOS and FLUSH events update element
 * state and are then forwarded downstream via gst_pad_event_default().
 * NOTE(review): case 'break's and some braces are elided in this view. */
1499 gst_ass_render_event_video (GstPad * pad, GstObject * parent, GstEvent * event)
1501 gboolean ret = FALSE;
1502 GstAssRender *render = GST_ASS_RENDER (parent);
1504 GST_DEBUG_OBJECT (pad, "received video event %" GST_PTR_FORMAT, event);
1506 switch (GST_EVENT_TYPE (event)) {
1507 case GST_EVENT_CAPS:
1511 gst_event_parse_caps (event, &caps);
1512 ret = gst_ass_render_setcaps_video (pad, render, caps);
/* CAPS is fully handled here, not forwarded. */
1513 gst_event_unref (event);
1516 case GST_EVENT_SEGMENT:
1520 GST_DEBUG_OBJECT (render, "received new segment");
1522 gst_event_copy_segment (event, &segment);
/* Only TIME segments are usable for running-time comparisons with the
 * subtitle stream; other formats are rejected with a warning. */
1524 if (segment.format == GST_FORMAT_TIME) {
1525 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1526 &render->video_segment);
1528 render->video_segment = segment;
1530 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
1531 &render->video_segment);
1532 ret = gst_pad_event_default (pad, parent, event);
1534 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1535 ("received non-TIME newsegment event on video input"));
1537 gst_event_unref (event);
1543 GstTagList *taglist = NULL;
1545 /* tag events may contain attachments which might be fonts */
1546 GST_DEBUG_OBJECT (render, "got TAG event");
1548 gst_event_parse_tag (event, &taglist);
1549 gst_ass_render_handle_tags (render, taglist);
1550 ret = gst_pad_event_default (pad, parent, event);
/* EOS: flag it under the render lock so the chain functions stop. */
1554 GST_ASS_RENDER_LOCK (render);
1555 GST_INFO_OBJECT (render, "video EOS");
1556 render->video_eos = TRUE;
1557 GST_ASS_RENDER_UNLOCK (render);
1558 ret = gst_pad_event_default (pad, parent, event);
/* Flush start: set the flag and broadcast so a video chain blocked on
 * the condition variable wakes up and returns FLUSHING. */
1560 case GST_EVENT_FLUSH_START:
1561 GST_ASS_RENDER_LOCK (render);
1562 GST_INFO_OBJECT (render, "video flush start");
1563 render->video_flushing = TRUE;
1564 GST_ASS_RENDER_BROADCAST (render);
1565 GST_ASS_RENDER_UNLOCK (render);
1566 ret = gst_pad_event_default (pad, parent, event);
/* Flush stop: clear flags and reset the video segment to defaults. */
1568 case GST_EVENT_FLUSH_STOP:
1569 GST_ASS_RENDER_LOCK (render);
1570 GST_INFO_OBJECT (render, "video flush stop");
1571 render->video_flushing = FALSE;
1572 render->video_eos = FALSE;
1573 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
1574 GST_ASS_RENDER_UNLOCK (render);
1575 ret = gst_pad_event_default (pad, parent, event);
1578 ret = gst_pad_event_default (pad, parent, event);
/* Query handler for the video sink pad.  Answers CAPS queries with the
 * element's supported video sink caps (intersected with any filter);
 * everything else goes to the default handler. */
1586 gst_ass_render_query_video (GstPad * pad, GstObject * parent, GstQuery * query)
1588 gboolean res = FALSE;
1590 switch (GST_QUERY_TYPE (query)) {
1591 case GST_QUERY_CAPS:
1593 GstCaps *filter, *caps;
1595 gst_query_parse_caps (query, &filter);
1597 gst_ass_render_get_videosink_caps (pad, (GstAssRender *) parent,
/* The returned caps are owned by us: set them on the query, then drop
 * our reference. */
1599 gst_query_set_caps_result (query, caps);
1600 gst_caps_unref (caps);
1605 res = gst_pad_query_default (pad, parent, query);
/* Event handler for the subtitle (text) sink pad.  Most events here are
 * consumed (unreffed) rather than forwarded, since the text stream ends
 * inside this element; only TAG and the default case are forwarded.
 * State changes always broadcast the render condition so a video chain
 * blocked waiting for text wakes up.
 * NOTE(review): case labels/'break's are elided in this view. */
1613 gst_ass_render_event_text (GstPad * pad, GstObject * parent, GstEvent * event)
1616 gboolean ret = FALSE;
1617 GstAssRender *render = GST_ASS_RENDER (parent);
1619 GST_DEBUG_OBJECT (pad, "received text event %" GST_PTR_FORMAT, event);
1621 switch (GST_EVENT_TYPE (event)) {
1622 case GST_EVENT_CAPS:
1626 gst_event_parse_caps (event, &caps);
1627 ret = gst_ass_render_setcaps_text (pad, render, caps);
1628 gst_event_unref (event);
1631 case GST_EVENT_SEGMENT:
/* A new segment implicitly cancels a previous text EOS. */
1635 GST_ASS_RENDER_LOCK (render);
1636 render->subtitle_eos = FALSE;
1637 GST_ASS_RENDER_UNLOCK (render);
1639 gst_event_copy_segment (event, &segment);
1641 GST_ASS_RENDER_LOCK (render);
/* Only TIME segments can be compared against the video stream. */
1642 if (segment.format == GST_FORMAT_TIME) {
1643 GST_DEBUG_OBJECT (render, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
1644 &render->subtitle_segment);
1646 render->subtitle_segment = segment;
1648 GST_DEBUG_OBJECT (render,
1649 "TEXT SEGMENT after: %" GST_SEGMENT_FORMAT,
1650 &render->subtitle_segment);
1652 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1653 ("received non-TIME newsegment event on subtitle input"));
1656 gst_event_unref (event);
1659 /* wake up the video chain, it might be waiting for a text buffer or
1660 * a text segment update */
1661 GST_ASS_RENDER_BROADCAST (render);
1662 GST_ASS_RENDER_UNLOCK (render);
1665 case GST_EVENT_GAP:{
1666 GstClockTime start, duration;
1668 gst_event_parse_gap (event, &start, &duration);
1669 if (GST_CLOCK_TIME_IS_VALID (duration))
1671 /* we do not expect another buffer until after gap,
1672 * so that is our position now */
1673 GST_ASS_RENDER_LOCK (render);
1674 render->subtitle_segment.position = start;
1676 /* wake up the video chain, it might be waiting for a text buffer or
1677 * a text segment update */
1678 GST_ASS_RENDER_BROADCAST (render);
1679 GST_ASS_RENDER_UNLOCK (render);
1681 gst_event_unref (event);
/* Flush stop: clear flags, drop any pending text buffer and reset the
 * subtitle segment. */
1685 case GST_EVENT_FLUSH_STOP:
1686 GST_ASS_RENDER_LOCK (render);
1687 GST_INFO_OBJECT (render, "text flush stop");
1688 render->subtitle_flushing = FALSE;
1689 render->subtitle_eos = FALSE;
1690 gst_ass_render_pop_text (render);
1691 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
1692 GST_ASS_RENDER_UNLOCK (render);
1693 gst_event_unref (event);
/* Flush start: clear all queued events out of the libass track (under
 * ass_mutex), then flag flushing and wake any waiters. */
1696 case GST_EVENT_FLUSH_START:
1697 GST_DEBUG_OBJECT (render, "text flush start");
1698 g_mutex_lock (&render->ass_mutex);
1699 if (render->ass_track) {
1700 /* delete any events on the ass_track */
1701 for (i = 0; i < render->ass_track->n_events; i++) {
1702 GST_DEBUG_OBJECT (render, "deleted event with eid %i", i);
1703 ass_free_event (render->ass_track, i);
1705 render->ass_track->n_events = 0;
1706 GST_DEBUG_OBJECT (render, "done flushing");
1708 g_mutex_unlock (&render->ass_mutex);
1709 GST_ASS_RENDER_LOCK (render);
1710 render->subtitle_flushing = TRUE;
1711 GST_ASS_RENDER_BROADCAST (render);
1712 GST_ASS_RENDER_UNLOCK (render);
1713 gst_event_unref (event);
/* Text EOS: not forwarded — video may still be running. */
1717 GST_ASS_RENDER_LOCK (render);
1718 render->subtitle_eos = TRUE;
1719 GST_INFO_OBJECT (render, "text EOS");
1720 /* wake up the video chain, it might be waiting for a text buffer or
1721 * a text segment update */
1722 GST_ASS_RENDER_BROADCAST (render);
1723 GST_ASS_RENDER_UNLOCK (render);
1724 gst_event_unref (event);
1729 GstTagList *taglist = NULL;
1731 /* tag events may contain attachments which might be fonts */
1732 GST_DEBUG_OBJECT (render, "got TAG event");
1734 gst_event_parse_tag (event, &taglist);
1735 gst_ass_render_handle_tags (render, taglist);
1736 ret = gst_pad_event_default (pad, parent, event);
1740 ret = gst_pad_event_default (pad, parent, event);
/* Plugin entry point: set up the two debug categories (element + libass
 * library messages) and register the assrender element with PRIMARY rank
 * so it is auto-plugged for ASS/SSA subtitle streams. */
1748 plugin_init (GstPlugin * plugin)
1750 GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender",
1751 0, "ASS/SSA subtitle renderer");
1752 GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",
1753 0, "ASS/SSA subtitle renderer library");
1755 return gst_element_register (plugin, "assrender",
1756 GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER);
/* Standard GStreamer plugin descriptor boilerplate.
 * NOTE(review): interior macro arguments (minor version, plugin name)
 * are elided in this view. */
1759 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1762 "ASS/SSA subtitle renderer",
1763 plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)