2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
18 * Boston, MA 02111-1307, USA.
22 * SECTION:element-assrender
24 * Renders timestamped SSA/ASS subtitles on top of a video stream.
27 * <title>Example launch line</title>
29 * gst-launch -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mp3parse ! mad ! audioconvert ! autoaudiosink d. ! queue ! ffdec_h264 ! ffmpegcolorspace ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! ffmpegcolorspace ! autovideosink
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
39 #include "gstassrender.h"
/* Debug categories: one for the element itself, one dedicated to messages
 * forwarded from libass (see _libass_message_cb below). */
43 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
44 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
45 #define GST_CAT_DEFAULT gst_ass_render_debug
47 /* Filter signals and props */
/* Raw video formats the blit functions below can write into. */
60 #define FORMATS "{ RGB, BGR, xRGB, xBGR, RGBx, BGRx, I420 }"
/* Pad templates: one video source, one video sink, one ASS/SSA text sink. */
62 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
65 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
68 static GstStaticPadTemplate video_sink_factory =
69 GST_STATIC_PAD_TEMPLATE ("video_sink",
72 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
75 static GstStaticPadTemplate text_sink_factory =
76 GST_STATIC_PAD_TEMPLATE ("text_sink",
79 GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
/* GObject property plumbing and element vmethod forward declarations. */
82 static void gst_ass_render_set_property (GObject * object, guint prop_id,
83 const GValue * value, GParamSpec * pspec);
84 static void gst_ass_render_get_property (GObject * object, guint prop_id,
85 GValue * value, GParamSpec * pspec);
87 static void gst_ass_render_finalize (GObject * object);
89 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
90 GstStateChange transition);
92 #define gst_ass_render_parent_class parent_class
93 G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);
/* Pad function forward declarations (caps negotiation, chains, events,
 * queries); installed on the pads in gst_ass_render_init. */
95 static GstCaps *gst_ass_render_getcaps (GstPad * pad, GstCaps * filter);
97 static gboolean gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps);
98 static gboolean gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps);
100 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
101 GstObject * parent, GstBuffer * buf);
102 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad,
103 GstObject * parent, GstBuffer * buf);
105 static gboolean gst_ass_render_event_video (GstPad * pad, GstObject * parent,
107 static gboolean gst_ass_render_event_text (GstPad * pad, GstObject * parent,
109 static gboolean gst_ass_render_event_src (GstPad * pad, GstObject * parent,
112 static gboolean gst_ass_render_query_video (GstPad * pad, GstObject * parent,
114 static gboolean gst_ass_render_query_src (GstPad * pad, GstObject * parent,
117 /* initialize the plugin's class */
/* Class initializer: installs the "enable" and "embeddedfonts" properties
 * (both default TRUE), the change_state vmethod, the three pad templates,
 * and the element metadata. */
119 gst_ass_render_class_init (GstAssRenderClass * klass)
121 GObjectClass *gobject_class = (GObjectClass *) klass;
122 GstElementClass *gstelement_class = (GstElementClass *) klass;
124 gobject_class->set_property = gst_ass_render_set_property;
125 gobject_class->get_property = gst_ass_render_get_property;
126 gobject_class->finalize = gst_ass_render_finalize;
/* "enable": toggles subtitle rendering at runtime (passthrough when FALSE,
 * see chain_video). */
128 g_object_class_install_property (gobject_class, PROP_ENABLE,
129 g_param_spec_boolean ("enable", "Enable",
130 "Enable rendering of subtitles", TRUE,
131 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* "embeddedfonts": forwarded to ass_set_extract_fonts in set_property. */
132 g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
133 g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
134 "Extract and use fonts embedded in the stream", TRUE,
135 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
137 gstelement_class->change_state =
138 GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
140 gst_element_class_add_pad_template (gstelement_class,
141 gst_static_pad_template_get (&src_factory));
142 gst_element_class_add_pad_template (gstelement_class,
143 gst_static_pad_template_get (&video_sink_factory));
144 gst_element_class_add_pad_template (gstelement_class,
145 gst_static_pad_template_get (&text_sink_factory));
147 gst_element_class_set_details_simple (gstelement_class, "ASS/SSA Render",
148 "Mixer/Video/Overlay/Subtitle",
149 "Renders ASS/SSA subtitles with libass",
150 "Benjamin Schmitz <vortex@wolpzone.de>, "
151 "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
/* libass >= 0.9.7 message callback: formats the libass message and routes it
 * to the matching GStreamer debug level on the lib-specific category.
 * The level-dispatch branches (and the g_free of `message`) fall on lines
 * missing from this view. */
154 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
156 _libass_message_cb (gint level, const gchar * fmt, va_list args,
159 gchar *message = g_strdup_vprintf (fmt, args);
162 GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
164 GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
166 GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
168 GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
170 GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
/* Instance initializer: creates the three pads with their chain/event/query
 * functions, initializes segments and synchronization primitives, and brings
 * up the libass library + renderer. */
177 gst_ass_render_init (GstAssRender * render)
179 GST_DEBUG_OBJECT (render, "init");
181 render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
182 render->video_sinkpad =
183 gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
184 render->text_sinkpad =
185 gst_pad_new_from_static_template (&text_sink_factory, "text_sink");
187 gst_pad_set_chain_function (render->video_sinkpad,
188 GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
189 gst_pad_set_chain_function (render->text_sinkpad,
190 GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));
192 gst_pad_set_event_function (render->video_sinkpad,
193 GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
194 gst_pad_set_event_function (render->text_sinkpad,
195 GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
196 gst_pad_set_event_function (render->srcpad,
197 GST_DEBUG_FUNCPTR (gst_ass_render_event_src));
199 gst_pad_set_query_function (render->srcpad,
200 GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
201 gst_pad_set_query_function (render->video_sinkpad,
202 GST_DEBUG_FUNCPTR (gst_ass_render_query_video));
204 gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
205 gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
206 gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);
208 gst_video_info_init (&render->info);
/* subtitle_mutex/cond: protect subtitle_pending / subtitle_flushing and
 * unblock a text chain waiting in chain_text. (g_mutex_new/g_cond_new are
 * the pre-2.32 GLib allocation API, consistent throughout this file.) */
210 render->subtitle_mutex = g_mutex_new ();
211 render->subtitle_cond = g_cond_new ();
213 render->renderer_init_ok = FALSE;
214 render->track_init_ok = FALSE;
215 render->enable = TRUE;
216 render->embeddedfonts = TRUE;
218 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
219 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
/* ass_mutex serializes all access to ass_library/ass_renderer/ass_track,
 * which libass does not make thread-safe by itself. */
221 render->ass_mutex = g_mutex_new ();
222 render->ass_library = ass_library_init ();
223 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
224 ass_set_message_cb (render->ass_library, _libass_message_cb, render);
226 ass_set_extract_fonts (render->ass_library, 1);
228 render->ass_renderer = ass_renderer_init (render->ass_library);
229 if (!render->ass_renderer) {
230 GST_WARNING_OBJECT (render, "cannot create renderer instance");
/* NOTE(review): aborting the process on renderer-init failure is harsh for
 * a plugin; a failure path that disables the element would be friendlier. */
231 g_assert_not_reached ();
234 render->ass_track = NULL;
236 GST_DEBUG_OBJECT (render, "init complete");
/* GObject finalize: releases the sync primitives and the libass track,
 * renderer and library (in that order — track before renderer before
 * library), then chains up to the parent class. */
240 gst_ass_render_finalize (GObject * object)
242 GstAssRender *render = GST_ASS_RENDER (object);
244 if (render->subtitle_mutex)
245 g_mutex_free (render->subtitle_mutex);
247 if (render->subtitle_cond)
248 g_cond_free (render->subtitle_cond);
250 if (render->ass_track) {
251 ass_free_track (render->ass_track);
254 if (render->ass_renderer) {
255 ass_renderer_done (render->ass_renderer);
258 if (render->ass_library) {
259 ass_library_done (render->ass_library);
262 if (render->ass_mutex)
263 g_mutex_free (render->ass_mutex);
265 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Property setter for "enable" and "embeddedfonts". The embeddedfonts value
 * is pushed straight into libass under ass_mutex. */
269 gst_ass_render_set_property (GObject * object, guint prop_id,
270 const GValue * value, GParamSpec * pspec)
272 GstAssRender *render = GST_ASS_RENDER (object);
/* NOTE(review): "enable" is read from chain_video without a lock; presumably
 * acceptable for a boolean toggle, but worth confirming. */
276 render->enable = g_value_get_boolean (value);
278 case PROP_EMBEDDEDFONTS:
279 render->embeddedfonts = g_value_get_boolean (value);
280 g_mutex_lock (render->ass_mutex);
281 ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
282 g_mutex_unlock (render->ass_mutex);
285 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Property getter mirroring set_property: reports "enable" and
 * "embeddedfonts" from the instance fields. */
291 gst_ass_render_get_property (GObject * object, guint prop_id,
292 GValue * value, GParamSpec * pspec)
294 GstAssRender *render = GST_ASS_RENDER (object);
298 g_value_set_boolean (value, render->enable);
300 case PROP_EMBEDDEDFONTS:
301 g_value_set_boolean (value, render->embeddedfonts);
304 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* State-change handler.
 * Upward (before chaining up): READY->PAUSED resets flushing flag and both
 * segments. PAUSED->READY (handled here on the way down, before the parent
 * call) drops any pending subtitle buffer and signals subtitle_cond so a
 * text chain blocked in chain_text can exit.
 * Downward (after chaining up): PAUSED->READY frees the ass_track under
 * ass_mutex and clears both init flags. */
309 static GstStateChangeReturn
310 gst_ass_render_change_state (GstElement * element, GstStateChange transition)
312 GstAssRender *render = GST_ASS_RENDER (element);
313 GstStateChangeReturn ret;
315 switch (transition) {
316 case GST_STATE_CHANGE_READY_TO_PAUSED:
317 render->subtitle_flushing = FALSE;
318 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
319 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
321 case GST_STATE_CHANGE_NULL_TO_READY:
322 case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
326 case GST_STATE_CHANGE_PAUSED_TO_READY:
/* Unblock the text streaming thread before tearing down. */
327 g_mutex_lock (render->subtitle_mutex);
328 render->subtitle_flushing = TRUE;
329 if (render->subtitle_pending)
330 gst_buffer_unref (render->subtitle_pending);
331 render->subtitle_pending = NULL;
332 g_cond_signal (render->subtitle_cond);
333 g_mutex_unlock (render->subtitle_mutex);
337 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
339 switch (transition) {
340 case GST_STATE_CHANGE_PAUSED_TO_READY:
341 g_mutex_lock (render->ass_mutex);
342 if (render->ass_track)
343 ass_free_track (render->ass_track);
344 render->ass_track = NULL;
345 g_mutex_unlock (render->ass_mutex);
346 render->track_init_ok = FALSE;
347 render->renderer_init_ok = FALSE;
349 case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
350 case GST_STATE_CHANGE_READY_TO_NULL:
/* Source-pad query handler: answers CAPS queries via gst_ass_render_getcaps
 * (proxying the video sink peer's caps) and defers everything else to the
 * default pad query handler. */
360 gst_ass_render_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
362 gboolean res = FALSE;
364 switch (GST_QUERY_TYPE (query)) {
367 GstCaps *filter, *caps;
369 gst_query_parse_caps (query, &filter);
370 caps = gst_ass_render_getcaps (pad, filter);
371 gst_query_set_caps_result (query, caps);
372 gst_caps_unref (caps);
377 res = gst_pad_query_default (pad, parent, query);
/* Source-pad event handler. SEEK events are handled specially: flush
 * downstream (for flushing seeks), abort any pending subtitle wait, then
 * forward the seek to both sink pads. Other events are fanned out to both
 * sink pads as well. */
385 gst_ass_render_event_src (GstPad * pad, GstObject * parent, GstEvent * event)
387 GstAssRender *render = GST_ASS_RENDER (parent);
388 gboolean ret = FALSE;
390 switch (GST_EVENT_TYPE (event)) {
391 case GST_EVENT_SEEK:{
394 GST_DEBUG_OBJECT (render, "seek received, driving from here");
396 gst_event_parse_seek (event, NULL, NULL, &flags, NULL, NULL, NULL, NULL);
398 /* Flush downstream, only for flushing seek */
399 if (flags & GST_SEEK_FLAG_FLUSH)
400 gst_pad_push_event (render->srcpad, gst_event_new_flush_start ());
402 /* Mark subtitle as flushing, unblocks chains */
403 g_mutex_lock (render->subtitle_mutex);
404 if (render->subtitle_pending)
405 gst_buffer_unref (render->subtitle_pending);
406 render->subtitle_pending = NULL;
407 render->subtitle_flushing = TRUE;
408 g_cond_signal (render->subtitle_cond);
409 g_mutex_unlock (render->subtitle_mutex);
411 /* Seek on each sink pad */
/* The extra ref balances the two pushes; each push consumes one ref. */
412 gst_event_ref (event);
413 ret = gst_pad_push_event (render->video_sinkpad, event);
415 ret = gst_pad_push_event (render->text_sinkpad, event);
417 gst_event_unref (event);
/* Default: duplicate the event to both sink pads; only the video pad's
 * return value is propagated. */
422 gst_event_ref (event);
423 ret = gst_pad_push_event (render->video_sinkpad, event);
424 gst_pad_push_event (render->text_sinkpad, event);
/* Caps proxy between srcpad and video_sinkpad: query the opposite pad's
 * peer, intersect with that pad's template; if there is no peer, fall back
 * to a copy of this pad's own template caps. Returns a new caps ref. */
432 gst_ass_render_getcaps (GstPad * pad, GstCaps * filter)
434 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
438 if (pad == render->srcpad)
439 otherpad = render->video_sinkpad;
441 otherpad = render->srcpad;
443 /* we can do what the peer can */
444 caps = gst_pad_peer_query_caps (otherpad, filter);
447 const GstCaps *templ;
449 /* filtered against our padtemplate */
450 templ = gst_pad_get_pad_template_caps (otherpad);
451 temp = gst_caps_intersect (caps, templ);
452 gst_caps_unref (caps);
453 /* this is what we can do */
456 /* no peer, our padtemplate is enough then */
457 caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
/* Balances the ref taken by gst_pad_get_parent above. */
460 gst_object_unref (render);
/* Generator macro for packed-RGB blitters blit_<name>(). Parameters:
 *   bpp     - bytes per pixel (3 for RGB/BGR, 4 for the x-variants)
 *   R, G, B - byte offsets of each channel within a pixel
 * Each generated function walks the ASS_Image list and alpha-blends the
 * 8-bit coverage bitmap (tinted with the image's RGBA color) onto the
 * mapped video frame. Images starting outside the frame are skipped;
 * width/height are clipped to the frame. */
465 #define CREATE_RGB_BLIT_FUNCTION(name,bpp,R,G,B) \
467 blit_##name (GstAssRender * render, ASS_Image * ass_image, GstVideoFrame * frame) \
470 gint alpha, r, g, b, k; \
472 guint8 *dst, *data; \
480 width = GST_VIDEO_FRAME_WIDTH (frame); \
481 height = GST_VIDEO_FRAME_HEIGHT (frame); \
482 dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0); \
483 data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
485 while (ass_image) { \
486 if (ass_image->dst_y > height || ass_image->dst_x > width) \
489 /* blend subtitles onto the video frame */ \
/* ASS color is packed RGBA; alpha is inverted (0 = opaque in libass). */ \
490 alpha = 255 - ((ass_image->color) & 0xff); \
491 r = ((ass_image->color) >> 24) & 0xff; \
492 g = ((ass_image->color) >> 16) & 0xff; \
493 b = ((ass_image->color) >> 8) & 0xff; \
494 src = ass_image->bitmap; \
495 dst = data + ass_image->dst_y * dst_stride + ass_image->dst_x * bpp; \
497 w = MIN (ass_image->w, width - ass_image->dst_x); \
498 h = MIN (ass_image->h, height - ass_image->dst_y); \
/* Per-row skips advance src (coverage bitmap) and dst (frame) to the \
 * next row after the clipped width has been processed. */ \
499 src_skip = ass_image->stride - w; \
500 dst_skip = dst_stride - w * bpp; \
502 for (y = 0; y < h; y++) { \
503 for (x = 0; x < w; x++) { \
504 k = src[0] * alpha / 255; \
505 dst[R] = (k * r + (255 - k) * dst[R]) / 255; \
506 dst[G] = (k * g + (255 - k) * dst[G]) / 255; \
507 dst[B] = (k * b + (255 - k) * dst[B]) / 255; \
516 ass_image = ass_image->next; \
518 GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter); \
/* Instantiations: channel offsets for every packed-RGB layout in FORMATS. */
521 CREATE_RGB_BLIT_FUNCTION (rgb, 3, 0, 1, 2);
522 CREATE_RGB_BLIT_FUNCTION (bgr, 3, 2, 1, 0);
523 CREATE_RGB_BLIT_FUNCTION (xrgb, 4, 1, 2, 3);
524 CREATE_RGB_BLIT_FUNCTION (xbgr, 4, 3, 2, 1);
525 CREATE_RGB_BLIT_FUNCTION (rgbx, 4, 0, 1, 2);
526 CREATE_RGB_BLIT_FUNCTION (bgrx, 4, 2, 1, 0);
528 #undef CREATE_RGB_BLIT_FUNCTION
/* Fixed-point RGB -> Y'CbCr conversion helpers used by blit_i420.
 * Coefficients are BT.601 weights scaled by 65536 (e.g. 19595/65536 ~ 0.299);
 * results are clamped to [0, 255]. The chroma variants add the +128 offset
 * on lines missing from this view. */
531 rgb_to_y (gint r, gint g, gint b)
535 ret = (gint) (((19595 * r) >> 16) + ((38470 * g) >> 16) + ((7471 * b) >> 16));
536 ret = CLAMP (ret, 0, 255);
541 rgb_to_u (gint r, gint g, gint b)
546 (gint) (-((11059 * r) >> 16) - ((21709 * g) >> 16) + ((32768 * b) >> 16) +
548 ret = CLAMP (ret, 0, 255);
553 rgb_to_v (gint r, gint g, gint b)
558 (gint) (((32768 * r) >> 16) - ((27439 * g) >> 16) - ((5329 * b) >> 16) +
560 ret = CLAMP (ret, 0, 255);
/* I420 (planar 4:2:0) blitter. Per ASS_Image: convert the tint color to
 * Y/U/V once, blend luma per pixel, then blend chroma at half resolution.
 * Because chroma is subsampled 2x2, the code handles odd starting row/column
 * and odd trailing row/column separately: coverage values of the 1, 2 or 4
 * luma pixels sharing a chroma sample are accumulated into k2 before
 * blending. Statement order and the x/y carry-over between the edge loops
 * are load-bearing; do not reorder. */
565 blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstVideoFrame * frame)
568 gint alpha, r, g, b, k, k2;
571 guint8 *dst_y, *dst_u, *dst_v;
573 /* FIXME ignoring source image stride might be wrong here */
579 guint8 *y_data, *u_data, *v_data;
580 gint y_stride, u_stride, v_stride;
582 width = GST_VIDEO_FRAME_WIDTH (frame);
583 height = GST_VIDEO_FRAME_HEIGHT (frame);
585 y_data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
586 u_data = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
587 v_data = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
589 y_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
590 u_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);
591 v_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 2);
/* Skip images starting outside the frame. */
594 if (ass_image->dst_y > height || ass_image->dst_x > width)
597 /* blend subtitles onto the video frame */
/* libass packs color as RGBA with inverted alpha (0 = opaque). */
598 alpha = 255 - ((ass_image->color) & 0xff);
599 r = ((ass_image->color) >> 24) & 0xff;
600 g = ((ass_image->color) >> 16) & 0xff;
601 b = ((ass_image->color) >> 8) & 0xff;
603 Y = rgb_to_y (r, g, b);
604 U = rgb_to_u (r, g, b);
605 V = rgb_to_v (r, g, b);
/* Clip blit rectangle to the frame. */
607 w = MIN (ass_image->w, width - ass_image->dst_x);
608 h = MIN (ass_image->h, height - ass_image->dst_y);
613 src_stride = ass_image->stride;
616 src = ass_image->bitmap;
618 dst_y = y_data + ass_image->dst_y * y_stride + ass_image->dst_x;
619 dst_u = u_data + (ass_image->dst_y / 2) * u_stride + ass_image->dst_x / 2;
620 dst_v = v_data + (ass_image->dst_y / 2) * v_stride + ass_image->dst_x / 2;
/* Luma: full-resolution blend, coverage k scaled by the image alpha. */
623 for (y = 0; y < h; y++) {
624 dst_y = y_data + (ass_image->dst_y + y) * y_stride + ass_image->dst_x;
625 for (x = 0; x < w; x++) {
626 k = src[y * ass_image->w + x] * alpha / 255;
627 dst_y[x] = (k * Y + (255 - k) * dst_y[x]) / 255;
/* Chroma, odd first row: this row shares its chroma samples with the row
 * above the image, so blend a single row's coverage. */
632 if (ass_image->dst_y & 1) {
633 dst_u = u_data + (ass_image->dst_y / 2) * u_stride + ass_image->dst_x / 2;
634 dst_v = v_data + (ass_image->dst_y / 2) * v_stride + ass_image->dst_x / 2;
/* Odd first column within the odd row: single pixel. */
636 if (ass_image->dst_x & 1) {
637 k2 = src[y * ass_image->w + x] * alpha / 255;
639 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
640 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Aligned pairs within the odd row: average two horizontal pixels. */
645 for (; x < w - 1; x += 2) {
646 k2 = src[y * ass_image->w + x] * alpha / 255;
647 k2 += src[y * ass_image->w + x + 1] * alpha / 255;
649 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
650 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Odd trailing column within the odd row. */
655 k2 = src[y * ass_image->w + x] * alpha / 255;
657 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
658 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Chroma, aligned row pairs: each chroma sample covers a 2x2 luma block. */
662 for (; y < h - 1; y += 2) {
663 dst_u = u_data + ((ass_image->dst_y + y) / 2) * u_stride +
664 ass_image->dst_x / 2;
665 dst_v = v_data + ((ass_image->dst_y + y) / 2) * v_stride +
666 ass_image->dst_x / 2;
/* Odd first column: 2x1 block. */
668 if (ass_image->dst_x & 1) {
669 k2 = src[y * ass_image->w + x] * alpha / 255;
670 k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
672 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
673 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Full 2x2 blocks: accumulate all four coverage values. */
678 for (; x < w - 1; x += 2) {
679 k2 = src[y * ass_image->w + x] * alpha / 255;
680 k2 += src[y * ass_image->w + x + 1] * alpha / 255;
681 k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
682 k2 += src[(y + 1) * ass_image->w + x + 1] * alpha / 255;
684 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
685 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Odd trailing column: 2x1 block. */
690 k2 = src[y * ass_image->w + x] * alpha / 255;
691 k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
693 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
694 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Chroma, odd trailing row: same structure as the odd first row. */
699 dst_u = u_data + (ass_image->dst_y / 2) * u_stride + ass_image->dst_x / 2;
700 dst_v = v_data + (ass_image->dst_y / 2) * v_stride + ass_image->dst_x / 2;
702 if (ass_image->dst_x & 1) {
703 k2 = src[y * ass_image->w + x] * alpha / 255;
705 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
706 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
711 for (; x < w - 1; x += 2) {
712 k2 = src[y * ass_image->w + x] * alpha / 255;
713 k2 += src[y * ass_image->w + x + 1] * alpha / 255;
715 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
716 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
721 k2 = src[y * ass_image->w + x] * alpha / 255;
723 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
724 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
732 ass_image = ass_image->next;
735 GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
/* Video sink caps handler: parses the caps into a GstVideoInfo, forwards
 * the caps downstream, selects the matching blit function, and configures
 * the libass renderer (frame size, aspect ratio, fonts, margins).
 * Returns FALSE on unparsable caps or unsupported format. */
739 gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps)
741 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
742 gboolean ret = FALSE;
743 gint par_n = 1, par_d = 1;
747 if (!gst_video_info_from_caps (&info, caps))
752 ret = gst_pad_set_caps (render->srcpad, caps);
/* Pick the blitter matching the negotiated raw format (see FORMATS). */
756 switch (GST_VIDEO_INFO_FORMAT (&info)) {
757 case GST_VIDEO_FORMAT_RGB:
758 render->blit = blit_rgb;
760 case GST_VIDEO_FORMAT_BGR:
761 render->blit = blit_bgr;
763 case GST_VIDEO_FORMAT_xRGB:
764 render->blit = blit_xrgb;
766 case GST_VIDEO_FORMAT_xBGR:
767 render->blit = blit_xbgr;
769 case GST_VIDEO_FORMAT_RGBx:
770 render->blit = blit_rgbx;
772 case GST_VIDEO_FORMAT_BGRx:
773 render->blit = blit_bgrx;
775 case GST_VIDEO_FORMAT_I420:
776 render->blit = blit_i420;
783 g_mutex_lock (render->ass_mutex);
784 ass_set_frame_size (render->ass_renderer, info.width, info.height);
/* Display aspect ratio = PAR * width/height; libass >= 0.9.7 also takes
 * the storage aspect ratio as a second argument. */
786 dar = (((gdouble) par_n) * ((gdouble) info.width))
787 / (((gdouble) par_d) * ((gdouble) info.height));
788 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
789 ass_set_aspect_ratio (render->ass_renderer, dar);
791 ass_set_aspect_ratio (render->ass_renderer,
792 dar, ((gdouble) info.width) / ((gdouble) info.height));
794 ass_set_font_scale (render->ass_renderer, 1.0);
795 ass_set_hinting (render->ass_renderer, ASS_HINTING_LIGHT);
/* Two ass_set_fonts calls per branch; presumably one of each pair is
 * commented out on lines missing from this view — verify upstream. */
797 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
798 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif");
799 ass_set_fonts (render->ass_renderer, NULL, "Sans");
801 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
802 ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
804 ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
805 ass_set_use_margins (render->ass_renderer, 0);
806 g_mutex_unlock (render->ass_mutex);
808 render->renderer_init_ok = TRUE;
810 GST_DEBUG_OBJECT (render, "ass renderer setup complete");
813 gst_object_unref (render);
820 GST_ERROR_OBJECT (render, "Can't parse caps: %" GST_PTR_FORMAT, caps);
/* Text sink caps handler: creates the ass_track and, when the caps carry a
 * "codec_data" buffer (Matroska CodecPrivate with the [Script Info] and
 * style sections), feeds it to ass_process_codec_private. */
827 gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps)
829 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
830 GstStructure *structure;
833 gchar *codec_private;
834 gsize codec_private_size;
835 gboolean ret = FALSE;
837 structure = gst_caps_get_structure (caps, 0);
839 GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,
842 value = gst_structure_get_value (structure, "codec_data");
844 g_mutex_lock (render->ass_mutex);
846 priv = gst_value_get_buffer (value);
/* NOTE(review): this early return fires with ass_mutex still locked and
 * leaks the ref taken by gst_pad_get_parent — should unlock/unref on this
 * path instead of using g_return_val_if_fail here. */
847 g_return_val_if_fail (priv != NULL, FALSE);
850 gst_buffer_map (priv, &codec_private_size, NULL, GST_MAP_READ);
852 if (!render->ass_track)
853 render->ass_track = ass_new_track (render->ass_library);
855 ass_process_codec_private (render->ass_track,
856 codec_private, codec_private_size);
858 gst_buffer_unmap (priv, codec_private, codec_private_size);
860 GST_DEBUG_OBJECT (render, "ass track created");
862 render->track_init_ok = TRUE;
/* No codec_data: start with an empty track; chunks arrive via chain_text. */
865 } else if (!render->ass_track) {
866 render->ass_track = ass_new_track (render->ass_library);
868 render->track_init_ok = TRUE;
872 g_mutex_unlock (render->ass_mutex);
874 gst_object_unref (render);
/* Feed one subtitle buffer into the ass_track. libass expects start/end
 * timestamps in milliseconds, so running time and duration are divided by
 * GST_MSECOND. Consumes (unrefs) the buffer. */
881 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
882 GstClockTime running_time, GstClockTime duration)
886 gdouble pts_start, pts_end;
888 pts_start = running_time;
889 pts_start /= GST_MSECOND;
891 pts_end /= GST_MSECOND;
893 GST_DEBUG_OBJECT (render,
894 "Processing subtitles with running time %" GST_TIME_FORMAT
895 " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
896 GST_TIME_ARGS (duration));
898 data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
900 g_mutex_lock (render->ass_mutex);
901 ass_process_chunk (render->ass_track, data, size, pts_start, pts_end);
902 g_mutex_unlock (render->ass_mutex);
904 gst_buffer_unmap (buffer, data, size);
905 gst_buffer_unref (buffer);
/* Video chain function.
 * 1. Drop untimestamped buffers; clip the buffer to the video segment
 *    (handling the no-duration case explicitly, since gst_segment_clip
 *    would otherwise clamp start up to segment start).
 * 2. If a subtitle buffer is parked in subtitle_pending, compare its running
 *    time to this frame's: drop it if it ended in the past, or feed it to
 *    libass if it starts within ~0.5 s of this frame's end; either way
 *    signal subtitle_cond so chain_text can continue.
 * 3. If renderer+track are initialized and rendering is enabled, ask libass
 *    for the frame's subtitle images and blend them in-place with the
 *    negotiated blit function; otherwise pass the buffer through.
 * Pushes the (possibly modified) buffer on the srcpad. */
909 gst_ass_render_chain_video (GstPad * pad, GstObject * parent,
912 GstAssRender *render = GST_ASS_RENDER (parent);
913 GstFlowReturn ret = GST_FLOW_OK;
914 gboolean in_seg = FALSE;
915 guint64 start, stop, clip_start = 0, clip_stop = 0;
916 ASS_Image *ass_image;
918 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
919 GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
920 gst_buffer_unref (buffer);
924 /* ignore buffers that are outside of the current segment */
925 start = GST_BUFFER_TIMESTAMP (buffer);
927 if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
928 stop = GST_CLOCK_TIME_NONE;
930 stop = start + GST_BUFFER_DURATION (buffer);
933 /* segment_clip() will adjust start unconditionally to segment_start if
934 * no stop time is provided, so handle this ourselves */
935 if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
939 gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
940 &clip_start, &clip_stop);
945 /* if the buffer is only partially in the segment, fix up stamps */
946 if (clip_start != start || (stop != -1 && clip_stop != stop)) {
947 GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
948 buffer = gst_buffer_make_writable (buffer);
949 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
951 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
954 render->video_segment.position = clip_start;
/* Hand off a parked subtitle buffer, if its window overlaps this frame. */
956 g_mutex_lock (render->subtitle_mutex);
957 if (render->subtitle_pending) {
958 GstClockTime sub_running_time, vid_running_time;
959 GstClockTime sub_running_time_end, vid_running_time_end;
962 gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
963 GST_BUFFER_TIMESTAMP (render->subtitle_pending));
964 sub_running_time_end =
965 gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
966 GST_BUFFER_TIMESTAMP (render->subtitle_pending) +
967 GST_BUFFER_DURATION (render->subtitle_pending));
969 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
970 GST_BUFFER_TIMESTAMP (buffer));
971 vid_running_time_end =
972 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
973 GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer));
975 if (sub_running_time_end < vid_running_time) {
976 gst_buffer_unref (render->subtitle_pending);
977 GST_DEBUG_OBJECT (render,
978 "Too late text buffer, dropping (%" GST_TIME_FORMAT " < %"
979 GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time_end),
980 GST_TIME_ARGS (vid_running_time));
981 render->subtitle_pending = NULL;
982 g_cond_signal (render->subtitle_cond);
/* Within half a second of this frame: close enough, process it now. */
983 } else if (sub_running_time <= vid_running_time_end + GST_SECOND / 2) {
984 gst_ass_render_process_text (render, render->subtitle_pending,
985 sub_running_time, sub_running_time_end - sub_running_time);
986 render->subtitle_pending = NULL;
987 g_cond_signal (render->subtitle_cond);
990 g_mutex_unlock (render->subtitle_mutex);
992 /* now start rendering subtitles, if all conditions are met */
993 if (render->renderer_init_ok && render->track_init_ok && render->enable) {
994 GstClockTime running_time;
996 #ifndef GST_DISABLE_GST_DEBUG
1001 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1002 GST_BUFFER_TIMESTAMP (buffer));
1003 GST_DEBUG_OBJECT (render,
1004 "rendering frame for running time %" GST_TIME_FORMAT,
1005 GST_TIME_ARGS (running_time));
1006 /* libass needs timestamps in ms */
1007 timestamp = running_time / GST_MSECOND;
1009 g_mutex_lock (render->ass_mutex);
1010 #ifndef GST_DISABLE_GST_DEBUG
1011 /* only for testing right now. could possibly be used for optimizations? */
1012 step = ass_step_sub (render->ass_track, timestamp, 1);
1013 GST_DEBUG_OBJECT (render, "Current running time: %" GST_TIME_FORMAT
1014 " // Next event: %" GST_TIME_FORMAT,
1015 GST_TIME_ARGS (running_time), GST_TIME_ARGS (step * GST_MSECOND));
1018 /* not sure what the last parameter to this call is for (detect_change) */
1019 ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
1021 g_mutex_unlock (render->ass_mutex);
1023 if (ass_image != NULL) {
1024 GstVideoFrame frame;
/* Make the buffer writable before blending subtitles into it in place. */
1026 buffer = gst_buffer_make_writable (buffer);
1028 gst_video_frame_map (&frame, &render->info, buffer, GST_MAP_WRITE);
1029 render->blit (render, ass_image, &frame);
1030 gst_video_frame_unmap (&frame);
1032 GST_LOG_OBJECT (render, "nothing to render right now");
1035 GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");
1038 ret = gst_pad_push (render->srcpad, buffer);
1044 GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
1045 gst_buffer_unref (buffer);
/* Text chain function: clips the subtitle buffer to the subtitle segment,
 * then synchronizes against the video position:
 *  - more than ~0.5 s ahead of the video: park the buffer in
 *    subtitle_pending and block on subtitle_cond until chain_video consumes
 *    it (or a flush/state change aborts the wait);
 *  - already ended before the video position: drop it;
 *  - otherwise: feed it to libass immediately. */
1050 static GstFlowReturn
1051 gst_ass_render_chain_text (GstPad * pad, GstObject * parent, GstBuffer * buffer)
1053 GstFlowReturn ret = GST_FLOW_OK;
1054 GstAssRender *render = GST_ASS_RENDER (parent);
1055 GstClockTime timestamp, duration;
1056 GstClockTime sub_running_time, vid_running_time;
1057 GstClockTime sub_running_time_end;
1058 guint64 cstart, cstop;
/* NOTE(review): subtitle_flushing is read here without subtitle_mutex;
 * the locked re-check further down covers the race for the waiting path. */
1061 if (render->subtitle_flushing) {
1062 gst_buffer_unref (buffer);
1063 return GST_FLOW_WRONG_STATE;
1066 timestamp = GST_BUFFER_TIMESTAMP (buffer);
1067 duration = GST_BUFFER_DURATION (buffer);
1069 if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp)
1070 || !GST_CLOCK_TIME_IS_VALID (duration))) {
1071 GST_WARNING_OBJECT (render,
1072 "Text buffer without valid timestamp" " or duration, dropping");
1073 gst_buffer_unref (buffer);
1078 gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME, timestamp,
1079 timestamp + duration, &cstart, &cstop);
1081 GST_DEBUG_OBJECT (render,
1082 "Text buffer before segment start (%" GST_TIME_FORMAT " < %"
1083 GST_TIME_FORMAT ")", GST_TIME_ARGS (timestamp),
1084 GST_TIME_ARGS (render->subtitle_segment.start));
1085 gst_buffer_unref (buffer);
1089 GST_BUFFER_TIMESTAMP (buffer) = timestamp = cstart;
1090 GST_BUFFER_DURATION (buffer) = duration = cstop - cstart;
1092 render->subtitle_segment.position = GST_BUFFER_TIMESTAMP (buffer);
1095 gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
1097 sub_running_time_end =
1098 gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
1099 timestamp + duration);
1101 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1102 render->video_segment.position);
/* Pad the video position by one frame duration when the framerate is known,
 * so a subtitle for the very next frame is not parked unnecessarily. */
1104 if (render->info.fps_n && render->info.fps_d)
1106 gst_util_uint64_scale (GST_SECOND, render->info.fps_d,
1107 render->info.fps_n);
1109 if (sub_running_time > vid_running_time + GST_SECOND / 2) {
/* NOTE(review): this assert reads subtitle_pending BEFORE taking
 * subtitle_mutex — racy against chain_video clearing it; should be moved
 * inside the locked region (or dropped). */
1110 g_assert (render->subtitle_pending == NULL);
1111 g_mutex_lock (render->subtitle_mutex);
1112 if (G_UNLIKELY (render->subtitle_flushing)) {
1113 GST_DEBUG_OBJECT (render, "Text pad flushing");
1114 gst_buffer_unref (buffer);
1115 g_mutex_unlock (render->subtitle_mutex);
1116 return GST_FLOW_WRONG_STATE;
1118 GST_DEBUG_OBJECT (render,
1119 "Too early text buffer, waiting (%" GST_TIME_FORMAT " > %"
1120 GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time),
1121 GST_TIME_ARGS (vid_running_time));
1122 render->subtitle_pending = buffer;
/* Block until chain_video consumes/drops the buffer or flushing starts. */
1123 g_cond_wait (render->subtitle_cond, render->subtitle_mutex);
1124 g_mutex_unlock (render->subtitle_mutex);
1125 } else if (sub_running_time_end < vid_running_time) {
1126 GST_DEBUG_OBJECT (render,
1127 "Too late text buffer, dropping (%" GST_TIME_FORMAT " < %"
1128 GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time_end),
1129 GST_TIME_ARGS (vid_running_time));
1130 gst_buffer_unref (buffer);
1133 gst_ass_render_process_text (render, buffer, sub_running_time,
1134 sub_running_time_end - sub_running_time);
1138 GST_DEBUG_OBJECT (render,
1139 "processed text packet with timestamp %" GST_TIME_FORMAT
1140 " and duration %" GST_TIME_FORMAT,
1141 GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration));
/* Scan a tag list for GST_TAG_ATTACHMENT entries that look like fonts
 * (by MIME type, or by file extension as a fallback) and register each one
 * with libass via ass_add_font, so embedded Matroska fonts can be used for
 * rendering. Only active when the "embeddedfonts" property is TRUE. */
1147 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1150 static const gchar *mimetypes[] = {
1151 "application/x-font-ttf",
1152 "application/x-font-otf",
1153 "application/x-truetype-font"
/* The extension table contents fall on lines missing from this view;
 * presumably ".ttf"/".otf"-style suffixes — verify upstream. */
1155 static const gchar *extensions[] = {
1165 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1166 if (tag_size > 0 && render->embeddedfonts) {
1168 const GValue *value;
1171 GstStructure *structure;
1172 gboolean valid_mimetype, valid_extension;
1174 const gchar *filename;
1178 GST_DEBUG_OBJECT (render, "TAG event has attachments");
1180 for (index = 0; index < tag_size; index++) {
1182 value = gst_tag_list_get_value_index (taglist, GST_TAG_ATTACHMENT, index);
1183 buf = gst_value_get_buffer (value);
1184 if (!buf || !GST_BUFFER_CAPS (buf))
1187 caps = GST_BUFFER_CAPS (buf);
1188 structure = gst_caps_get_structure (caps, 0);
1190 valid_mimetype = FALSE;
1191 valid_extension = FALSE;
1193 for (j = 0; j < G_N_ELEMENTS (mimetypes); j++) {
1194 if (gst_structure_has_name (structure, mimetypes[j])) {
1195 valid_mimetype = TRUE;
1199 filename = gst_structure_get_string (structure, "filename");
/* Fallback: match on the last four characters of the filename. */
1203 if (!valid_mimetype) {
1204 guint len = strlen (filename);
/* NOTE(review): no len >= 4 guard visible here — a shorter filename would
 * make `extension` point before the string (undefined behavior). */
1205 const gchar *extension = filename + len - 4;
1206 for (j = 0; j < G_N_ELEMENTS (extensions); j++) {
1207 if (g_ascii_strcasecmp (extension, extensions[j]) == 0) {
1208 valid_extension = TRUE;
1214 if (valid_mimetype || valid_extension) {
1215 g_mutex_lock (render->ass_mutex);
1216 ass_add_font (render->ass_library, (gchar *) filename,
1217 (gchar *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
1218 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1219 g_mutex_unlock (render->ass_mutex);
/* gst_ass_render_event_video:
 * Event handler for the video sink pad.  Handles CAPS, SEGMENT, TAG and
 * FLUSH_STOP; everything visible here either updates element state or is
 * forwarded downstream on the source pad.
 * NOTE(review): original lines are missing in this excerpt (numbering
 * gaps), including the `break;` statements between switch cases.
 */
1227 gst_ass_render_event_video (GstPad * pad, GstObject * parent, GstEvent * event)
1229 gboolean ret = FALSE;
1230 GstAssRender *render = GST_ASS_RENDER (parent);
1232 GST_DEBUG_OBJECT (pad, "received video event %s",
1233 GST_EVENT_TYPE_NAME (event));
1235 switch (GST_EVENT_TYPE (event)) {
/* New video caps: reconfigure and consume the event (not forwarded here). */
1236 case GST_EVENT_CAPS:
1240 gst_event_parse_caps (event, &caps);
1241 ret = gst_ass_render_setcaps_video (pad, caps);
1242 gst_event_unref (event);
/* New segment: only TIME segments are accepted and stored as the video
 * segment; non-TIME segments raise a stream warning. */
1245 case GST_EVENT_SEGMENT:
1249 GST_DEBUG_OBJECT (render, "received new segment");
1251 gst_event_copy_segment (event, &segment);
1253 if (segment.format == GST_FORMAT_TIME) {
1254 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1255 &render->video_segment);
1257 render->video_segment = segment;
1259 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
1260 &render->video_segment);
/* TIME segment is forwarded downstream. */
1261 ret = gst_pad_push_event (render->srcpad, event);
1263 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1264 ("received non-TIME newsegment event on video input"));
/* Non-TIME segment is dropped. */
1266 gst_event_unref (event);
/* Tag events may carry font attachments; register them, then forward. */
1272 GstTagList *taglist = NULL;
1274 /* tag events may contain attachments which might be fonts */
1275 GST_DEBUG_OBJECT (render, "got TAG event");
1277 gst_event_parse_tag (event, &taglist);
1278 gst_ass_render_handle_tags (render, taglist);
1279 ret = gst_pad_push_event (render->srcpad, event);
/* Flush stop: reset the video segment to a clean TIME segment, forward. */
1282 case GST_EVENT_FLUSH_STOP:
1283 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
1285 ret = gst_pad_push_event (render->srcpad, event);
/* gst_ass_render_query_video:
 * Query handler for the video sink pad.  Answers CAPS queries via the
 * element's getcaps helper; everything else goes to the default handler.
 * NOTE(review): some original lines (e.g. case terminators) are missing
 * from this excerpt.
 */
1293 gst_ass_render_query_video (GstPad * pad, GstObject * parent, GstQuery * query)
1295 gboolean res = FALSE;
1297 switch (GST_QUERY_TYPE (query)) {
1298 case GST_QUERY_CAPS:
1300 GstCaps *filter, *caps;
1302 gst_query_parse_caps (query, &filter);
/* getcaps returns a new ref; set it on the query, then drop our ref. */
1303 caps = gst_ass_render_getcaps (pad, filter);
1304 gst_query_set_caps_result (query, caps);
1305 gst_caps_unref (caps);
/* All other queries: default pad query handling. */
1310 res = gst_pad_query_default (pad, parent, query);
/* gst_ass_render_event_text:
 * Event handler for the subtitle (text) sink pad.  CAPS and SEGMENT events
 * update element state and are consumed; FLUSH_START clears all queued
 * libass events and wakes a chain function blocked on subtitle_cond;
 * FLUSH_STOP and EOS are consumed; TAG and unknown events are forwarded.
 * NOTE(review): original lines are missing in this excerpt (numbering
 * gaps), including `break;` statements and some closing braces.
 */
1318 gst_ass_render_event_text (GstPad * pad, GstObject * parent, GstEvent * event)
1321 gboolean ret = FALSE;
1322 GstAssRender *render = GST_ASS_RENDER (parent);
1324 GST_DEBUG_OBJECT (pad, "received text event %s", GST_EVENT_TYPE_NAME (event));
1326 switch (GST_EVENT_TYPE (event)) {
/* New subtitle caps: reconfigure, then consume the event. */
1327 case GST_EVENT_CAPS:
1331 gst_event_parse_caps (event, &caps);
1332 ret = gst_ass_render_setcaps_text (pad, caps);
1333 gst_event_unref (event);
/* New segment: only TIME segments are stored; the event is consumed in
 * both branches (subtitle segments are not forwarded downstream). */
1336 case GST_EVENT_SEGMENT:
1340 GST_DEBUG_OBJECT (render, "received new segment");
1342 gst_event_copy_segment (event, &segment);
1344 if (segment.format == GST_FORMAT_TIME) {
1345 GST_DEBUG_OBJECT (render, "SUBTITLE SEGMENT now: %" GST_SEGMENT_FORMAT,
1346 &render->subtitle_segment);
1348 render->subtitle_segment = segment;
1350 GST_DEBUG_OBJECT (render,
1351 "SUBTITLE SEGMENT after: %" GST_SEGMENT_FORMAT,
1352 &render->subtitle_segment);
1354 gst_event_unref (event);
1356 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1357 ("received non-TIME newsegment event on subtitle input"));
1359 gst_event_unref (event);
/* Flush stop: reset the subtitle segment and clear the flushing flag. */
1363 case GST_EVENT_FLUSH_STOP:
1364 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
1365 render->subtitle_flushing = FALSE;
1366 gst_event_unref (event);
/* Flush start: drop every queued libass event, discard any pending
 * subtitle buffer, and signal subtitle_cond so a chain function waiting
 * for video to catch up wakes and sees subtitle_flushing. */
1369 case GST_EVENT_FLUSH_START:
1370 GST_DEBUG_OBJECT (render, "begin flushing");
1371 g_mutex_lock (render->ass_mutex);
1372 if (render->ass_track) {
1373 /* delete any events on the ass_track */
1374 for (i = 0; i < render->ass_track->n_events; i++) {
1375 GST_DEBUG_OBJECT (render, "deleted event with eid %i", i);
1376 ass_free_event (render->ass_track, i);
1378 render->ass_track->n_events = 0;
1379 GST_DEBUG_OBJECT (render, "done flushing");
1381 g_mutex_unlock (render->ass_mutex);
1382 g_mutex_lock (render->subtitle_mutex);
1383 if (render->subtitle_pending)
1384 gst_buffer_unref (render->subtitle_pending);
1385 render->subtitle_pending = NULL;
1386 render->subtitle_flushing = TRUE;
1387 g_cond_signal (render->subtitle_cond);
1388 g_mutex_unlock (render->subtitle_mutex);
1389 gst_event_unref (event);
/* Text EOS: only logged here (under the object lock); event consumed. */
1393 GST_OBJECT_LOCK (render);
1394 GST_INFO_OBJECT (render, "text EOS");
1395 GST_OBJECT_UNLOCK (render);
1396 gst_event_unref (event);
/* Tag events may carry font attachments; register them, then forward. */
1401 GstTagList *taglist = NULL;
1403 /* tag events may contain attachments which might be fonts */
1404 GST_DEBUG_OBJECT (render, "got TAG event");
1406 gst_event_parse_tag (event, &taglist);
1407 gst_ass_render_handle_tags (render, taglist);
1408 ret = gst_pad_push_event (render->srcpad, event);
/* Default: forward unrecognized events downstream. */
1412 ret = gst_pad_push_event (render->srcpad, event);
/* plugin_init:
 * GStreamer plugin entry point: sets up the two debug categories used by
 * this file and registers the "assrender" element with PRIMARY rank.
 */
1420 plugin_init (GstPlugin * plugin)
1422 GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender",
1423 0, "ASS/SSA subtitle renderer");
/* Separate category for messages forwarded from the libass library itself. */
1424 GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",
1425 0, "ASS/SSA subtitle renderer library");
1427 return gst_element_register (plugin, "assrender",
1428 GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER);
/* Plugin descriptor boilerplate; two macro arguments (minor version and
 * plugin name) are on original lines not visible in this excerpt. */
1431 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1434 "ASS/SSA subtitle renderer",
1435 plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)