2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
18 * Boston, MA 02111-1307, USA.
22 * SECTION:element-assrender
24 * Renders timestamped SSA/ASS subtitles on top of a video stream.
27 * <title>Example launch line</title>
29 * gst-launch -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mp3parse ! mad ! audioconvert ! autoaudiosink d. ! queue ! ffdec_h264 ! ffmpegcolorspace ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! ffmpegcolorspace ! autovideosink
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
34 /* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
35 * with newer GLib versions (>= 2.31.0) */
36 #define GLIB_DISABLE_DEPRECATION_WARNINGS
42 #include "gstassrender.h"
46 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
47 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
48 #define GST_CAT_DEFAULT gst_ass_render_debug
50 /* Filter signals and props */
/* Source pad: packed RGB variants (with/without padding byte) plus I420.
 * Must stay in sync with video_sink_factory and the blit-function switch in
 * gst_ass_render_setcaps_video.
 * NOTE(review): pad direction/presence arguments are missing from this
 * extraction — confirm against upstream. */
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
        GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
        GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx ";"
        GST_VIDEO_CAPS_YUV ("I420"))
/* Video sink pad: accepts exactly the same raw-video formats as the source
 * pad — the element blends in place and never converts formats.
 * NOTE(review): pad direction/presence arguments elided in this extraction. */
static GstStaticPadTemplate video_sink_factory =
    GST_STATIC_PAD_TEMPLATE ("video_sink",
    GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
        GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
        GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx ";"
        GST_VIDEO_CAPS_YUV ("I420"))
/* Text sink pad: accepts demuxed ASS or SSA subtitle streams (e.g. from
 * matroskademux).
 * NOTE(review): pad direction/presence arguments elided in this extraction. */
static GstStaticPadTemplate text_sink_factory =
    GST_STATIC_PAD_TEMPLATE ("text_sink",
    GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
89 static void gst_ass_render_set_property (GObject * object, guint prop_id,
90 const GValue * value, GParamSpec * pspec);
91 static void gst_ass_render_get_property (GObject * object, guint prop_id,
92 GValue * value, GParamSpec * pspec);
94 static void gst_ass_render_finalize (GObject * object);
96 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
97 GstStateChange transition);
99 GST_BOILERPLATE (GstAssRender, gst_ass_render, GstElement, GST_TYPE_ELEMENT);
101 static GstCaps *gst_ass_render_getcaps (GstPad * pad);
103 static gboolean gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps);
104 static gboolean gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps);
106 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad, GstBuffer * buf);
107 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad, GstBuffer * buf);
109 static gboolean gst_ass_render_event_video (GstPad * pad, GstEvent * event);
110 static gboolean gst_ass_render_event_text (GstPad * pad, GstEvent * event);
111 static gboolean gst_ass_render_event_src (GstPad * pad, GstEvent * event);
113 static GstFlowReturn gst_ass_render_bufferalloc_video (GstPad * pad,
114 guint64 offset, guint size, GstCaps * caps, GstBuffer ** buffer);
116 static gboolean gst_ass_render_query_src (GstPad * pad, GstQuery * query);
/* GObject boilerplate: register the three static pad templates and the
 * element metadata shown by gst-inspect.
 * NOTE(review): the return type, braces and the text_sink_factory argument
 * of the third add_static_pad_template call are missing from this
 * extraction — confirm against upstream. */
gst_ass_render_base_init (gpointer gclass)
  GstElementClass *element_class = (GstElementClass *) gclass;

  gst_element_class_add_static_pad_template (element_class, &src_factory);
  gst_element_class_add_static_pad_template (element_class,
      &video_sink_factory);
  gst_element_class_add_static_pad_template (element_class,

  gst_element_class_set_details_simple (element_class, "ASS/SSA Render",
      "Mixer/Video/Overlay/Subtitle",
      "Renders ASS/SSA subtitles with libass",
      "Benjamin Schmitz <vortex@wolpzone.de>, "
      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
/* initialize the plugin's class: wire up property access, finalize and the
 * state-change handler, and install the two boolean properties. */
gst_ass_render_class_init (GstAssRenderClass * klass)
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;

  gobject_class->set_property = gst_ass_render_set_property;
  gobject_class->get_property = gst_ass_render_get_property;
  gobject_class->finalize = gst_ass_render_finalize;

  /* "enable" (default TRUE): when FALSE the video chain does plain
   * passthrough instead of blending (see chain_video). */
  g_object_class_install_property (gobject_class, PROP_ENABLE,
      g_param_spec_boolean ("enable", "Enable",
          "Enable rendering of subtitles", TRUE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /* "embeddedfonts" (default TRUE): forwarded to libass via
   * ass_set_extract_fonts in set_property. */
  g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
      g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
          "Extract and use fonts embedded in the stream", TRUE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
#if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
/* libass logging hook (libass >= 0.9.7): format the message once and route
 * it into the dedicated gst_ass_render_lib_debug category at a severity
 * matching the libass level.
 * NOTE(review): the level-dispatch if/else chain, signature tail and the
 * g_free (message) are missing from this extraction — only one of the five
 * GST_CAT_* lines executes per call upstream. */
_libass_message_cb (gint level, const gchar * fmt, va_list args,
  gchar *message = g_strdup_vprintf (fmt, args);

  GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
  GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
  GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
  GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
  GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
/* Instance init: create the three pads, install all pad functions, set up
 * the subtitle synchronisation primitives and the libass library/renderer.
 * NOTE(review): some brace lines are missing from this extraction. */
gst_ass_render_init (GstAssRender * render, GstAssRenderClass * gclass)
  GST_DEBUG_OBJECT (render, "init");

  render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
  render->video_sinkpad =
      gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
  render->text_sinkpad =
      gst_pad_new_from_static_template (&text_sink_factory, "text_sink");

  gst_pad_set_setcaps_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_setcaps_video));
  gst_pad_set_setcaps_function (render->text_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_setcaps_text));

  gst_pad_set_getcaps_function (render->srcpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_getcaps));
  gst_pad_set_getcaps_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_getcaps));

  gst_pad_set_chain_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
  gst_pad_set_chain_function (render->text_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));

  gst_pad_set_event_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
  gst_pad_set_event_function (render->text_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
  gst_pad_set_event_function (render->srcpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_event_src));

  gst_pad_set_bufferalloc_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_bufferalloc_video));

  gst_pad_set_query_function (render->srcpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_query_src));

  gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
  gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
  gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);

  /* mutex + cond used to park "too early" subtitle buffers in chain_text
   * until chain_video consumes them or a flush/state-change releases them */
  render->subtitle_mutex = g_mutex_new ();
  render->subtitle_cond = g_cond_new ();

  render->renderer_init_ok = FALSE;
  render->track_init_ok = FALSE;
  render->enable = TRUE;
  render->embeddedfonts = TRUE;

  gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
  gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);

  /* ass_mutex serialises every libass call (library, renderer, track) */
  render->ass_mutex = g_mutex_new ();
  render->ass_library = ass_library_init ();
#if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
  ass_set_message_cb (render->ass_library, _libass_message_cb, render);
  ass_set_extract_fonts (render->ass_library, 1);

  render->ass_renderer = ass_renderer_init (render->ass_library);
  if (!render->ass_renderer) {
    GST_WARNING_OBJECT (render, "cannot create renderer instance");
    g_assert_not_reached ();

  render->ass_track = NULL;

  GST_DEBUG_OBJECT (render, "init complete");
/* Release all instance resources: GLib sync primitives, the libass track,
 * renderer and library, then chain up to the parent finalize.
 * NOTE(review): closing braces of the three if-blocks are missing from this
 * extraction. */
gst_ass_render_finalize (GObject * object)
  GstAssRender *render = GST_ASS_RENDER (object);

  if (render->subtitle_mutex)
    g_mutex_free (render->subtitle_mutex);
  if (render->subtitle_cond)
    g_cond_free (render->subtitle_cond);

  /* tear down libass objects in dependency order: track, renderer, library */
  if (render->ass_track) {
    ass_free_track (render->ass_track);
  if (render->ass_renderer) {
    ass_renderer_done (render->ass_renderer);
  if (render->ass_library) {
    ass_library_done (render->ass_library);
  if (render->ass_mutex)
    g_mutex_free (render->ass_mutex);

  G_OBJECT_CLASS (parent_class)->finalize (object);
/* Standard GObject property setter for "enable" and "embeddedfonts".
 * NOTE(review): the switch header, PROP_ENABLE case label and break lines
 * are missing from this extraction. */
gst_ass_render_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
  GstAssRender *render = GST_ASS_RENDER (object);

      render->enable = g_value_get_boolean (value);
    case PROP_EMBEDDEDFONTS:
      render->embeddedfonts = g_value_get_boolean (value);
      /* forward the new setting to libass under the ass lock */
      g_mutex_lock (render->ass_mutex);
      ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
      g_mutex_unlock (render->ass_mutex);
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Standard GObject property getter for "enable" and "embeddedfonts".
 * NOTE(review): the switch header, PROP_ENABLE case label and break lines
 * are missing from this extraction. */
gst_ass_render_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
  GstAssRender *render = GST_ASS_RENDER (object);

      g_value_set_boolean (value, render->enable);
    case PROP_EMBEDDEDFONTS:
      g_value_set_boolean (value, render->embeddedfonts);
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Element state handling.  Upward: reset the flushing flag and segments on
 * READY->PAUSED; on PAUSED->READY (before chaining up) wake any chain_text
 * thread blocked on subtitle_cond and drop the pending subtitle buffer.
 * Downward (after chaining up): free the ass_track and clear the init flags
 * so a re-start renegotiates from scratch.
 * NOTE(review): break statements and the trailing return are missing from
 * this extraction. */
static GstStateChangeReturn
gst_ass_render_change_state (GstElement * element, GstStateChange transition)
  GstAssRender *render = GST_ASS_RENDER (element);
  GstStateChangeReturn ret;

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      render->subtitle_flushing = FALSE;
      gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
      gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
    case GST_STATE_CHANGE_NULL_TO_READY:
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* unblock chain_text before the base class shuts the pads down */
      g_mutex_lock (render->subtitle_mutex);
      render->subtitle_flushing = TRUE;
      if (render->subtitle_pending)
        gst_buffer_unref (render->subtitle_pending);
      render->subtitle_pending = NULL;
      g_cond_signal (render->subtitle_cond);
      g_mutex_unlock (render->subtitle_mutex);

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      g_mutex_lock (render->ass_mutex);
      if (render->ass_track)
        ass_free_track (render->ass_track);
      render->ass_track = NULL;
      g_mutex_unlock (render->ass_mutex);
      render->track_init_ok = FALSE;
      render->renderer_init_ok = FALSE;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
    case GST_STATE_CHANGE_READY_TO_NULL:
/* Source-pad query handler: the element adds no query semantics of its own,
 * so every query is proxied to the video sink pad's peer.
 * NOTE(review): the declaration of ret and the return line are missing from
 * this extraction. */
gst_ass_render_query_src (GstPad * pad, GstQuery * query)
  GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));

  ret = gst_pad_peer_query (render->video_sinkpad, query);

  gst_object_unref (render);
/* Source-pad event handler.  Seeks are handled specially: optionally flush
 * downstream, release any subtitle buffer chain_text is waiting on (so the
 * streaming threads can wind down), then forward the seek to both sink
 * pads.  Other events are forwarded to both sink pads as well.
 * NOTE(review): case/default boundaries and closing braces are missing from
 * this extraction; the duplicated push-to-both-pads tail appears to be the
 * default branch. */
gst_ass_render_event_src (GstPad * pad, GstEvent * event)
  GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
  gboolean ret = FALSE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:{
      GST_DEBUG_OBJECT (render, "seek received, driving from here");

      gst_event_parse_seek (event, NULL, NULL, &flags, NULL, NULL, NULL, NULL);

      /* Flush downstream, only for flushing seek */
      if (flags & GST_SEEK_FLAG_FLUSH)
        gst_pad_push_event (render->srcpad, gst_event_new_flush_start ());

      /* Mark subtitle as flushing, unblocks chains */
      g_mutex_lock (render->subtitle_mutex);
      if (render->subtitle_pending)
        gst_buffer_unref (render->subtitle_pending);
      render->subtitle_pending = NULL;
      render->subtitle_flushing = TRUE;
      g_cond_signal (render->subtitle_cond);
      g_mutex_unlock (render->subtitle_mutex);

      /* Seek on each sink pad */
      gst_event_ref (event);
      ret = gst_pad_push_event (render->video_sinkpad, event);
      ret = gst_pad_push_event (render->text_sinkpad, event);
      gst_event_unref (event);

      gst_event_ref (event);
      ret = gst_pad_push_event (render->video_sinkpad, event);
      gst_pad_push_event (render->text_sinkpad, event);

  gst_object_unref (render);
/* Shared getcaps for srcpad and video_sinkpad: since the element keeps the
 * video format unchanged, the caps of one pad are the peer caps of the
 * opposite pad intersected with our template; with no peer, the template
 * caps alone.
 * NOTE(review): the else keywords, the caps = temp assignment and the
 * return are missing from this extraction. */
gst_ass_render_getcaps (GstPad * pad)
  GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));

  if (pad == render->srcpad)
    otherpad = render->video_sinkpad;
    otherpad = render->srcpad;

  /* we can do what the peer can */
  caps = gst_pad_peer_get_caps (otherpad);
    const GstCaps *templ;

    /* filtered against our padtemplate */
    templ = gst_pad_get_pad_template_caps (otherpad);
    temp = gst_caps_intersect (caps, templ);
    gst_caps_unref (caps);
    /* this is what we can do */
    /* no peer, our padtemplate is enough then */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));

  gst_object_unref (render);
/* Generate one packed-RGB blit function per pixel layout.  Each generated
 * blit_<name>() walks the linked list of ASS_Image glyphs and alpha-blends
 * them in place onto the frame in `buffer`.  `bpp` is bytes per pixel and
 * R/G/B are the byte offsets of the colour channels within a pixel.
 * ass_image->color packs the overlay colour as RRGGBBAA, with the low byte
 * being transparency (hence alpha = 255 - (color & 0xff)); src is an 8-bit
 * per-pixel coverage bitmap.  Rows are clipped to the frame via MIN().
 * NOTE(review): several continuation lines (loop closers, skip/continue,
 * pointer advances, counter handling) are missing from this extraction. */
#define CREATE_RGB_BLIT_FUNCTION(name,bpp,R,G,B) \
blit_##name (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer) \
  gint alpha, r, g, b, k; \
  gint width = render->width; \
  gint height = render->height; \
  gint dst_stride = GST_ROUND_UP_4 (width * bpp); \
  while (ass_image) { \
    if (ass_image->dst_y > height || ass_image->dst_x > width) \
    /* blend subtitles onto the video frame */ \
    alpha = 255 - ((ass_image->color) & 0xff); \
    r = ((ass_image->color) >> 24) & 0xff; \
    g = ((ass_image->color) >> 16) & 0xff; \
    b = ((ass_image->color) >> 8) & 0xff; \
    src = ass_image->bitmap; \
    dst = buffer->data + ass_image->dst_y * dst_stride + ass_image->dst_x * bpp; \
    w = MIN (ass_image->w, width - ass_image->dst_x); \
    h = MIN (ass_image->h, height - ass_image->dst_y); \
    src_skip = ass_image->stride - w; \
    dst_skip = dst_stride - w * bpp; \
    for (y = 0; y < h; y++) { \
      for (x = 0; x < w; x++) { \
        k = src[0] * alpha / 255; \
        dst[R] = (k * r + (255 - k) * dst[R]) / 255; \
        dst[G] = (k * g + (255 - k) * dst[G]) / 255; \
        dst[B] = (k * b + (255 - k) * dst[B]) / 255; \
    ass_image = ass_image->next; \
  GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter); \

/* one blit function per supported packed-RGB layout; the channel offsets
 * must match the caps advertised in the pad templates above */
CREATE_RGB_BLIT_FUNCTION (rgb, 3, 0, 1, 2);
CREATE_RGB_BLIT_FUNCTION (bgr, 3, 2, 1, 0);
CREATE_RGB_BLIT_FUNCTION (xrgb, 4, 1, 2, 3);
CREATE_RGB_BLIT_FUNCTION (xbgr, 4, 3, 2, 1);
CREATE_RGB_BLIT_FUNCTION (rgbx, 4, 0, 1, 2);
CREATE_RGB_BLIT_FUNCTION (bgrx, 4, 2, 1, 0);

#undef CREATE_RGB_BLIT_FUNCTION
533 rgb_to_y (gint r, gint g, gint b)
537 ret = (gint) (((19595 * r) >> 16) + ((38470 * g) >> 16) + ((7471 * b) >> 16));
538 ret = CLAMP (ret, 0, 255);
543 rgb_to_u (gint r, gint g, gint b)
548 (gint) (-((11059 * r) >> 16) - ((21709 * g) >> 16) + ((32768 * b) >> 16) +
550 ret = CLAMP (ret, 0, 255);
555 rgb_to_v (gint r, gint g, gint b)
560 (gint) (((32768 * r) >> 16) - ((27439 * g) >> 16) - ((5329 * b) >> 16) +
562 ret = CLAMP (ret, 0, 255);
/* Blend the ASS_Image list onto an I420 (planar 4:2:0) frame in place.
 * Luma is blended per pixel; chroma is blended per 2x2 block, with special
 * first-row / first-column / last-row / last-column paths when the overlay
 * rectangle does not start or end on even coordinates.
 * NOTE(review): many structural lines (loop headers/closers, lvalues of the
 * dst_u/dst_v assignments, continue/skip, counter handling) are missing
 * from this extraction.
 * NOTE(review): k2 accumulates coverage over the 2x2 block without dividing
 * by the number of samples — verify the intended blend weighting upstream. */
blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer)
  gint alpha, r, g, b, k, k2;
  guint8 *dst_y, *dst_u, *dst_v;
  /* FIXME ignoring source image stride might be wrong here */
  gint width = render->width;
  gint height = render->height;
  gint y_offset, y_stride;
  gint u_offset, u_stride;
  gint v_offset, v_stride;

  /* I420 plane offsets and strides for this frame size */
  gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 0, width,
  gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 1, width,
  gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 2, width,

  y_stride = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 0, width);
  u_stride = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 1, width);
  v_stride = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 2, width);

  /* skip glyphs whose origin lies outside the frame */
  if (ass_image->dst_y > height || ass_image->dst_x > width)

  /* blend subtitles onto the video frame */
  alpha = 255 - ((ass_image->color) & 0xff);
  r = ((ass_image->color) >> 24) & 0xff;
  g = ((ass_image->color) >> 16) & 0xff;
  b = ((ass_image->color) >> 8) & 0xff;

  /* convert the overlay colour to YUV once per glyph */
  Y = rgb_to_y (r, g, b);
  U = rgb_to_u (r, g, b);
  V = rgb_to_v (r, g, b);

  /* clip the glyph rectangle against the frame */
  w = MIN (ass_image->w, width - ass_image->dst_x);
  h = MIN (ass_image->h, height - ass_image->dst_y);

  src_stride = ass_image->stride;

  src = ass_image->bitmap;
  buffer->data + y_offset + ass_image->dst_y * y_stride +
  buffer->data + u_offset + (ass_image->dst_y / 2) * u_stride +
      ass_image->dst_x / 2;
  buffer->data + v_offset + (ass_image->dst_y / 2) * v_stride +
      ass_image->dst_x / 2;

  /* luma: straightforward per-pixel alpha blend */
  for (y = 0; y < h; y++) {
    dst_y = buffer->data + y_offset + (ass_image->dst_y + y) * y_stride +
    for (x = 0; x < w; x++) {
      k = src[y * ass_image->w + x] * alpha / 255;
      dst_y[x] = (k * Y + (255 - k) * dst_y[x]) / 255;

  /* chroma: overlay starting on an odd line — blend its first row alone */
  if (ass_image->dst_y & 1) {
    buffer->data + u_offset + (ass_image->dst_y / 2) * u_stride +
        ass_image->dst_x / 2;
    buffer->data + v_offset + (ass_image->dst_y / 2) * v_stride +
        ass_image->dst_x / 2;
    /* odd x origin: single leading pixel */
    if (ass_image->dst_x & 1) {
      k2 = src[y * ass_image->w + x] * alpha / 255;
      dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
      dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
    /* horizontal pairs */
    for (; x < w - 1; x += 2) {
      k2 = src[y * ass_image->w + x] * alpha / 255;
      k2 += src[y * ass_image->w + x + 1] * alpha / 255;
      dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
      dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
    /* trailing odd column */
      k2 = src[y * ass_image->w + x] * alpha / 255;
      dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
      dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;

  /* chroma: full 2x2 blocks over even line pairs */
  for (; y < h - 1; y += 2) {
    buffer->data + u_offset + ((ass_image->dst_y + y) / 2) * u_stride +
        ass_image->dst_x / 2;
    buffer->data + v_offset + ((ass_image->dst_y + y) / 2) * v_stride +
        ass_image->dst_x / 2;
    /* odd x origin: vertical pair only */
    if (ass_image->dst_x & 1) {
      k2 = src[y * ass_image->w + x] * alpha / 255;
      k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
      dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
      dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
    /* 2x2 block: sum coverage of all four pixels */
    for (; x < w - 1; x += 2) {
      k2 = src[y * ass_image->w + x] * alpha / 255;
      k2 += src[y * ass_image->w + x + 1] * alpha / 255;
      k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
      k2 += src[(y + 1) * ass_image->w + x + 1] * alpha / 255;
      dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
      dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
      /* trailing odd column: vertical pair */
      k2 = src[y * ass_image->w + x] * alpha / 255;
      k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
      dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
      dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;

  /* chroma: trailing odd row */
    buffer->data + u_offset + (ass_image->dst_y / 2) * u_stride +
        ass_image->dst_x / 2;
    buffer->data + v_offset + (ass_image->dst_y / 2) * v_stride +
        ass_image->dst_x / 2;
    if (ass_image->dst_x & 1) {
      k2 = src[y * ass_image->w + x] * alpha / 255;
      dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
      dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
    for (; x < w - 1; x += 2) {
      k2 = src[y * ass_image->w + x] * alpha / 255;
      k2 += src[y * ass_image->w + x + 1] * alpha / 255;
      dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
      dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
      k2 = src[y * ass_image->w + x] * alpha / 255;
      dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
      dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;

  ass_image = ass_image->next;

  GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
/* Video sink setcaps: parse format/size/framerate into the instance,
 * forward the caps downstream, pick the matching blit function, then
 * configure the libass renderer (frame size, display and storage aspect
 * ratio, font scale, hinting, fonts, margins) under ass_mutex.
 * NOTE(review): braces, break statements, the dar declaration, #else/#endif
 * lines and the return are missing from this extraction; the duplicated
 * ass_set_fonts / ass_set_aspect_ratio pairs appear to be the two arms of
 * the LIBASS_VERSION conditionals. */
gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps)
  GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
  gboolean ret = FALSE;
  gint par_n = 1, par_d = 1;

  if (!gst_video_format_parse_caps (caps, &render->format, &render->width,
      !gst_video_parse_caps_framerate (caps, &render->fps_n, &render->fps_d)) {
    GST_ERROR_OBJECT (render, "Can't parse caps: %" GST_PTR_FORMAT, caps);

  /* PAR is optional; defaults to 1/1 from the initialisers above */
  gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  ret = gst_pad_set_caps (render->srcpad, caps);

  /* select the in-place blend routine matching the negotiated format */
  switch (render->format) {
    case GST_VIDEO_FORMAT_RGB:
      render->blit = blit_rgb;
    case GST_VIDEO_FORMAT_BGR:
      render->blit = blit_bgr;
    case GST_VIDEO_FORMAT_xRGB:
      render->blit = blit_xrgb;
    case GST_VIDEO_FORMAT_xBGR:
      render->blit = blit_xbgr;
    case GST_VIDEO_FORMAT_RGBx:
      render->blit = blit_rgbx;
    case GST_VIDEO_FORMAT_BGRx:
      render->blit = blit_bgrx;
    case GST_VIDEO_FORMAT_I420:
      render->blit = blit_i420;

  g_mutex_lock (render->ass_mutex);
  ass_set_frame_size (render->ass_renderer, render->width, render->height);

  /* display aspect ratio from PAR and pixel dimensions */
  dar = (((gdouble) par_n) * ((gdouble) render->width))
      / (((gdouble) par_d) * ((gdouble) render->height));
#if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
  ass_set_aspect_ratio (render->ass_renderer, dar);
  ass_set_aspect_ratio (render->ass_renderer,
      dar, ((gdouble) render->width) / ((gdouble) render->height));
  ass_set_font_scale (render->ass_renderer, 1.0);
  ass_set_hinting (render->ass_renderer, ASS_HINTING_LIGHT);

#if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
  ass_set_fonts (render->ass_renderer, "Arial", "sans-serif");
  ass_set_fonts (render->ass_renderer, NULL, "Sans");
  ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
  ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
  ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
  ass_set_use_margins (render->ass_renderer, 0);
  g_mutex_unlock (render->ass_mutex);

  render->renderer_init_ok = TRUE;

  GST_DEBUG_OBJECT (render, "ass renderer setup complete");

  gst_object_unref (render);
/* Text sink setcaps: create the libass track and, when the caps carry a
 * codec_data buffer (ASS/SSA header from the demuxer), feed it to
 * ass_process_codec_private; without codec_data an empty track is created.
 * Sets track_init_ok, which gates rendering in chain_video.
 * NOTE(review): the if (value) branch head, ret assignments, braces and the
 * return are missing from this extraction.
 * NOTE(review): g_return_val_if_fail fires after g_mutex_lock (ass_mutex) —
 * the visible ordering suggests an early return would leave the mutex
 * locked; verify upstream. */
gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps)
  GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
  GstStructure *structure;
  gchar *codec_private;
  guint codec_private_size;
  gboolean ret = FALSE;

  structure = gst_caps_get_structure (caps, 0);

  GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,

  value = gst_structure_get_value (structure, "codec_data");

  g_mutex_lock (render->ass_mutex);
    priv = gst_value_get_buffer (value);
    g_return_val_if_fail (priv != NULL, FALSE);

    codec_private = (gchar *) GST_BUFFER_DATA (priv);
    codec_private_size = GST_BUFFER_SIZE (priv);

    if (!render->ass_track)
      render->ass_track = ass_new_track (render->ass_library);

    /* hand the subtitle header (styles, script info) to libass */
    ass_process_codec_private (render->ass_track,
        codec_private, codec_private_size);

    GST_DEBUG_OBJECT (render, "ass track created");

    render->track_init_ok = TRUE;
  } else if (!render->ass_track) {
    render->ass_track = ass_new_track (render->ass_library);
    render->track_init_ok = TRUE;

  g_mutex_unlock (render->ass_mutex);

  gst_object_unref (render);
/* Feed one subtitle buffer into the libass track and consume it.
 * Takes ownership of `buffer` (unrefs it before returning).  libass expects
 * millisecond timestamps, hence the GST_MSECOND divisions.
 * NOTE(review): the `pts_end = running_time + duration;` line (between the
 * pts_start and pts_end divisions) is missing from this extraction. */
gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
    GstClockTime running_time, GstClockTime duration)
  gchar *data = (gchar *) GST_BUFFER_DATA (buffer);
  guint size = GST_BUFFER_SIZE (buffer);
  gdouble pts_start, pts_end;

  pts_start = running_time;
  pts_start /= GST_MSECOND;
  pts_end /= GST_MSECOND;

  GST_DEBUG_OBJECT (render,
      "Processing subtitles with running time %" GST_TIME_FORMAT
      " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (duration));
  g_mutex_lock (render->ass_mutex);
  ass_process_chunk (render->ass_track, data, size, pts_start, pts_end);
  g_mutex_unlock (render->ass_mutex);
  gst_buffer_unref (buffer);
/* Video sink bufferalloc: proxy the allocation to the source pad's peer so
 * upstream writes directly into downstream buffers.  Takes a ref on srcpad
 * under the object lock before using it.  Defaults to GST_FLOW_WRONG_STATE
 * when no srcpad is available.
 * NOTE(review): the allocpad declaration, the if (allocpad) guard, braces
 * and the return are missing from this extraction. */
gst_ass_render_bufferalloc_video (GstPad * pad, guint64 offset, guint size,
    GstCaps * caps, GstBuffer ** buffer)
  GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_WRONG_STATE;

  GST_OBJECT_LOCK (render);
  allocpad = render->srcpad ? gst_object_ref (render->srcpad) : NULL;
  GST_OBJECT_UNLOCK (render);

    ret = gst_pad_alloc_buffer (allocpad, offset, size, caps, buffer);
    gst_object_unref (allocpad);

  gst_object_unref (render);
/* Video chain: clip the buffer to the video segment, consume any pending
 * subtitle buffer parked by chain_text (dropping it if it already ended
 * before this frame, feeding it to libass if it starts within half a second
 * past this frame's end — signalling subtitle_cond either way), then, if
 * the renderer and track are ready and "enable" is set, render the frame's
 * subtitles with libass and blit them onto the (writable) buffer before
 * pushing downstream.  Out-of-segment buffers are dropped.
 * NOTE(review): returns, else branches, several lvalue lines
 * (sub_running_time =, vid_running_time =, running_time =), #endif lines
 * and closing braces are missing from this extraction. */
gst_ass_render_chain_video (GstPad * pad, GstBuffer * buffer)
  GstAssRender *render = GST_ASS_RENDER (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean in_seg = FALSE;
  gint64 start, stop, clip_start = 0, clip_stop = 0;
  ASS_Image *ass_image;

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
    gst_buffer_unref (buffer);

  /* ignore buffers that are outside of the current segment */
  start = GST_BUFFER_TIMESTAMP (buffer);
  if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
    stop = GST_CLOCK_TIME_NONE;
    stop = start + GST_BUFFER_DURATION (buffer);

  /* segment_clip() will adjust start unconditionally to segment_start if
   * no stop time is provided, so handle this ourselves */
  if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)

  gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
      &clip_start, &clip_stop);

  /* if the buffer is only partially in the segment, fix up stamps */
  if (clip_start != start || (stop != -1 && clip_stop != stop)) {
    GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
    buffer = gst_buffer_make_metadata_writable (buffer);
    GST_BUFFER_TIMESTAMP (buffer) = clip_start;
      GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;

  gst_segment_set_last_stop (&render->video_segment, GST_FORMAT_TIME,

  /* hand a parked subtitle buffer to libass (or drop it) based on where its
   * running time lies relative to this video buffer */
  g_mutex_lock (render->subtitle_mutex);
  if (render->subtitle_pending) {
    GstClockTime sub_running_time, vid_running_time;
    GstClockTime sub_running_time_end, vid_running_time_end;

    gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP (render->subtitle_pending));
    sub_running_time_end =
        gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP (render->subtitle_pending) +
        GST_BUFFER_DURATION (render->subtitle_pending));
    gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP (buffer));
    vid_running_time_end =
        gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer));

    if (sub_running_time_end < vid_running_time) {
      gst_buffer_unref (render->subtitle_pending);
      GST_DEBUG_OBJECT (render,
          "Too late text buffer, dropping (%" GST_TIME_FORMAT " < %"
          GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time_end),
          GST_TIME_ARGS (vid_running_time));
      render->subtitle_pending = NULL;
      g_cond_signal (render->subtitle_cond);
    } else if (sub_running_time <= vid_running_time_end + GST_SECOND / 2) {
      /* process_text takes ownership of subtitle_pending */
      gst_ass_render_process_text (render, render->subtitle_pending,
          sub_running_time, sub_running_time_end - sub_running_time);
      render->subtitle_pending = NULL;
      g_cond_signal (render->subtitle_cond);
  g_mutex_unlock (render->subtitle_mutex);

  /* now start rendering subtitles, if all conditions are met */
  if (render->renderer_init_ok && render->track_init_ok && render->enable) {
    GstClockTime running_time;
#ifndef GST_DISABLE_GST_DEBUG
    gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP (buffer));
    GST_DEBUG_OBJECT (render,
        "rendering frame for running time %" GST_TIME_FORMAT,
        GST_TIME_ARGS (running_time));
    /* libass needs timestamps in ms */
    timestamp = running_time / GST_MSECOND;
    g_mutex_lock (render->ass_mutex);
#ifndef GST_DISABLE_GST_DEBUG
    /* only for testing right now. could possibly be used for optimizations? */
    step = ass_step_sub (render->ass_track, timestamp, 1);
    GST_DEBUG_OBJECT (render, "Current running time: %" GST_TIME_FORMAT
        " // Next event: %" GST_TIME_FORMAT,
        GST_TIME_ARGS (running_time), GST_TIME_ARGS (step * GST_MSECOND));
    /* not sure what the last parameter to this call is for (detect_change) */
    ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
    g_mutex_unlock (render->ass_mutex);

    if (ass_image != NULL) {
      /* blend in place; buffer must be writable for that */
      buffer = gst_buffer_make_writable (buffer);
      render->blit (render, ass_image, buffer);
      GST_LOG_OBJECT (render, "nothing to render right now");
    GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");

  ret = gst_pad_push (render->srcpad, buffer);

  GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
  gst_buffer_unref (buffer);
/* Text chain: validate and clip the subtitle buffer against the subtitle
 * segment, then compare its running time against the video position.  A
 * buffer more than half a second ahead of the video is parked in
 * subtitle_pending and this thread blocks on subtitle_cond until
 * chain_video consumes it or a flush/state-change wakes it; a buffer whose
 * end precedes the video position is dropped; otherwise it is fed to
 * libass immediately via process_text (which takes ownership).
 * NOTE(review): returns, else branches, the sub/vid_running_time lvalues,
 * the vid_running_time += frame-duration target and closing braces are
 * missing from this extraction.
 * NOTE(review): g_assert (subtitle_pending == NULL) executes before
 * subtitle_mutex is taken — racy against chain_video; verify upstream. */
static GstFlowReturn
gst_ass_render_chain_text (GstPad * pad, GstBuffer * buffer)
  GstFlowReturn ret = GST_FLOW_OK;
  GstAssRender *render = GST_ASS_RENDER (GST_PAD_PARENT (pad));
  GstClockTime timestamp, duration;
  GstClockTime sub_running_time, vid_running_time;
  GstClockTime sub_running_time_end;
  gint64 cstart, cstop;

  if (render->subtitle_flushing) {
    gst_buffer_unref (buffer);
    return GST_FLOW_WRONG_STATE;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  /* both stamps are required to position the cue on the libass timeline */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp)
          || !GST_CLOCK_TIME_IS_VALID (duration))) {
    GST_WARNING_OBJECT (render,
        "Text buffer without valid timestamp" " or duration, dropping");
    gst_buffer_unref (buffer);

  gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME, timestamp,
      timestamp + duration, &cstart, &cstop);
    GST_DEBUG_OBJECT (render,
        "Text buffer before segment start (%" GST_TIME_FORMAT " < %"
        GST_TIME_FORMAT ")", GST_TIME_ARGS (timestamp),
        GST_TIME_ARGS (render->subtitle_segment.start));
    gst_buffer_unref (buffer);

  GST_BUFFER_TIMESTAMP (buffer) = timestamp = cstart;
  GST_BUFFER_DURATION (buffer) = duration = cstop - cstart;

  gst_segment_set_last_stop (&render->subtitle_segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buffer));

  gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
  sub_running_time_end =
      gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
      timestamp + duration);
  gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
      render->video_segment.last_stop);

  /* account for one frame of video lag when a framerate is known */
  if (render->fps_n && render->fps_d)
    gst_util_uint64_scale (GST_SECOND, render->fps_d, render->fps_n);

  if (sub_running_time > vid_running_time + GST_SECOND / 2) {
    g_assert (render->subtitle_pending == NULL);
    g_mutex_lock (render->subtitle_mutex);
    if (G_UNLIKELY (render->subtitle_flushing)) {
      GST_DEBUG_OBJECT (render, "Text pad flushing");
      gst_buffer_unref (buffer);
      g_mutex_unlock (render->subtitle_mutex);
      return GST_FLOW_WRONG_STATE;
    GST_DEBUG_OBJECT (render,
        "Too early text buffer, waiting (%" GST_TIME_FORMAT " > %"
        GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time),
        GST_TIME_ARGS (vid_running_time));
    render->subtitle_pending = buffer;
    /* block until chain_video consumes the buffer or a flush wakes us */
    g_cond_wait (render->subtitle_cond, render->subtitle_mutex);
    g_mutex_unlock (render->subtitle_mutex);
  } else if (sub_running_time_end < vid_running_time) {
    GST_DEBUG_OBJECT (render,
        "Too late text buffer, dropping (%" GST_TIME_FORMAT " < %"
        GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time_end),
        GST_TIME_ARGS (vid_running_time));
    gst_buffer_unref (buffer);
    gst_ass_render_process_text (render, buffer, sub_running_time,
        sub_running_time_end - sub_running_time);

  GST_DEBUG_OBJECT (render,
      "processed text packet with timestamp %" GST_TIME_FORMAT
      " and duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration));
1178 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1180 static const gchar *mimetypes[] = {
1181 "application/x-font-ttf",
1182 "application/x-font-otf",
1183 "application/x-truetype-font"
1185 static const gchar *extensions[] = {
1195 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1196 if (tag_size > 0 && render->embeddedfonts) {
1197 const GValue *value;
1200 GstStructure *structure;
1201 gboolean valid_mimetype, valid_extension;
1203 const gchar *filename;
1205 GST_DEBUG_OBJECT (render, "TAG event has attachments");
1207 for (index = 0; index < tag_size; index++) {
1208 value = gst_tag_list_get_value_index (taglist, GST_TAG_ATTACHMENT, index);
1209 buf = gst_value_get_buffer (value);
1210 if (!buf || !GST_BUFFER_CAPS (buf))
1213 caps = GST_BUFFER_CAPS (buf);
1214 structure = gst_caps_get_structure (caps, 0);
1216 valid_mimetype = FALSE;
1217 valid_extension = FALSE;
1219 for (j = 0; j < G_N_ELEMENTS (mimetypes); j++) {
1220 if (gst_structure_has_name (structure, mimetypes[j])) {
1221 valid_mimetype = TRUE;
1225 filename = gst_structure_get_string (structure, "filename");
1229 if (!valid_mimetype) {
1230 guint len = strlen (filename);
1231 const gchar *extension = filename + len - 4;
1232 for (j = 0; j < G_N_ELEMENTS (extensions); j++) {
1233 if (g_ascii_strcasecmp (extension, extensions[j]) == 0) {
1234 valid_extension = TRUE;
1240 if (valid_mimetype || valid_extension) {
1241 g_mutex_lock (render->ass_mutex);
1242 ass_add_font (render->ass_library, (gchar *) filename,
1243 (gchar *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
1244 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1245 g_mutex_unlock (render->ass_mutex);
/*
 * gst_ass_render_event_video:
 * Sink event handler for the video pad.
 *
 * - NEWSEGMENT: only TIME-format segments are accepted; they update
 *   render->video_segment and the event is pushed downstream.  A non-TIME
 *   segment raises an element warning and the event is dropped.
 * - TAG: the tag list may carry font attachments, which are handed to
 *   gst_ass_render_handle_tags() before the event is forwarded.
 * - FLUSH_STOP: resets the stored video segment to a fresh TIME segment,
 *   then forwards the event.
 *
 * Returns the result of pushing the event downstream, FALSE when the
 * event was consumed locally.
 *
 * NOTE(review): this excerpt has gaps (missing braces, case labels and the
 * default case from the original file); comments describe only what is
 * visible here -- verify against the full file.
 */
1252 gst_ass_render_event_video (GstPad * pad, GstEvent * event)
1254   gboolean ret = FALSE;
1255   GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
1257   GST_DEBUG_OBJECT (pad, "received video event %s",
1258       GST_EVENT_TYPE_NAME (event));
1260   switch (GST_EVENT_TYPE (event)) {
1261     case GST_EVENT_NEWSEGMENT:
1265       gint64 start, stop, time;
1268       GST_DEBUG_OBJECT (render, "received new segment");
1270       gst_event_parse_new_segment (event, &update, &rate, &format, &start,
             /* only TIME segments can be tracked against subtitle timestamps */
1273       if (format == GST_FORMAT_TIME) {
1274         GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1275             &render->video_segment);
1277         gst_segment_set_newsegment (&render->video_segment, update, rate,
1278             format, start, stop, time);
1280         GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
1281             &render->video_segment);
1282         ret = gst_pad_push_event (render->srcpad, event);
             /* non-TIME segment: warn and consume the event */
1284         GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1285             ("received non-TIME newsegment event on video input"));
1287         gst_event_unref (event);
1293       GstTagList *taglist = NULL;
1295       /* tag events may contain attachments which might be fonts */
1296       GST_DEBUG_OBJECT (render, "got TAG event");
1298       gst_event_parse_tag (event, &taglist);
1299       gst_ass_render_handle_tags (render, taglist);
1300       ret = gst_pad_push_event (render->srcpad, event);
1303     case GST_EVENT_FLUSH_STOP:
           /* discard segment state accumulated before the flush */
1304       gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
1306       ret = gst_pad_push_event (render->srcpad, event);
         /* drop the pad reference taken by gst_pad_get_parent() above */
1310   gst_object_unref (render);
/*
 * gst_ass_render_event_text:
 * Sink event handler for the subtitle (text) pad.
 *
 * - NEWSEGMENT: TIME-format segments update render->subtitle_segment; the
 *   event is consumed either way (never forwarded -- the video pad drives
 *   downstream segments).  Non-TIME segments raise an element warning.
 * - FLUSH_STOP: resets the subtitle segment and clears the flushing flag;
 *   event consumed.
 * - FLUSH_START: frees all queued libass events under ass_mutex, drops any
 *   pending subtitle buffer, sets subtitle_flushing and signals
 *   subtitle_cond to wake a chain function blocked waiting for video;
 *   event consumed.
 * - EOS: only logged; the event is consumed, not forwarded.
 * - TAG: possible font attachments are handled, then the event is
 *   forwarded downstream.
 *
 * NOTE(review): this excerpt has gaps (missing braces, case labels and the
 * default case from the original file); comments describe only what is
 * visible here -- verify against the full file.
 */
1316 gst_ass_render_event_text (GstPad * pad, GstEvent * event)
1319   gboolean ret = FALSE;
1320   GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
1322   GST_DEBUG_OBJECT (pad, "received text event %s", GST_EVENT_TYPE_NAME (event));
1324   switch (GST_EVENT_TYPE (event)) {
1325     case GST_EVENT_NEWSEGMENT:
1329       gint64 start, stop, time;
1332       GST_DEBUG_OBJECT (render, "received new segment");
1334       gst_event_parse_new_segment (event, &update, &rate, &format, &start,
1337       if (format == GST_FORMAT_TIME) {
1338         GST_DEBUG_OBJECT (render, "SUBTITLE SEGMENT now: %" GST_SEGMENT_FORMAT,
1339             &render->subtitle_segment);
1341         gst_segment_set_newsegment (&render->subtitle_segment, update, rate,
1342             format, start, stop, time);
1344         GST_DEBUG_OBJECT (render,
1345             "SUBTITLE SEGMENT after: %" GST_SEGMENT_FORMAT,
1346             &render->subtitle_segment);
             /* segment consumed: video pad forwards its own segment */
1348         gst_event_unref (event);
1350         GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1351             ("received non-TIME newsegment event on subtitle input"));
1353         gst_event_unref (event);
1357     case GST_EVENT_FLUSH_STOP:
1358       gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
1359       render->subtitle_flushing = FALSE;
1360       gst_event_unref (event);
1363     case GST_EVENT_FLUSH_START:
1364       GST_DEBUG_OBJECT (render, "begin flushing");
           /* libass track is shared with the render path: lock while clearing */
1365       g_mutex_lock (render->ass_mutex);
1366       if (render->ass_track) {
1367         /* delete any events on the ass_track */
1368         for (i = 0; i < render->ass_track->n_events; i++) {
1369           GST_DEBUG_OBJECT (render, "deleted event with eid %i", i);
1370           ass_free_event (render->ass_track, i);
1372         render->ass_track->n_events = 0;
1373         GST_DEBUG_OBJECT (render, "done flushing");
1375       g_mutex_unlock (render->ass_mutex);
           /* wake up a text chain function blocked in g_cond_wait() so it can
            * observe subtitle_flushing and bail out */
1376       g_mutex_lock (render->subtitle_mutex);
1377       if (render->subtitle_pending)
1378         gst_buffer_unref (render->subtitle_pending);
1379       render->subtitle_pending = NULL;
1380       render->subtitle_flushing = TRUE;
1381       g_cond_signal (render->subtitle_cond);
1382       g_mutex_unlock (render->subtitle_mutex);
1383       gst_event_unref (event);
           /* text EOS: log only, do not forward (video EOS ends the stream) */
1387       GST_OBJECT_LOCK (render);
1388       GST_INFO_OBJECT (render, "text EOS");
1389       GST_OBJECT_UNLOCK (render);
1390       gst_event_unref (event);
1395       GstTagList *taglist = NULL;
1397       /* tag events may contain attachments which might be fonts */
1398       GST_DEBUG_OBJECT (render, "got TAG event");
1400       gst_event_parse_tag (event, &taglist);
1401       gst_ass_render_handle_tags (render, taglist);
1402       ret = gst_pad_push_event (render->srcpad, event);
1406       ret = gst_pad_push_event (render->srcpad, event);
         /* drop the pad reference taken by gst_pad_get_parent() above */
1410   gst_object_unref (render);
/*
 * plugin_init:
 * Plugin entry point: sets up the two debug categories (one for the
 * element, one for messages relayed from the libass library) and
 * registers the "assrender" element with primary rank.
 */
1416 plugin_init (GstPlugin * plugin)
1418   GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender",
1419       0, "ASS/SSA subtitle renderer");
1420   GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",
1421       0, "ASS/SSA subtitle renderer library");
1423   return gst_element_register (plugin, "assrender",
1424       GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER);
/* Standard GStreamer plugin descriptor boilerplate: exports the plugin
 * metadata and the plugin_init entry point above.
 * NOTE(review): two argument lines of this macro invocation are missing
 * from this excerpt -- verify against the full file. */
1427 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1430     "ASS/SSA subtitle renderer",
1431     plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)