2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
18 * Boston, MA 02111-1307, USA.
22 * SECTION:element-assrender
24 * Renders timestamped SSA/ASS subtitles on top of a video stream.
27 * <title>Example launch line</title>
29 * gst-launch -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mp3parse ! mad ! audioconvert ! autoaudiosink d. ! queue ! ffdec_h264 ! ffmpegcolorspace ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! ffmpegcolorspace ! autovideosink
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
39 #include "gstassrender.h"
43 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
44 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
45 #define GST_CAT_DEFAULT gst_ass_render_debug
47 /* Filter signals and props */
/* Pad templates.  Video in and out advertise the same set of packed
 * RGB/BGR formats (with and without padding byte) plus planar I420,
 * so frames can be blended in place and pushed through unchanged. */
60 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
63 GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
64 GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
65 GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx ";"
66 GST_VIDEO_CAPS_YUV ("I420"))

/* Video sink: must mirror the src template (in-place overlay, no
 * format conversion is done by this element). */
69 static GstStaticPadTemplate video_sink_factory =
70 GST_STATIC_PAD_TEMPLATE ("video_sink",
73 GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
74 GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
75 GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx ";"
76 GST_VIDEO_CAPS_YUV ("I420"))

/* Text sink: raw ASS or SSA subtitle packets (e.g. from matroskademux). */
79 static GstStaticPadTemplate text_sink_factory =
80 GST_STATIC_PAD_TEMPLATE ("text_sink",
83 GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
86 static void gst_ass_render_set_property (GObject * object, guint prop_id,
87 const GValue * value, GParamSpec * pspec);
88 static void gst_ass_render_get_property (GObject * object, guint prop_id,
89 GValue * value, GParamSpec * pspec);
91 static void gst_ass_render_finalize (GObject * object);
93 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
94 GstStateChange transition);
96 GST_BOILERPLATE (GstAssRender, gst_ass_render, GstElement, GST_TYPE_ELEMENT);
98 static GstCaps *gst_ass_render_getcaps (GstPad * pad);
100 static gboolean gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps);
101 static gboolean gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps);
103 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad, GstBuffer * buf);
104 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad, GstBuffer * buf);
106 static gboolean gst_ass_render_event_video (GstPad * pad, GstEvent * event);
107 static gboolean gst_ass_render_event_text (GstPad * pad, GstEvent * event);
108 static gboolean gst_ass_render_event_src (GstPad * pad, GstEvent * event);
110 static GstFlowReturn gst_ass_render_bufferalloc_video (GstPad * pad,
111 guint64 offset, guint size, GstCaps * caps, GstBuffer ** buffer);
113 static gboolean gst_ass_render_query_src (GstPad * pad, GstQuery * query);
/* GObject 0.10 base_init: register the three pad templates and the
 * element metadata with the element class. */
116 gst_ass_render_base_init (gpointer gclass)
118 GstElementClass *element_class = (GstElementClass *) gclass;
120 gst_element_class_add_pad_template (element_class,
121 gst_static_pad_template_get (&src_factory));
122 gst_element_class_add_pad_template (element_class,
123 gst_static_pad_template_get (&video_sink_factory));
124 gst_element_class_add_pad_template (element_class,
125 gst_static_pad_template_get (&text_sink_factory));
/* Human-readable element details shown by gst-inspect. */
127 gst_element_class_set_details_simple (element_class, "ASS/SSA Render",
128 "Mixer/Video/Overlay/Subtitle",
129 "Renders ASS/SSA subtitles with libass",
130 "Benjamin Schmitz <vortex@wolpzone.de>, "
131 "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
134 /* initialize the plugin's class */
/* Installs GObject property handlers, the finalizer, the two boolean
 * properties ("enable", "embeddedfonts", both defaulting to TRUE) and
 * the element state-change handler. */
136 gst_ass_render_class_init (GstAssRenderClass * klass)
138 GObjectClass *gobject_class = (GObjectClass *) klass;
139 GstElementClass *gstelement_class = (GstElementClass *) klass;
141 gobject_class->set_property = gst_ass_render_set_property;
142 gobject_class->get_property = gst_ass_render_get_property;
143 gobject_class->finalize = gst_ass_render_finalize;
/* "enable": master switch for subtitle rendering (default TRUE). */
145 g_object_class_install_property (gobject_class, PROP_ENABLE,
146 g_param_spec_boolean ("enable", "Enable",
147 "Enable rendering of subtitles", TRUE,
148 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* "embeddedfonts": extract fonts attached in the container (default TRUE). */
149 g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
150 g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
151 "Extract and use fonts embedded in the stream", TRUE,
152 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
154 gstelement_class->change_state =
155 GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
158 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
/* Callback routing libass log output (newer libass API) into the
 * gst_ass_render_lib_debug category; dispatched by message level
 * (level selection code not visible in this excerpt — confirm against
 * the full file).  The formatted message is logged via a "%s" literal:
 * passing the libass-produced string directly as the format argument
 * would mis-expand any '%' contained in it (format-string bug). */
160 _libass_message_cb (gint level, const gchar * fmt, va_list args, gpointer data)
162 GstAssRender *render = GST_ASS_RENDER (data);
163 gchar *message = g_strdup_vprintf (fmt, args);
166 GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
168 GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
170 GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
172 GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
174 GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
/* Instance init: create pads, wire up pad functions, initialize state
 * and set up the libass library/renderer pair. */
181 gst_ass_render_init (GstAssRender * render, GstAssRenderClass * gclass)
183 GST_DEBUG_OBJECT (render, "init");
/* One src pad, one video sink and one text (subtitle) sink. */
185 render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
186 render->video_sinkpad =
187 gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
188 render->text_sinkpad =
189 gst_pad_new_from_static_template (&text_sink_factory, "text_sink");
191 gst_pad_set_setcaps_function (render->video_sinkpad,
192 GST_DEBUG_FUNCPTR (gst_ass_render_setcaps_video));
193 gst_pad_set_setcaps_function (render->text_sinkpad,
194 GST_DEBUG_FUNCPTR (gst_ass_render_setcaps_text));
196 gst_pad_set_getcaps_function (render->srcpad,
197 GST_DEBUG_FUNCPTR (gst_ass_render_getcaps));
198 gst_pad_set_getcaps_function (render->video_sinkpad,
199 GST_DEBUG_FUNCPTR (gst_ass_render_getcaps));
201 gst_pad_set_chain_function (render->video_sinkpad,
202 GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
203 gst_pad_set_chain_function (render->text_sinkpad,
204 GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));
206 gst_pad_set_event_function (render->video_sinkpad,
207 GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
208 gst_pad_set_event_function (render->text_sinkpad,
209 GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
210 gst_pad_set_event_function (render->srcpad,
211 GST_DEBUG_FUNCPTR (gst_ass_render_event_src));
213 gst_pad_set_bufferalloc_function (render->video_sinkpad,
214 GST_DEBUG_FUNCPTR (gst_ass_render_bufferalloc_video));
216 gst_pad_set_query_function (render->srcpad,
217 GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
219 gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
220 gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
221 gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);
/* mutex/cond pair used to park an early subtitle buffer in
 * chain_text until the video stream catches up. */
226 render->subtitle_mutex = g_mutex_new ();
227 render->subtitle_cond = g_cond_new ();
229 render->renderer_init_ok = FALSE;
230 render->track_init_ok = FALSE;
231 render->enable = TRUE;
232 render->embeddedfonts = TRUE;
234 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
235 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
/* libass global state; the message callback only exists on >= 0.9.7. */
237 render->ass_library = ass_library_init ();
238 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
239 ass_set_message_cb (render->ass_library, _libass_message_cb, render);
241 ass_set_fonts_dir (render->ass_library, "./");
242 ass_set_extract_fonts (render->ass_library, 1);
244 render->ass_renderer = ass_renderer_init (render->ass_library);
245 if (!render->ass_renderer) {
246 GST_WARNING_OBJECT (render, "cannot create renderer instance");
/* NOTE(review): aborting the process on renderer-init failure is harsh
 * for a plugin; a soft failure path would be preferable. */
247 g_assert_not_reached ();
250 render->ass_track = NULL;
252 GST_DEBUG_OBJECT (render, "init complete");
/* Finalizer: release the sync primitives and all libass objects
 * (track, renderer, library) before chaining up. */
256 gst_ass_render_finalize (GObject * object)
258 GstAssRender *render = GST_ASS_RENDER (object);
260 if (render->subtitle_mutex)
261 g_mutex_free (render->subtitle_mutex);
263 if (render->subtitle_cond)
264 g_cond_free (render->subtitle_cond);
266 if (render->ass_track) {
267 ass_free_track (render->ass_track);
270 if (render->ass_renderer) {
271 ass_renderer_done (render->ass_renderer);
274 if (render->ass_library) {
275 ass_library_done (render->ass_library);
278 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Property setter.  "embeddedfonts" is forwarded straight to libass so
 * font extraction toggles take effect immediately.
 * (The switch/case scaffolding is not fully visible in this excerpt.) */
282 gst_ass_render_set_property (GObject * object, guint prop_id,
283 const GValue * value, GParamSpec * pspec)
285 GstAssRender *render = GST_ASS_RENDER (object);
289 render->enable = g_value_get_boolean (value);
291 case PROP_EMBEDDEDFONTS:
292 render->embeddedfonts = g_value_get_boolean (value);
293 ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
296 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Property getter: mirrors the two boolean flags back to the caller. */
302 gst_ass_render_get_property (GObject * object, guint prop_id,
303 GValue * value, GParamSpec * pspec)
305 GstAssRender *render = GST_ASS_RENDER (object);
309 g_value_set_boolean (value, render->enable);
311 case PROP_EMBEDDEDFONTS:
312 g_value_set_boolean (value, render->embeddedfonts);
315 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
320 static GstStateChangeReturn
/* State handler.  Downward PAUSED->READY marks the subtitle path as
 * flushing and wakes chain_text (which may be blocked on subtitle_cond)
 * BEFORE chaining up, so streaming threads can shut down; libass track
 * teardown happens after the parent transition. */
321 gst_ass_render_change_state (GstElement * element, GstStateChange transition)
323 GstAssRender *render = GST_ASS_RENDER (element);
324 GstStateChangeReturn ret;
326 switch (transition) {
327 case GST_STATE_CHANGE_READY_TO_PAUSED:
328 render->subtitle_flushing = FALSE;
329 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
330 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
332 case GST_STATE_CHANGE_NULL_TO_READY:
333 case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
337 case GST_STATE_CHANGE_PAUSED_TO_READY:
/* Unblock a chain_text thread waiting for the video to catch up. */
338 g_mutex_lock (render->subtitle_mutex);
339 render->subtitle_flushing = TRUE;
340 if (render->subtitle_pending)
341 gst_buffer_unref (render->subtitle_pending);
342 render->subtitle_pending = NULL;
343 g_cond_signal (render->subtitle_cond);
344 g_mutex_unlock (render->subtitle_mutex);
348 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
350 switch (transition) {
351 case GST_STATE_CHANGE_PAUSED_TO_READY:
/* Drop the parsed subtitle track; it is rebuilt on the next caps. */
352 if (render->ass_track)
353 ass_free_track (render->ass_track);
354 render->ass_track = NULL;
355 render->track_init_ok = FALSE;
356 render->renderer_init_ok = FALSE;
358 case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
359 case GST_STATE_CHANGE_READY_TO_NULL:
/* Src-pad query handler: this element is a passthrough for queries,
 * so forward everything to the video sink pad's peer. */
369 gst_ass_render_query_src (GstPad * pad, GstQuery * query)
371 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
374 ret = gst_pad_peer_query (render->video_sinkpad, query);
376 gst_object_unref (render);
/* Src-pad event handler.  Seeks are special-cased: flush downstream if
 * requested, unblock the subtitle chain, then forward the seek to both
 * sink pads.  All other events go to both sinks as well. */
381 gst_ass_render_event_src (GstPad * pad, GstEvent * event)
383 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
384 gboolean ret = FALSE;
386 switch (GST_EVENT_TYPE (event)) {
387 case GST_EVENT_SEEK:{
390 GST_DEBUG_OBJECT (render, "seek received, driving from here");
392 gst_event_parse_seek (event, NULL, NULL, &flags, NULL, NULL, NULL, NULL);
394 /* Flush downstream, only for flushing seek */
395 if (flags & GST_SEEK_FLAG_FLUSH)
396 gst_pad_push_event (render->srcpad, gst_event_new_flush_start ());
398 /* Mark subtitle as flushing, unblocks chains */
399 g_mutex_lock (render->subtitle_mutex);
400 if (render->subtitle_pending)
401 gst_buffer_unref (render->subtitle_pending);
402 render->subtitle_pending = NULL;
403 render->subtitle_flushing = TRUE;
404 g_cond_signal (render->subtitle_cond);
405 g_mutex_unlock (render->subtitle_mutex);
407 /* Seek on each sink pad */
/* Extra ref because two pads consume the event. */
408 gst_event_ref (event);
409 ret = gst_pad_push_event (render->video_sinkpad, event);
411 ret = gst_pad_push_event (render->text_sinkpad, event);
413 gst_event_unref (event);
/* Default: fan the event out to both sink pads; overall result is
 * taken from the video branch. */
418 gst_event_ref (event);
419 ret = gst_pad_push_event (render->video_sinkpad, event);
420 gst_pad_push_event (render->text_sinkpad, event);
424 gst_object_unref (render);
/* getcaps for src/video_sink: proxy the opposite pad's peer caps,
 * intersected with that pad's template; fall back to our own template
 * caps when there is no peer. */
430 gst_ass_render_getcaps (GstPad * pad)
432 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
436 if (pad == render->srcpad)
437 otherpad = render->video_sinkpad;
439 otherpad = render->srcpad;
441 /* we can do what the peer can */
442 caps = gst_pad_peer_get_caps (otherpad);
445 const GstCaps *templ;
447 /* filtered against our padtemplate */
448 templ = gst_pad_get_pad_template_caps (otherpad);
449 temp = gst_caps_intersect (caps, templ);
450 gst_caps_unref (caps);
451 /* this is what we can do */
454 /* no peer, our padtemplate is enough then */
455 caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
458 gst_object_unref (render);
/* Generates blit_<name>() which alpha-blends a linked list of libass
 * ASS_Image overlays onto a packed RGB video buffer in place.
 * Parameters: bpp = bytes per pixel, R/G/B = byte offsets of the three
 * color channels within a pixel (unused 4th byte is left untouched).
 * Per libass convention, color is packed RGBA with alpha inverted
 * (0 = opaque), and each bitmap is an 8-bit intensity map.
 * NOTE(review): the clipping branch body and the counter/loop-tail
 * lines are not visible in this excerpt -- confirm against the full
 * file before modifying. */
463 #define CREATE_RGB_BLIT_FUNCTION(name,bpp,R,G,B) \
465 blit_##name (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer) \
468 gint alpha, r, g, b, k; \
472 gint width = render->width; \
473 gint height = render->height; \
474 gint dst_stride = GST_ROUND_UP_4 (width * bpp); \
476 gint src_stride, src_skip; \
478 while (ass_image) { \
479 if (ass_image->dst_y > height || ass_image->dst_x > width) \
482 /* blend subtitles onto the video frame */ \
483 alpha = 255 - ((ass_image->color) & 0xff); \
484 r = ((ass_image->color) >> 24) & 0xff; \
485 g = ((ass_image->color) >> 16) & 0xff; \
486 b = ((ass_image->color) >> 8) & 0xff; \
487 src = ass_image->bitmap; \
488 dst = buffer->data + ass_image->dst_y * dst_stride + ass_image->dst_x * bpp; \
490 w = MIN (ass_image->w, width - ass_image->dst_x); \
491 h = MIN (ass_image->h, height - ass_image->dst_y); \
492 src_stride = ass_image->stride; \
493 src_skip = ass_image->stride - w; \
494 dst_skip = dst_stride - w * bpp; \
496 for (y = 0; y < h; y++) { \
497 for (x = 0; x < w; x++) { \
498 k = src[0] * alpha / 255; \
499 dst[R] = (k * r + (255 - k) * dst[R]) / 255; \
500 dst[G] = (k * g + (255 - k) * dst[G]) / 255; \
501 dst[B] = (k * b + (255 - k) * dst[B]) / 255; \
510 ass_image = ass_image->next; \
512 GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter); \

/* Instantiate one blitter per supported packed format; the numeric
 * arguments are the R/G/B byte offsets for that layout. */
515 CREATE_RGB_BLIT_FUNCTION (rgb, 3, 0, 1, 2);
516 CREATE_RGB_BLIT_FUNCTION (bgr, 3, 2, 1, 0);
517 CREATE_RGB_BLIT_FUNCTION (xrgb, 4, 1, 2, 3);
518 CREATE_RGB_BLIT_FUNCTION (xbgr, 4, 3, 2, 1);
519 CREATE_RGB_BLIT_FUNCTION (rgbx, 4, 0, 1, 2);
520 CREATE_RGB_BLIT_FUNCTION (bgrx, 4, 2, 1, 0);
522 #undef CREATE_RGB_BLIT_FUNCTION
/* Fixed-point BT.601-style RGB -> Y'CbCr conversion helpers.
 * Coefficients are scaled by 2^16 (e.g. 19595/65536 ~= 0.299); results
 * are clamped to the full 0..255 range. */
525 rgb_to_y (gint r, gint g, gint b)
529 ret = (gint) (((19595 * r) >> 16) + ((38470 * g) >> 16) + ((7471 * b) >> 16));
530 ret = CLAMP (ret, 0, 255);
/* U (Cb) component; the missing trailing term is the +128 bias. */
535 rgb_to_u (gint r, gint g, gint b)
540 (gint) (-((11059 * r) >> 16) - ((21709 * g) >> 16) + ((32768 * b) >> 16) +
542 ret = CLAMP (ret, 0, 255);
/* V (Cr) component; the missing trailing term is the +128 bias. */
547 rgb_to_v (gint r, gint g, gint b)
552 (gint) (((32768 * r) >> 16) - ((27439 * g) >> 16) - ((5329 * b) >> 16) +
554 ret = CLAMP (ret, 0, 255);
/* Alpha-blends libass overlays onto a planar I420 frame in place.
 * The subtitle color is converted once to Y/U/V; the luma plane is
 * blended at full resolution while chroma is blended per 2x2 block
 * (U/V are subsampled), with dedicated tail paths for odd widths and
 * odd heights.  Plane geometry comes from the gst_video_format helpers
 * so strides/offsets match what upstream produced.
 * NOTE(review): several interior lines (loop tails, k2 accumulation,
 * counter handling, clipping branch body) are missing from this
 * excerpt -- treat the visible arithmetic as incomplete context. */
559 blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer)
562 gint alpha, r, g, b, k, k2;
565 guint8 *dst_y, *dst_u, *dst_v;
568 gint width = render->width;
569 gint height = render->height;
571 gint y_offset, y_height, y_width, y_stride;
572 gint u_offset, u_height, u_width, u_stride;
573 gint v_offset, v_height, v_width, v_stride;
/* Query I420 plane layout (offset/height/width/stride per component). */
576 gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 0, width,
579 gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 1, width,
582 gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 2, width,
586 gst_video_format_get_component_height (GST_VIDEO_FORMAT_I420, 0, height);
588 gst_video_format_get_component_height (GST_VIDEO_FORMAT_I420, 1, height);
590 gst_video_format_get_component_height (GST_VIDEO_FORMAT_I420, 2, height);
593 gst_video_format_get_component_width (GST_VIDEO_FORMAT_I420, 0, width);
595 gst_video_format_get_component_width (GST_VIDEO_FORMAT_I420, 1, width);
597 gst_video_format_get_component_width (GST_VIDEO_FORMAT_I420, 2, width);
599 y_stride = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 0, width);
600 u_stride = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 1, width);
601 v_stride = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 2, width);
/* Skip overlays that start entirely outside the frame. */
604 if (ass_image->dst_y > height || ass_image->dst_x > width)
607 /* blend subtitles onto the video frame */
/* libass packs RGBA with inverted alpha: 0xff = fully transparent. */
608 alpha = 255 - ((ass_image->color) & 0xff);
609 r = ((ass_image->color) >> 24) & 0xff;
610 g = ((ass_image->color) >> 16) & 0xff;
611 b = ((ass_image->color) >> 8) & 0xff;
613 Y = rgb_to_y (r, g, b);
614 U = rgb_to_u (r, g, b);
615 V = rgb_to_v (r, g, b);
/* Clip blend rectangle to the frame. */
617 w = MIN (ass_image->w, width - ass_image->dst_x);
618 h = MIN (ass_image->h, height - ass_image->dst_y);
623 src_stride = ass_image->stride;
625 src = ass_image->bitmap;
/* Destination pointers into the three planes; chroma coordinates are
 * halved (rounded up) because of 4:2:0 subsampling. */
627 buffer->data + y_offset + ass_image->dst_y * y_stride +
630 buffer->data + u_offset + ((ass_image->dst_y + 1) / 2) * u_stride +
631 (ass_image->dst_x + 1) / 2;
633 buffer->data + v_offset + ((ass_image->dst_y + 1) / 2) * v_stride +
634 (ass_image->dst_x + 1) / 2;
/* Main path: full 2x2 blocks -- blend 4 luma samples and one U/V pair. */
636 for (y = 0; y < h - 1; y += 2) {
637 for (x = 0; x < w - 1; x += 2) {
638 k = src[0] * alpha / 255;
640 dst_y[0] = (k * Y + (255 - k) * dst_y[0]) / 255;
642 k = src[1] * alpha / 255;
644 dst_y[1] = (k * Y + (255 - k) * dst_y[1]) / 255;
/* Second row of the 2x2 block (pointers advanced by one stride). */
649 k = src[0] * alpha / 255;
651 dst_y[0] = (k * Y + (255 - k) * dst_y[0]) / 255;
653 k = src[1] * alpha / 255;
655 dst_y[1] = (k * Y + (255 - k) * dst_y[1]) / 255;
/* k2 is the combined coverage of the block, applied to chroma. */
658 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
659 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Step back up one row and right two columns for the next block. */
663 src += -src_stride + 2;
664 dst_y += -y_stride + 2;
/* Right-edge column when the clipped width is odd. */
668 k = src[0] * alpha / 255;
670 dst_y[0] = (k * Y + (255 - k) * dst_y[0]) / 255;
675 k = src[0] * alpha / 255;
677 dst_y[0] = (k * Y + (255 - k) * dst_y[0]) / 255;
680 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
681 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
685 src += -src_stride + 1;
686 dst_y += -y_stride + 1;
/* Advance all planes to the next block row. */
689 src += src_stride + (src_stride - w);
690 dst_y += y_stride + (y_stride - w);
691 dst_u += u_stride - w2;
692 dst_v += v_stride - w2;
/* Bottom row when the clipped height is odd. */
696 for (x = 0; x < w - 1; x += 2) {
697 k = src[0] * alpha / 255;
699 dst_y[0] = (k * Y + (255 - k) * dst_y[0]) / 255;
701 k = src[1] * alpha / 255;
703 dst_y[1] = (k * Y + (255 - k) * dst_y[1]) / 255;
706 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
707 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Bottom-right corner pixel (odd width and odd height). */
716 k = src[0] * alpha / 255;
718 dst_y[0] = (k * Y + (255 - k) * dst_y[0]) / 255;
720 dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
721 dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
727 ass_image = ass_image->next;
730 GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
/* Video sink setcaps: parse format/size/framerate, propagate the caps
 * downstream unchanged, select the matching blit function and configure
 * the libass renderer (frame size, aspect, fonts, margins). */
734 gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps)
736 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
737 gboolean ret = FALSE;
738 gint par_n = 1, par_d = 1;
744 if (!gst_video_format_parse_caps (caps, &render->format, &render->width,
746 !gst_video_parse_caps_framerate (caps, &render->fps_n, &render->fps_d)) {
747 GST_ERROR_OBJECT (render, "Can't parse caps: %" GST_PTR_FORMAT, caps);
/* Pixel aspect ratio is optional; 1/1 default set above. */
752 gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);
/* Element is passthrough format-wise: src gets the same caps. */
754 ret = gst_pad_set_caps (render->srcpad, caps);
758 switch (render->format) {
759 case GST_VIDEO_FORMAT_RGB:
760 render->blit = blit_rgb;
762 case GST_VIDEO_FORMAT_BGR:
763 render->blit = blit_bgr;
765 case GST_VIDEO_FORMAT_xRGB:
766 render->blit = blit_xrgb;
768 case GST_VIDEO_FORMAT_xBGR:
769 render->blit = blit_xbgr;
771 case GST_VIDEO_FORMAT_RGBx:
772 render->blit = blit_rgbx;
774 case GST_VIDEO_FORMAT_BGRx:
775 render->blit = blit_bgrx;
777 case GST_VIDEO_FORMAT_I420:
778 render->blit = blit_i420;
785 ass_set_frame_size (render->ass_renderer, render->width, render->height);
/* Display aspect ratio from PAR x storage aspect. */
787 dar = (((gdouble) par_n) * ((gdouble) render->width))
788 / (((gdouble) par_d) * ((gdouble) render->height));
/* Old libass takes only DAR; newer API also wants the storage AR. */
789 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
790 ass_set_aspect_ratio (render->ass_renderer, dar);
792 ass_set_aspect_ratio (render->ass_renderer,
793 dar, ((gdouble) render->width) / ((gdouble) render->height));
795 ass_set_font_scale (render->ass_renderer, 1.0);
796 ass_set_hinting (render->ass_renderer, ASS_HINTING_NATIVE);
/* Font selection differs between libass generations; the excerpt shows
 * both candidate calls per branch -- confirm which is active in the
 * full file. */
798 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
799 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif");
800 ass_set_fonts (render->ass_renderer, NULL, "Sans");
802 ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
803 ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
805 ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
806 ass_set_use_margins (render->ass_renderer, 0);
808 render->renderer_init_ok = TRUE;
810 GST_DEBUG_OBJECT (render, "ass renderer setup complete");
813 gst_object_unref (render);
/* Text sink setcaps: create the libass track and, when the caps carry
 * "codec_data" (the ASS header section from e.g. Matroska), feed it to
 * ass_process_codec_private so styles/events are understood. */
819 gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps)
821 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
822 GstStructure *structure;
825 gchar *codec_private;
826 guint codec_private_size;
827 gboolean ret = FALSE;
829 structure = gst_caps_get_structure (caps, 0);
831 GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,
834 value = gst_structure_get_value (structure, "codec_data");
837 priv = gst_value_get_buffer (value);
838 g_return_val_if_fail (priv != NULL, FALSE);
840 codec_private = (gchar *) GST_BUFFER_DATA (priv);
841 codec_private_size = GST_BUFFER_SIZE (priv);
843 if (!render->ass_track)
844 render->ass_track = ass_new_track (render->ass_library);
846 ass_process_codec_private (render->ass_track,
847 codec_private, codec_private_size);
849 GST_DEBUG_OBJECT (render, "ass track created");
851 render->track_init_ok = TRUE;
/* No codec_data: still create an (empty) track so chunks can be fed. */
854 } else if (!render->ass_track) {
855 render->ass_track = ass_new_track (render->ass_library);
859 gst_object_unref (render);
/* Feeds one subtitle packet into the libass track and consumes the
 * buffer.  Takes ownership of @buffer (unrefs it when done).
 * libass expects start/duration in milliseconds, hence /GST_MSECOND.
 * NOTE(review): the line assigning pts_end before the division is not
 * visible here -- presumably pts_end = duration; confirm. */
866 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
867 GstClockTime running_time, GstClockTime duration)
869 gchar *data = (gchar *) GST_BUFFER_DATA (buffer);
870 guint size = GST_BUFFER_SIZE (buffer);
871 gdouble pts_start, pts_end;
873 pts_start = running_time;
874 pts_start /= GST_MSECOND;
876 pts_end /= GST_MSECOND;
878 GST_DEBUG_OBJECT (render,
879 "Processing subtitles with running time %" GST_TIME_FORMAT
880 " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
881 GST_TIME_ARGS (duration));
882 ass_process_chunk (render->ass_track, data, size, pts_start, pts_end);
883 gst_buffer_unref (buffer);
/* Buffer-alloc proxy for the video sink: forward the allocation to the
 * src pad's peer so downstream can provide (possibly zero-copy)
 * buffers.  srcpad is snapshotted under the object lock. */
887 gst_ass_render_bufferalloc_video (GstPad * pad, guint64 offset, guint size,
888 GstCaps * caps, GstBuffer ** buffer)
890 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
891 GstFlowReturn ret = GST_FLOW_WRONG_STATE;
894 GST_OBJECT_LOCK (render);
895 allocpad = render->srcpad ? gst_object_ref (render->srcpad) : NULL;
896 GST_OBJECT_UNLOCK (render);
899 ret = gst_pad_alloc_buffer (allocpad, offset, size, caps, buffer);
900 gst_object_unref (allocpad);
903 gst_object_unref (render);
/* Video chain: clip the buffer to the segment, hand any pending "too
 * early" subtitle to libass once the video has caught up, then render
 * the overlay for this frame's running time and push downstream.
 * Note: uses GST_PAD_PARENT (no ref taken), unlike the event/query
 * handlers which use gst_pad_get_parent. */
909 gst_ass_render_chain_video (GstPad * pad, GstBuffer * buffer)
911 GstAssRender *render = GST_ASS_RENDER (GST_PAD_PARENT (pad));
912 GstFlowReturn ret = GST_FLOW_OK;
913 gboolean in_seg = FALSE;
914 gint64 start, stop, clip_start = 0, clip_stop = 0;
915 ASS_Image *ass_image;
917 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
918 GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
919 gst_buffer_unref (buffer);
923 /* ignore buffers that are outside of the current segment */
924 start = GST_BUFFER_TIMESTAMP (buffer);
926 if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
927 stop = GST_CLOCK_TIME_NONE;
929 stop = start + GST_BUFFER_DURATION (buffer);
932 /* segment_clip() will adjust start unconditionally to segment_start if
933 * no stop time is provided, so handle this ourselves */
934 if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
938 gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
939 &clip_start, &clip_stop);
944 /* if the buffer is only partially in the segment, fix up stamps */
945 if (clip_start != start || (stop != -1 && clip_stop != stop)) {
946 GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
947 buffer = gst_buffer_make_metadata_writable (buffer);
948 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
950 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
953 gst_segment_set_last_stop (&render->video_segment, GST_FORMAT_TIME,
/* If chain_text parked a subtitle buffer because it arrived ahead of
 * the video, decide now whether it overlaps this frame (process it and
 * wake the waiting text thread) or is already in the past (drop it). */
956 g_mutex_lock (render->subtitle_mutex);
957 if (render->subtitle_pending) {
958 GstClockTime sub_running_time, vid_running_time;
959 GstClockTime sub_running_time_end, vid_running_time_end;
962 gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
963 GST_BUFFER_TIMESTAMP (render->subtitle_pending));
964 sub_running_time_end =
965 gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
966 GST_BUFFER_TIMESTAMP (render->subtitle_pending) +
967 GST_BUFFER_DURATION (render->subtitle_pending));
969 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
970 GST_BUFFER_TIMESTAMP (buffer));
971 vid_running_time_end =
972 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
973 GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer));
/* process_text() takes ownership of subtitle_pending. */
975 if (sub_running_time <= vid_running_time_end) {
976 gst_ass_render_process_text (render, render->subtitle_pending,
977 sub_running_time, sub_running_time_end - sub_running_time);
978 render->subtitle_pending = NULL;
979 g_cond_signal (render->subtitle_cond);
980 } else if (sub_running_time_end < vid_running_time) {
981 gst_buffer_unref (render->subtitle_pending);
982 GST_DEBUG_OBJECT (render,
983 "Too late text buffer, dropping (%" GST_TIME_FORMAT " < %"
984 GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time_end),
985 GST_TIME_ARGS (vid_running_time));
986 render->subtitle_pending = NULL;
987 g_cond_signal (render->subtitle_cond);
990 g_mutex_unlock (render->subtitle_mutex);
992 /* now start rendering subtitles, if all conditions are met */
993 if (render->renderer_init_ok && render->track_init_ok && render->enable) {
994 GstClockTime running_time;
996 #ifndef GST_DISABLE_GST_DEBUG
1001 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1002 GST_BUFFER_TIMESTAMP (buffer));
1003 GST_DEBUG_OBJECT (render,
1004 "rendering frame for running time %" GST_TIME_FORMAT,
1005 GST_TIME_ARGS (running_time));
1006 /* libass needs timestamps in ms */
1007 timestamp = running_time / GST_MSECOND;
1009 #ifndef GST_DISABLE_GST_DEBUG
1010 /* only for testing right now. could possibly be used for optimizations? */
1011 step = ass_step_sub (render->ass_track, timestamp, 1);
1012 GST_DEBUG_OBJECT (render, "Current running time: %" GST_TIME_FORMAT
1013 " // Next event: %" GST_TIME_FORMAT,
1014 GST_TIME_ARGS (running_time), GST_TIME_ARGS (step * GST_MSECOND));
1017 /* not sure what the last parameter to this call is for (detect_change) */
1018 ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
/* No overlay active at this timestamp: passthrough. */
1021 if (ass_image == NULL) {
1022 GST_LOG_OBJECT (render, "nothing to render right now");
1023 ret = gst_pad_push (render->srcpad, buffer);
/* Blend in place; buffer must be writable first. */
1027 buffer = gst_buffer_make_writable (buffer);
1028 render->blit (render, ass_image, buffer);
1031 ret = gst_pad_push (render->srcpad, buffer);
1037 GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
1038 gst_buffer_unref (buffer);
1043 static GstFlowReturn
/* Text chain: synchronize subtitle packets against the video stream.
 * A packet that starts after the current video position is parked in
 * subtitle_pending and the thread blocks on subtitle_cond until
 * chain_video consumes it (or a flush aborts the wait); a packet that
 * already ended is dropped; otherwise it is fed to libass immediately.
 * Takes ownership of @buffer on every path.
 * FIX(review): the flush-while-waiting path released the buffer with
 * gst_object_unref(); a GstBuffer is a GstMiniObject in 0.10, not a
 * GstObject, so gst_buffer_unref() is required.
 * NOTE(review): the g_assert on subtitle_pending reads it before the
 * mutex is taken -- racy against chain_video; confirm in full file. */
1044 gst_ass_render_chain_text (GstPad * pad, GstBuffer * buffer)
1046 GstFlowReturn ret = GST_FLOW_OK;
1047 GstAssRender *render = GST_ASS_RENDER (GST_PAD_PARENT (pad));
1048 GstClockTime timestamp, duration;
1049 GstClockTime sub_running_time, vid_running_time;
1050 GstClockTime sub_running_time_end;
1052 if (render->subtitle_flushing) {
1053 gst_buffer_unref (buffer);
1054 return GST_FLOW_WRONG_STATE;
1057 timestamp = GST_BUFFER_TIMESTAMP (buffer);
1058 duration = GST_BUFFER_DURATION (buffer);
/* libass needs both start and duration; drop unusable packets. */
1060 if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp)
1061 || !GST_CLOCK_TIME_IS_VALID (duration))) {
1062 GST_WARNING_OBJECT (render,
1063 "Text buffer without valid timestamp" " or duration, dropping");
1064 gst_buffer_unref (buffer);
1068 gst_segment_set_last_stop (&render->subtitle_segment, GST_FORMAT_TIME,
1069 GST_BUFFER_TIMESTAMP (buffer));
/* Compare subtitle and video positions in running time. */
1072 gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
1074 sub_running_time_end =
1075 gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
1076 timestamp + duration);
1078 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1079 render->video_segment.last_stop);
/* Allow one frame of slack so we don't block on the very next frame. */
1081 if (render->fps_n && render->fps_d)
1083 gst_util_uint64_scale (GST_SECOND, render->fps_d, render->fps_n);
1085 if (sub_running_time > vid_running_time) {
1086 g_assert (render->subtitle_pending == NULL);
1087 g_mutex_lock (render->subtitle_mutex);
1088 if (G_UNLIKELY (render->subtitle_flushing)) {
1089 GST_DEBUG_OBJECT (render, "Text pad flushing");
1090 gst_buffer_unref (buffer);
1091 g_mutex_unlock (render->subtitle_mutex);
1092 return GST_FLOW_WRONG_STATE;
1094 GST_DEBUG_OBJECT (render,
1095 "Too early text buffer, waiting (%" GST_TIME_FORMAT " > %"
1096 GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time),
1097 GST_TIME_ARGS (vid_running_time));
/* Ownership of buffer moves to subtitle_pending; chain_video will
 * process or drop it and signal us. */
1098 render->subtitle_pending = buffer;
1099 g_cond_wait (render->subtitle_cond, render->subtitle_mutex);
1100 g_mutex_unlock (render->subtitle_mutex);
1101 } else if (sub_running_time_end < vid_running_time) {
1102 GST_DEBUG_OBJECT (render,
1103 "Too late text buffer, dropping (%" GST_TIME_FORMAT " < %"
1104 GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time_end),
1105 GST_TIME_ARGS (vid_running_time));
1106 gst_buffer_unref (buffer);
/* In range: hand straight to libass (consumes buffer). */
1109 gst_ass_render_process_text (render, buffer, sub_running_time,
1110 sub_running_time_end - sub_running_time);
1114 GST_DEBUG_OBJECT (render,
1115 "processed text packet with timestamp %" GST_TIME_FORMAT
1116 " and duration %" GST_TIME_FORMAT,
1117 GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration));
/* Scans a tag list for GST_TAG_ATTACHMENT entries and registers any
 * font attachments with libass (only when "embeddedfonts" is enabled).
 * An attachment qualifies either by caps mimetype or by filename
 * extension.
 * FIX(review): the extension test computed filename + len - 4 without
 * checking len >= 4 (pointer underflow, UB, for short names) or that
 * "filename" exists at all; both are now guarded in the condition.
 * NOTE(review): lines between getting "filename" and the mimetype
 * check are missing from this excerpt; if they already guard a NULL
 * filename, the extra check here is merely redundant, not harmful. */
1123 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1125 static const gchar *mimetypes[] = {
1126 "application/x-font-ttf",
1127 "application/x-font-otf",
1128 "application/x-truetype-font"
1130 static const gchar *extensions[] = {
1140 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1141 if (tag_size > 0 && render->embeddedfonts) {
1142 const GValue *value;
1145 GstStructure *structure;
1146 gboolean valid_mimetype, valid_extension;
1148 const gchar *filename;
1150 GST_DEBUG_OBJECT (render, "TAG event has attachments");
1152 for (index = 0; index < tag_size; index++) {
1153 value = gst_tag_list_get_value_index (taglist, GST_TAG_ATTACHMENT, index);
1154 buf = gst_value_get_buffer (value);
1155 if (!buf || !GST_BUFFER_CAPS (buf))
1158 caps = GST_BUFFER_CAPS (buf);
1159 structure = gst_caps_get_structure (caps, 0);
1161 valid_mimetype = FALSE;
1162 valid_extension = FALSE;
1164 for (j = 0; j < G_N_ELEMENTS (mimetypes); j++) {
1165 if (gst_structure_has_name (structure, mimetypes[j])) {
1166 valid_mimetype = TRUE;
1170 filename = gst_structure_get_string (structure, "filename");
/* Fall back to the extension only for names long enough to carry one
 * (".ttf" etc. are 4 chars). */
1174 if (!valid_mimetype && filename != NULL && strlen (filename) >= 4) {
1175 guint len = strlen (filename);
1176 const gchar *extension = filename + len - 4;
1177 for (j = 0; j < G_N_ELEMENTS (extensions); j++) {
1178 if (g_ascii_strcasecmp (extension, extensions[j]) == 0) {
1179 valid_extension = TRUE;
1185 if (valid_mimetype || valid_extension) {
1186 ass_add_font (render->ass_library, (gchar *) filename,
1187 (gchar *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
1188 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
/* Event handler for the video sink pad.
 * - NEWSEGMENT: accepted only in TIME format; updates video_segment and
 *   forwards the event downstream.  Non-TIME segments are warned about and
 *   dropped (event unreffed, not forwarded).
 * - TAG: fonts in attachments are registered, then the event is forwarded.
 * - FLUSH_STOP: resets video_segment to a fresh TIME segment, then forwards.
 * Returns the result of pushing the event on the src pad (FALSE if the
 * event was consumed here).  Releases the pad-parent ref before returning. */
1195 gst_ass_render_event_video (GstPad * pad, GstEvent * event)
1197 gboolean ret = FALSE;
1198 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
1200 GST_DEBUG_OBJECT (pad, "received video event %s",
1201 GST_EVENT_TYPE_NAME (event));
1203 switch (GST_EVENT_TYPE (event)) {
1204 case GST_EVENT_NEWSEGMENT:
1208 gint64 start, stop, time;
1211 GST_DEBUG_OBJECT (render, "received new segment");
1213 gst_event_parse_new_segment (event, &update, &rate, &format, &start,
1216 if (format == GST_FORMAT_TIME) {
1217 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1218 &render->video_segment);
/* Accumulate the new segment into our running video segment. */
1220 gst_segment_set_newsegment (&render->video_segment, update, rate,
1221 format, start, stop, time);
1223 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
1224 &render->video_segment);
/* Ownership of the event transfers downstream here. */
1225 ret = gst_pad_push_event (render->srcpad, event);
1227 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1228 ("received non-TIME newsegment event on video input"));
/* Non-TIME segment: consume the event instead of forwarding it. */
1230 gst_event_unref (event);
1236 GstTagList *taglist = NULL;
1238 /* tag events may contain attachments which might be fonts */
1239 GST_DEBUG_OBJECT (render, "got TAG event");
/* taglist remains owned by the event; handle_tags only reads it. */
1241 gst_event_parse_tag (event, &taglist);
1242 gst_ass_render_handle_tags (render, taglist);
1243 ret = gst_pad_push_event (render->srcpad, event);
1246 case GST_EVENT_FLUSH_STOP:
/* Discard accumulated segment state after a flush. */
1247 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
1249 ret = gst_pad_push_event (render->srcpad, event);
/* Drop the ref taken by gst_pad_get_parent() above. */
1253 gst_object_unref (render);
/* Event handler for the text (subtitle) sink pad.
 * Unlike the video handler, most events here are consumed rather than
 * forwarded: the subtitle stream terminates inside this element.
 * - NEWSEGMENT (TIME): updates subtitle_segment, consumes the event;
 *   non-TIME segments are warned about and consumed as well.
 * - FLUSH_STOP: resets subtitle_segment, clears the flushing flag, consumes.
 * - FLUSH_START: drops all libass events and any pending subtitle buffer,
 *   sets the flushing flag and wakes any waiter on subtitle_cond, consumes.
 * - EOS: logged and consumed.
 * - TAG: fonts registered, then forwarded downstream.
 * Releases the pad-parent ref before returning. */
1259 gst_ass_render_event_text (GstPad * pad, GstEvent * event)
1262 gboolean ret = FALSE;
1263 GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
1265 GST_DEBUG_OBJECT (pad, "received text event %s", GST_EVENT_TYPE_NAME (event));
1267 switch (GST_EVENT_TYPE (event)) {
1268 case GST_EVENT_NEWSEGMENT:
1272 gint64 start, stop, time;
1275 GST_DEBUG_OBJECT (render, "received new segment");
1277 gst_event_parse_new_segment (event, &update, &rate, &format, &start,
1280 if (format == GST_FORMAT_TIME) {
1281 GST_DEBUG_OBJECT (render, "SUBTITLE SEGMENT now: %" GST_SEGMENT_FORMAT,
1282 &render->subtitle_segment);
1284 gst_segment_set_newsegment (&render->subtitle_segment, update, rate,
1285 format, start, stop, time);
1287 GST_DEBUG_OBJECT (render,
1288 "SUBTITLE SEGMENT after: %" GST_SEGMENT_FORMAT,
1289 &render->subtitle_segment);
/* Subtitle newsegments are not forwarded; the src pad carries the
 * video segment. */
1291 gst_event_unref (event);
1293 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1294 ("received non-TIME newsegment event on subtitle input"));
1296 gst_event_unref (event);
1300 case GST_EVENT_FLUSH_STOP:
1301 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
1302 render->subtitle_flushing = FALSE;
1303 gst_event_unref (event);
1306 case GST_EVENT_FLUSH_START:
1307 GST_DEBUG_OBJECT (render, "begin flushing");
1308 if (render->ass_track) {
/* Object lock guards the ass_track against concurrent use by the
 * video rendering path. */
1309 GST_OBJECT_LOCK (render);
1310 /* delete any events on the ass_track */
1311 for (i = 0; i < render->ass_track->n_events; i++) {
1312 GST_DEBUG_OBJECT (render, "deleted event with eid %i", i);
1313 ass_free_event (render->ass_track, i);
1315 render->ass_track->n_events = 0;
1316 GST_OBJECT_UNLOCK (render);
1317 GST_DEBUG_OBJECT (render, "done flushing");
/* Drop any queued subtitle buffer and wake the text chain function,
 * which presumably blocks on subtitle_cond waiting for it to be
 * consumed — confirm against the elided chain implementation. */
1319 g_mutex_lock (render->subtitle_mutex);
1320 if (render->subtitle_pending)
1321 gst_buffer_unref (render->subtitle_pending);
1322 render->subtitle_pending = NULL;
1323 render->subtitle_flushing = TRUE;
1324 g_cond_signal (render->subtitle_cond);
1325 g_mutex_unlock (render->subtitle_mutex);
1326 gst_event_unref (event);
1330 GST_OBJECT_LOCK (render);
1331 GST_INFO_OBJECT (render, "text EOS");
1332 GST_OBJECT_UNLOCK (render);
/* EOS on the subtitle stream is consumed; video EOS drives shutdown. */
1333 gst_event_unref (event);
1338 GstTagList *taglist = NULL;
1340 /* tag events may contain attachments which might be fonts */
1341 GST_DEBUG_OBJECT (render, "got TAG event");
1343 gst_event_parse_tag (event, &taglist);
1344 gst_ass_render_handle_tags (render, taglist);
1345 ret = gst_pad_push_event (render->srcpad, event);
/* Any other event type is forwarded downstream unchanged. */
1349 ret = gst_pad_push_event (render->srcpad, event);
/* Drop the ref taken by gst_pad_get_parent() above. */
1353 gst_object_unref (render);
/* Plugin entry point: set up the two debug categories (one for this
 * element, one for log output routed from the libass library) and
 * register the "assrender" element with primary rank. */
1359 plugin_init (GstPlugin * plugin)
1361 GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender",
1362 0, "ASS/SSA subtitle renderer");
1363 GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",
1364 0, "ASS/SSA subtitle renderer library");
1366 return gst_element_register (plugin, "assrender",
1367 GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER);
/* Standard GStreamer plugin descriptor: exports plugin metadata and the
 * plugin_init entry point above to the plugin loader. */
1370 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1373 "ASS/SSA subtitle renderer",
1374 plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)