2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:element-assrender
24 * Renders timestamped SSA/ASS subtitles on top of a video stream.
27 * <title>Example launch line</title>
29 * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! avdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
34 /* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
35 * with newer GLib versions (>= 2.31.0) */
36 #define GLIB_DISABLE_DEPRECATION_WARNINGS
42 #include "gstassrender.h"
46 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
47 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
48 #define GST_CAT_DEFAULT gst_ass_render_debug
50 /* Filter signals and props */
/* Video formats this element can blend subtitles into directly; each one
 * has a matching blit_* implementation below. */
63 #define FORMATS "{ RGB, BGR, xRGB, xBGR, RGBx, BGRx, I420 }"
/* Source pad: pushes video frames with the subtitles composited on top. */
65 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
68     GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
/* Video sink pad: receives the raw video stream to render onto. */
71 static GstStaticPadTemplate video_sink_factory =
72     GST_STATIC_PAD_TEMPLATE ("video_sink",
75     GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
/* Text sink pad: receives ASS/SSA subtitle buffers. */
78 static GstStaticPadTemplate text_sink_factory =
79     GST_STATIC_PAD_TEMPLATE ("text_sink",
82     GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
/* Wrappers around the element's mutex/cond; the cond is used to block the
 * video chain until a subtitle buffer arrives, and is broadcast on
 * flush/seek/state change to wake any waiter (see chain_video). */
85 #define GST_ASS_RENDER_GET_LOCK(ass) (&GST_ASS_RENDER (ass)->lock)
86 #define GST_ASS_RENDER_GET_COND(ass) (&GST_ASS_RENDER (ass)->cond)
87 #define GST_ASS_RENDER_LOCK(ass) (g_mutex_lock (GST_ASS_RENDER_GET_LOCK (ass)))
88 #define GST_ASS_RENDER_UNLOCK(ass) (g_mutex_unlock (GST_ASS_RENDER_GET_LOCK (ass)))
89 #define GST_ASS_RENDER_WAIT(ass) (g_cond_wait (GST_ASS_RENDER_GET_COND (ass), GST_ASS_RENDER_GET_LOCK (ass)))
90 #define GST_ASS_RENDER_SIGNAL(ass) (g_cond_signal (GST_ASS_RENDER_GET_COND (ass)))
91 #define GST_ASS_RENDER_BROADCAST(ass)(g_cond_broadcast (GST_ASS_RENDER_GET_COND (ass)))
/* GObject vmethod implementations. */
93 static void gst_ass_render_set_property (GObject * object, guint prop_id,
94     const GValue * value, GParamSpec * pspec);
95 static void gst_ass_render_get_property (GObject * object, guint prop_id,
96     GValue * value, GParamSpec * pspec);
98 static void gst_ass_render_finalize (GObject * object);
100 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
101     GstStateChange transition);
103 #define gst_ass_render_parent_class parent_class
104 G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);
/* Pad functions: caps negotiation, buffer flow, events and queries. */
106 static GstCaps *gst_ass_render_getcaps (GstPad * pad, GstCaps * filter);
108 static gboolean gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps);
109 static gboolean gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps);
111 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
112     GstObject * parent, GstBuffer * buf);
113 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad,
114     GstObject * parent, GstBuffer * buf);
116 static gboolean gst_ass_render_event_video (GstPad * pad, GstObject * parent,
118 static gboolean gst_ass_render_event_text (GstPad * pad, GstObject * parent,
120 static gboolean gst_ass_render_event_src (GstPad * pad, GstObject * parent,
123 static gboolean gst_ass_render_query_video (GstPad * pad, GstObject * parent,
125 static gboolean gst_ass_render_query_src (GstPad * pad, GstObject * parent,
/* Class initializer: wires up GObject property/finalize vmethods, installs
 * the "enable" and "embeddedfonts" boolean properties (both default TRUE),
 * registers the three pad templates and the element metadata. */
130 gst_ass_render_class_init (GstAssRenderClass * klass)
132   GObjectClass *gobject_class = (GObjectClass *) klass;
133   GstElementClass *gstelement_class = (GstElementClass *) klass;
135   gobject_class->set_property = gst_ass_render_set_property;
136   gobject_class->get_property = gst_ass_render_get_property;
137   gobject_class->finalize = gst_ass_render_finalize;
/* "enable": toggles subtitle rendering (passthrough when FALSE). */
139   g_object_class_install_property (gobject_class, PROP_ENABLE,
140       g_param_spec_boolean ("enable", "Enable",
141           "Enable rendering of subtitles", TRUE,
142           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* "embeddedfonts": forwarded to libass' font extraction (see set_property). */
143   g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
144       g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
145           "Extract and use fonts embedded in the stream", TRUE,
146           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
148   gstelement_class->change_state =
149       GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
151   gst_element_class_add_pad_template (gstelement_class,
152       gst_static_pad_template_get (&src_factory));
153   gst_element_class_add_pad_template (gstelement_class,
154       gst_static_pad_template_get (&video_sink_factory));
155   gst_element_class_add_pad_template (gstelement_class,
156       gst_static_pad_template_get (&text_sink_factory));
158   gst_element_class_set_static_metadata (gstelement_class, "ASS/SSA Render",
159       "Mixer/Video/Overlay/Subtitle",
160       "Renders ASS/SSA subtitles with libass",
161       "Benjamin Schmitz <vortex@wolpzone.de>, "
162       "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
/* libass >= 0.9.7 message callback: formats the libass log message and
 * forwards it to the gst_ass_render_lib_debug category at a severity
 * matching the libass level.  The level thresholds live on case labels
 * not visible here — presumably lower level == more severe, per libass
 * convention (TODO confirm against the full source). */
165 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
167 _libass_message_cb (gint level, const gchar * fmt, va_list args,
170   gchar *message = g_strdup_vprintf (fmt, args);
173     GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
175     GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
177     GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
179     GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
181     GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
/* Instance initializer: creates the three pads with their chain/event/query
 * functions, initializes the locks, segments and default property values,
 * and brings up the libass library + renderer instances. */
188 gst_ass_render_init (GstAssRender * render)
190   GST_DEBUG_OBJECT (render, "init");
192   render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
193   render->video_sinkpad =
194       gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
195   render->text_sinkpad =
196       gst_pad_new_from_static_template (&text_sink_factory, "text_sink");
198   gst_pad_set_chain_function (render->video_sinkpad,
199       GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
200   gst_pad_set_chain_function (render->text_sinkpad,
201       GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));
203   gst_pad_set_event_function (render->video_sinkpad,
204       GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
205   gst_pad_set_event_function (render->text_sinkpad,
206       GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
207   gst_pad_set_event_function (render->srcpad,
208       GST_DEBUG_FUNCPTR (gst_ass_render_event_src));
210   gst_pad_set_query_function (render->srcpad,
211       GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
212   gst_pad_set_query_function (render->video_sinkpad,
213       GST_DEBUG_FUNCPTR (gst_ass_render_query_video));
/* Let downstream allocation queries pass through to the video upstream. */
215   GST_PAD_SET_PROXY_ALLOCATION (render->video_sinkpad);
217   gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
218   gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
219   gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);
221   gst_video_info_init (&render->info);
223   g_mutex_init (&render->lock);
224   g_cond_init (&render->cond);
226   render->renderer_init_ok = FALSE;
227   render->track_init_ok = FALSE;
228   render->enable = TRUE;
229   render->embeddedfonts = TRUE;
231   gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
232   gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
/* ass_mutex serializes all libass calls; libass is not thread-safe. */
234   g_mutex_init (&render->ass_mutex);
235   render->ass_library = ass_library_init ();
236 #if defined(LIBASS_VERSION) && LIBASS_VERSION >= 0x00907000
237   ass_set_message_cb (render->ass_library, _libass_message_cb, render);
239   ass_set_extract_fonts (render->ass_library, 1);
241   render->ass_renderer = ass_renderer_init (render->ass_library);
242   if (!render->ass_renderer) {
243     GST_WARNING_OBJECT (render, "cannot create renderer instance");
/* NOTE(review): aborting on renderer-init failure is harsh for a plugin;
 * a graceful error path would be preferable. */
244     g_assert_not_reached ();
247   render->ass_track = NULL;
249   GST_DEBUG_OBJECT (render, "init complete");
/* GObject::finalize — tears down locks and releases libass resources
 * (track, renderer, library) in reverse order of creation, then chains
 * up to the parent class. */
253 gst_ass_render_finalize (GObject * object)
255   GstAssRender *render = GST_ASS_RENDER (object);
257   g_mutex_clear (&render->lock);
258   g_cond_clear (&render->cond);
260   if (render->ass_track) {
261     ass_free_track (render->ass_track);
264   if (render->ass_renderer) {
265     ass_renderer_done (render->ass_renderer);
268   if (render->ass_library) {
269     ass_library_done (render->ass_library);
272   g_mutex_clear (&render->ass_mutex);
274   G_OBJECT_CLASS (parent_class)->finalize (object);
/* GObject::set_property — stores property values under the render lock;
 * "embeddedfonts" is additionally pushed into libass right away (under
 * ass_mutex, since libass calls must be serialized). */
278 gst_ass_render_set_property (GObject * object, guint prop_id,
279     const GValue * value, GParamSpec * pspec)
281   GstAssRender *render = GST_ASS_RENDER (object);
283   GST_ASS_RENDER_LOCK (render);
286       render->enable = g_value_get_boolean (value);
288     case PROP_EMBEDDEDFONTS:
289       render->embeddedfonts = g_value_get_boolean (value);
290       g_mutex_lock (&render->ass_mutex);
291       ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
292       g_mutex_unlock (&render->ass_mutex);
295       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
298   GST_ASS_RENDER_UNLOCK (render);
/* GObject::get_property — reads property values under the render lock. */
302 gst_ass_render_get_property (GObject * object, guint prop_id,
303     GValue * value, GParamSpec * pspec)
305   GstAssRender *render = GST_ASS_RENDER (object);
307   GST_ASS_RENDER_LOCK (render);
310       g_value_set_boolean (value, render->enable);
312     case PROP_EMBEDDEDFONTS:
313       g_value_set_boolean (value, render->embeddedfonts);
316       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
319   GST_ASS_RENDER_UNLOCK (render);
/* GstElement::change_state.
 * Downward PAUSED->READY (before chaining up): mark both streams flushing,
 * drop any queued subtitle buffer and broadcast the cond so a video chain
 * blocked in GST_ASS_RENDER_WAIT can exit.  After chaining up: free the
 * ass track (PAUSED->READY) or reset flush/EOS flags and segments
 * (READY->PAUSED). */
322 static GstStateChangeReturn
323 gst_ass_render_change_state (GstElement * element, GstStateChange transition)
325   GstAssRender *render = GST_ASS_RENDER (element);
326   GstStateChangeReturn ret;
328   switch (transition) {
329     case GST_STATE_CHANGE_PAUSED_TO_READY:
330       GST_ASS_RENDER_LOCK (render);
331       render->subtitle_flushing = TRUE;
332       render->video_flushing = TRUE;
333       if (render->subtitle_pending)
334         gst_buffer_unref (render->subtitle_pending);
335       render->subtitle_pending = NULL;
/* Wake up a video chain waiting for a subtitle buffer. */
336       GST_ASS_RENDER_BROADCAST (render);
337       GST_ASS_RENDER_UNLOCK (render);
343   ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
344   if (ret == GST_STATE_CHANGE_FAILURE)
347   switch (transition) {
348     case GST_STATE_CHANGE_PAUSED_TO_READY:
349       g_mutex_lock (&render->ass_mutex);
350       if (render->ass_track)
351         ass_free_track (render->ass_track);
352       render->ass_track = NULL;
353       render->track_init_ok = FALSE;
354       render->renderer_init_ok = FALSE;
355       g_mutex_unlock (&render->ass_mutex);
357     case GST_STATE_CHANGE_READY_TO_PAUSED:
358       GST_ASS_RENDER_LOCK (render);
359       render->subtitle_flushing = FALSE;
360       render->video_flushing = FALSE;
361       render->video_eos = FALSE;
362       render->subtitle_eos = FALSE;
363       gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
364       gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
365       GST_ASS_RENDER_UNLOCK (render);
/* Src pad query handler: answers CAPS queries via gst_ass_render_getcaps
 * (proxying what the opposite pad's peer supports), everything else goes
 * to the default handler. */
376 gst_ass_render_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
378   gboolean res = FALSE;
380   switch (GST_QUERY_TYPE (query)) {
383       GstCaps *filter, *caps;
385       gst_query_parse_caps (query, &filter);
386       caps = gst_ass_render_getcaps (pad, filter);
387       gst_query_set_caps_result (query, caps);
388       gst_caps_unref (caps);
393       res = gst_pad_query_default (pad, parent, query);
/* Src pad event handler.
 * SEEK: if a subtitle track exists, flush downstream (for flushing seeks),
 * mark both streams flushing, drop the pending subtitle buffer, wake any
 * blocked chain, then forward the seek to both sink pads.  Without a track
 * the seek is simply pushed upstream on the video pad.  Other events are
 * forwarded to both sinks when a track exists, else only to video. */
401 gst_ass_render_event_src (GstPad * pad, GstObject * parent, GstEvent * event)
403   GstAssRender *render = GST_ASS_RENDER (parent);
404   gboolean ret = FALSE;
406   GST_DEBUG_OBJECT (render, "received src event %" GST_PTR_FORMAT, event);
408   switch (GST_EVENT_TYPE (event)) {
409     case GST_EVENT_SEEK:{
412       if (!render->track_init_ok) {
413         GST_DEBUG_OBJECT (render, "seek received, pushing upstream");
414         ret = gst_pad_push_event (render->video_sinkpad, event);
418       GST_DEBUG_OBJECT (render, "seek received, driving from here");
420       gst_event_parse_seek (event, NULL, NULL, &flags, NULL, NULL, NULL, NULL);
422       /* Flush downstream, only for flushing seek */
423       if (flags & GST_SEEK_FLAG_FLUSH)
424         gst_pad_push_event (render->srcpad, gst_event_new_flush_start ());
426       /* Mark subtitle as flushing, unblocks chains */
427       GST_ASS_RENDER_LOCK (render);
428       render->subtitle_flushing = TRUE;
429       render->video_flushing = TRUE;
430       if (render->subtitle_pending)
431         gst_buffer_unref (render->subtitle_pending);
432       render->subtitle_pending = NULL;
433       GST_ASS_RENDER_BROADCAST (render);
434       GST_ASS_RENDER_UNLOCK (render);
436       /* Seek on each sink pad */
/* ref before the first push: each gst_pad_push_event consumes a ref. */
437       gst_event_ref (event);
438       ret = gst_pad_push_event (render->video_sinkpad, event);
440         ret = gst_pad_push_event (render->text_sinkpad, event);
442         gst_event_unref (event);
447       if (render->track_init_ok) {
448         gst_event_ref (event);
449         ret = gst_pad_push_event (render->video_sinkpad, event);
450         gst_pad_push_event (render->text_sinkpad, event);
452         ret = gst_pad_push_event (render->video_sinkpad, event);
/* Caps query helper: the element is a passthrough for video caps, so the
 * caps on one side are whatever the peer of the opposite pad supports,
 * intersected with our pad template; with no peer the template alone is
 * returned. */
461 gst_ass_render_getcaps (GstPad * pad, GstCaps * filter)
463   GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
/* Pick the pad on the opposite side of the element. */
468   if (pad == render->srcpad)
469     otherpad = render->video_sinkpad;
471     otherpad = render->srcpad;
473   templ = gst_pad_get_pad_template_caps (otherpad);
475   /* we can do what the peer can */
476   caps = gst_pad_peer_query_caps (otherpad, filter);
480     /* filtered against our padtemplate */
481     temp = gst_caps_intersect (caps, templ);
482     gst_caps_unref (caps);
483     gst_caps_unref (templ);
484     /* this is what we can do */
487     /* no peer, our padtemplate is enough then */
491   gst_object_unref (render);
/* CREATE_RGB_BLIT_FUNCTION(name, bpp, R, G, B)
 * Expands to blit_<name>(), which alpha-blends a linked list of libass
 * ASS_Image bitmaps onto a packed-RGB video frame.  bpp is bytes per
 * pixel, R/G/B are the byte offsets of each channel within a pixel, so
 * one macro covers RGB/BGR and all four 32-bit layouts below.
 *
 * Per image: the 32-bit ass_image->color is 0xRRGGBBAA (AA is inverse
 * alpha, hence 255 - (color & 0xff)); the bitmap is an 8-bit coverage
 * mask blended as dst = (k*src_color + (255-k)*dst) / 255.  Width/height
 * are clamped so partially visible images don't write out of bounds.
 *
 * NOTE(review): the skip test uses '>' not '>=', so an image positioned
 * exactly at width/height passes the check with a zero-sized clip —
 * presumably harmless since w/h clamp to 0, but worth confirming.
 * (No comments inside the macro body: every line ends in '\'.) */
496 #define CREATE_RGB_BLIT_FUNCTION(name,bpp,R,G,B) \
498 blit_##name (GstAssRender * render, ASS_Image * ass_image, GstVideoFrame * frame) \
501   gint alpha, r, g, b, k; \
503   guint8 *dst, *data; \
511   width = GST_VIDEO_FRAME_WIDTH (frame); \
512   height = GST_VIDEO_FRAME_HEIGHT (frame); \
513   dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0); \
514   data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
516   while (ass_image) { \
517     if (ass_image->dst_y > height || ass_image->dst_x > width) \
520     /* blend subtitles onto the video frame */ \
521     alpha = 255 - ((ass_image->color) & 0xff); \
522     r = ((ass_image->color) >> 24) & 0xff; \
523     g = ((ass_image->color) >> 16) & 0xff; \
524     b = ((ass_image->color) >> 8) & 0xff; \
525     src = ass_image->bitmap; \
526     dst = data + ass_image->dst_y * dst_stride + ass_image->dst_x * bpp; \
528     w = MIN (ass_image->w, width - ass_image->dst_x); \
529     h = MIN (ass_image->h, height - ass_image->dst_y); \
530     src_skip = ass_image->stride - w; \
531     dst_skip = dst_stride - w * bpp; \
533     for (y = 0; y < h; y++) { \
534       for (x = 0; x < w; x++) { \
535         k = src[0] * alpha / 255; \
536         dst[R] = (k * r + (255 - k) * dst[R]) / 255; \
537         dst[G] = (k * g + (255 - k) * dst[G]) / 255; \
538         dst[B] = (k * b + (255 - k) * dst[B]) / 255; \
547     ass_image = ass_image->next; \
549   GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter); \
552 CREATE_RGB_BLIT_FUNCTION (rgb, 3, 0, 1, 2);
553 CREATE_RGB_BLIT_FUNCTION (bgr, 3, 2, 1, 0);
554 CREATE_RGB_BLIT_FUNCTION (xrgb, 4, 1, 2, 3);
555 CREATE_RGB_BLIT_FUNCTION (xbgr, 4, 3, 2, 1);
556 CREATE_RGB_BLIT_FUNCTION (rgbx, 4, 0, 1, 2);
557 CREATE_RGB_BLIT_FUNCTION (bgrx, 4, 2, 1, 0);
559 #undef CREATE_RGB_BLIT_FUNCTION
/* Fixed-point RGB -> Y'CbCr conversion helpers for the I420 blitter.
 * Coefficients are BT.601 weights scaled by 65536 (e.g. 19595/65536 ~=
 * 0.299); results are clamped to the 0..255 byte range. */
562 rgb_to_y (gint r, gint g, gint b)
566   ret = (gint) (((19595 * r) >> 16) + ((38470 * g) >> 16) + ((7471 * b) >> 16));
567   ret = CLAMP (ret, 0, 255);
/* U (Cb) component; the truncated constant term recenters around 128. */
572 rgb_to_u (gint r, gint g, gint b)
577       (gint) (-((11059 * r) >> 16) - ((21709 * g) >> 16) + ((32768 * b) >> 16) +
579   ret = CLAMP (ret, 0, 255);
/* V (Cr) component. */
584 rgb_to_v (gint r, gint g, gint b)
589       (gint) (((32768 * r) >> 16) - ((27439 * g) >> 16) - ((5329 * b) >> 16) +
591   ret = CLAMP (ret, 0, 255);
/* Alpha-blends libass ASS_Image bitmaps onto a planar I420 frame.
 * The subtitle color is converted once per image to Y/U/V; the luma plane
 * is blended per pixel, while the 2x2-subsampled chroma planes accumulate
 * the coverage (k2) of the pixels sharing a chroma sample and blend once
 * per sample.  The odd-first-row / odd-first-column / trailing-row and
 * -column branches below handle images whose origin or size is not
 * 2-aligned with the chroma grid. */
596 blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstVideoFrame * frame)
599   gint alpha, r, g, b, k, k2;
602   guint8 *dst_y, *dst_u, *dst_v;
604   /* FIXME ignoring source image stride might be wrong here */
610   guint8 *y_data, *u_data, *v_data;
611   gint y_stride, u_stride, v_stride;
613   width = GST_VIDEO_FRAME_WIDTH (frame);
614   height = GST_VIDEO_FRAME_HEIGHT (frame);
616   y_data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
617   u_data = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
618   v_data = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
620   y_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
621   u_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);
622   v_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 2);
/* NOTE(review): '>' (not '>=') lets an image at exactly width/height
 * through; w/h then clamp to 0 so nothing is written — confirm intended. */
625     if (ass_image->dst_y > height || ass_image->dst_x > width)
628     /* blend subtitles onto the video frame */
629     alpha = 255 - ((ass_image->color) & 0xff);
630     r = ((ass_image->color) >> 24) & 0xff;
631     g = ((ass_image->color) >> 16) & 0xff;
632     b = ((ass_image->color) >> 8) & 0xff;
/* Convert the subtitle color to Y'CbCr once per image. */
634     Y = rgb_to_y (r, g, b);
635     U = rgb_to_u (r, g, b);
636     V = rgb_to_v (r, g, b);
/* Clip the image against the right/bottom frame edges. */
638     w = MIN (ass_image->w, width - ass_image->dst_x);
639     h = MIN (ass_image->h, height - ass_image->dst_y);
644     src_stride = ass_image->stride;
647     src = ass_image->bitmap;
649     dst_y = y_data + ass_image->dst_y * y_stride + ass_image->dst_x;
650     dst_u = u_data + (ass_image->dst_y / 2) * u_stride + ass_image->dst_x / 2;
651     dst_v = v_data + (ass_image->dst_y / 2) * v_stride + ass_image->dst_x / 2;
/* Luma: straight per-pixel blend with coverage k. */
654     for (y = 0; y < h; y++) {
655       dst_y = y_data + (ass_image->dst_y + y) * y_stride + ass_image->dst_x;
656       for (x = 0; x < w; x++) {
657         k = src[y * ass_image->w + x] * alpha / 255;
658         dst_y[x] = (k * Y + (255 - k) * dst_y[x]) / 255;
/* Chroma: first handle an odd starting row (only the bottom half of the
 * 2x2 chroma cell is covered by this image). */
663     if (ass_image->dst_y & 1) {
664       dst_u = u_data + (ass_image->dst_y / 2) * u_stride + ass_image->dst_x / 2;
665       dst_v = v_data + (ass_image->dst_y / 2) * v_stride + ass_image->dst_x / 2;
/* Odd starting column: single-pixel chroma contribution. */
667       if (ass_image->dst_x & 1) {
668         k2 = src[y * ass_image->w + x] * alpha / 255;
670         dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
671         dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Pairs of horizontally adjacent pixels share one chroma sample. */
676       for (; x < w - 1; x += 2) {
677         k2 = src[y * ass_image->w + x] * alpha / 255;
678         k2 += src[y * ass_image->w + x + 1] * alpha / 255;
680         dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
681         dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Trailing odd column of the first row. */
686         k2 = src[y * ass_image->w + x] * alpha / 255;
688         dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
689         dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Main body: full 2x2 pixel groups per chroma sample. */
693     for (; y < h - 1; y += 2) {
694       dst_u = u_data + ((ass_image->dst_y + y) / 2) * u_stride +
695           ass_image->dst_x / 2;
696       dst_v = v_data + ((ass_image->dst_y + y) / 2) * v_stride +
697           ass_image->dst_x / 2;
699       if (ass_image->dst_x & 1) {
700         k2 = src[y * ass_image->w + x] * alpha / 255;
701         k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
703         dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
704         dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
709       for (; x < w - 1; x += 2) {
/* Accumulate coverage of all four pixels in the 2x2 cell. */
710         k2 = src[y * ass_image->w + x] * alpha / 255;
711         k2 += src[y * ass_image->w + x + 1] * alpha / 255;
712         k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
713         k2 += src[(y + 1) * ass_image->w + x + 1] * alpha / 255;
715         dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
716         dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
721         k2 = src[y * ass_image->w + x] * alpha / 255;
722         k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
724         dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
725         dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
/* Trailing odd row (top half of the last chroma cell only). */
730       dst_u = u_data + (ass_image->dst_y / 2) * u_stride + ass_image->dst_x / 2;
731       dst_v = v_data + (ass_image->dst_y / 2) * v_stride + ass_image->dst_x / 2;
733       if (ass_image->dst_x & 1) {
734         k2 = src[y * ass_image->w + x] * alpha / 255;
736         dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
737         dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
742       for (; x < w - 1; x += 2) {
743         k2 = src[y * ass_image->w + x] * alpha / 255;
744         k2 += src[y * ass_image->w + x + 1] * alpha / 255;
746         dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
747         dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
752         k2 = src[y * ass_image->w + x] * alpha / 255;
754         dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
755         dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
763     ass_image = ass_image->next;
766   GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
/* Video sink setcaps: parses the caps into render->info, forwards the caps
 * downstream, selects the matching blit_* implementation for the pixel
 * format, and (under ass_mutex) configures the libass renderer with frame
 * size, display aspect ratio, font scale/hinting, default fonts and
 * margins.  Sets renderer_init_ok on success. */
770 gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps)
772   GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
773   gboolean ret = FALSE;
774   gint par_n = 1, par_d = 1;
778   if (!gst_video_info_from_caps (&info, caps))
/* Passthrough caps: downstream must accept exactly what we receive. */
783   ret = gst_pad_set_caps (render->srcpad, caps);
/* Pick the blit function for this pixel format (must be one of FORMATS). */
787   switch (GST_VIDEO_INFO_FORMAT (&info)) {
788     case GST_VIDEO_FORMAT_RGB:
789       render->blit = blit_rgb;
791     case GST_VIDEO_FORMAT_BGR:
792       render->blit = blit_bgr;
794     case GST_VIDEO_FORMAT_xRGB:
795       render->blit = blit_xrgb;
797     case GST_VIDEO_FORMAT_xBGR:
798       render->blit = blit_xbgr;
800     case GST_VIDEO_FORMAT_RGBx:
801       render->blit = blit_rgbx;
803     case GST_VIDEO_FORMAT_BGRx:
804       render->blit = blit_bgrx;
806     case GST_VIDEO_FORMAT_I420:
807       render->blit = blit_i420;
814   g_mutex_lock (&render->ass_mutex);
815   ass_set_frame_size (render->ass_renderer, info.width, info.height);
/* Display aspect ratio = PAR * width/height. */
817   dar = (((gdouble) par_n) * ((gdouble) info.width))
818       / (((gdouble) par_d) * ((gdouble) info.height));
/* Older libass takes only the DAR; newer also wants the storage ratio. */
819 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
820   ass_set_aspect_ratio (render->ass_renderer, dar);
822   ass_set_aspect_ratio (render->ass_renderer,
823       dar, ((gdouble) info.width) / ((gdouble) info.height));
825   ass_set_font_scale (render->ass_renderer, 1.0);
826   ass_set_hinting (render->ass_renderer, ASS_HINTING_LIGHT);
/* Default font setup; the ass_set_fonts signature grew extra arguments in
 * libass >= 0.9.7, hence the two variants. */
828 #if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
829   ass_set_fonts (render->ass_renderer, "Arial", "sans-serif");
830   ass_set_fonts (render->ass_renderer, NULL, "Sans");
832   ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
833   ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
835   ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
836   ass_set_use_margins (render->ass_renderer, 0);
837   g_mutex_unlock (&render->ass_mutex);
839   render->renderer_init_ok = TRUE;
841   GST_DEBUG_OBJECT (render, "ass renderer setup complete");
844   gst_object_unref (render);
851   GST_ERROR_OBJECT (render, "Can't parse caps: %" GST_PTR_FORMAT, caps);
/* Text sink setcaps: creates the ASS track and, when the caps carry a
 * "codec_data" buffer (the [Script Info]/[V4+ Styles] header as produced
 * e.g. by matroskademux), feeds it to libass via
 * ass_process_codec_private.  Sets track_init_ok once a track exists. */
858 gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps)
860   GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
861   GstStructure *structure;
865   gboolean ret = FALSE;
867   structure = gst_caps_get_structure (caps, 0);
869   GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,
872   value = gst_structure_get_value (structure, "codec_data");
874   g_mutex_lock (&render->ass_mutex);
876     priv = gst_value_get_buffer (value);
/* NOTE(review): returning here leaves ass_mutex locked and the render
 * ref unreleased — confirm against the full source. */
877     g_return_val_if_fail (priv != NULL, FALSE);
879     gst_buffer_map (priv, &map, GST_MAP_READ);
881     if (!render->ass_track)
882       render->ass_track = ass_new_track (render->ass_library);
884     ass_process_codec_private (render->ass_track, (char *) map.data, map.size);
886     gst_buffer_unmap (priv, &map);
888     GST_DEBUG_OBJECT (render, "ass track created");
890     render->track_init_ok = TRUE;
/* No codec_data: create an empty track, headers will come in-band. */
893   } else if (!render->ass_track) {
894     render->ass_track = ass_new_track (render->ass_library);
896     render->track_init_ok = TRUE;
900   g_mutex_unlock (&render->ass_mutex);
902   gst_object_unref (render);
/* Feeds one subtitle buffer to libass as an ASS event chunk.
 * libass timestamps are in milliseconds, so running time and duration are
 * divided by GST_MSECOND.  Consumes (unrefs) the buffer. */
909 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
910     GstClockTime running_time, GstClockTime duration)
913   gdouble pts_start, pts_end;
915   pts_start = running_time;
916   pts_start /= GST_MSECOND;
918   pts_end /= GST_MSECOND;
920   GST_DEBUG_OBJECT (render,
921       "Processing subtitles with running time %" GST_TIME_FORMAT
922       " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
923       GST_TIME_ARGS (duration));
925   gst_buffer_map (buffer, &map, GST_MAP_READ);
/* Serialize the libass call against other threads touching the track. */
927   g_mutex_lock (&render->ass_mutex);
928   ass_process_chunk (render->ass_track, (gchar *) map.data, map.size,
930   g_mutex_unlock (&render->ass_mutex);
932   gst_buffer_unmap (buffer, &map);
933   gst_buffer_unref (buffer);
/* Video sink chain function — the heart of the element.
 * 1. Clips the buffer against the video segment (dropping out-of-segment
 *    buffers, fixing up timestamp/duration for partial overlap).
 * 2. Under the render lock, checks flushing/EOS, then:
 *    - if rendering is active and a subtitle buffer is pending, compares
 *      subtitle vs. video running times: too-old subtitles are popped,
 *      future subtitles leave the frame rendered with the current libass
 *      state, overlapping subtitles are fed to libass first;
 *    - if no subtitle is pending, blocks on the cond until one arrives,
 *      unless the subtitle stream is at EOS or demonstrably ahead;
 *    - if rendering is disabled/uninitialized, passes the buffer through.
 * 3. Renders via ass_render_frame (timestamps in ms) and blits the result
 *    onto a writable mapped frame before pushing downstream. */
937 gst_ass_render_chain_video (GstPad * pad, GstObject * parent,
940   GstAssRender *render = GST_ASS_RENDER (parent);
941   GstFlowReturn ret = GST_FLOW_OK;
942   gboolean in_seg = FALSE;
943   guint64 start, stop, clip_start = 0, clip_stop = 0;
944   ASS_Image *ass_image;
946   if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
947     goto missing_timestamp;
949   /* ignore buffers that are outside of the current segment */
950   start = GST_BUFFER_TIMESTAMP (buffer);
952   if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
953     stop = GST_CLOCK_TIME_NONE;
955     stop = start + GST_BUFFER_DURATION (buffer);
958   /* segment_clip() will adjust start unconditionally to segment_start if
959    * no stop time is provided, so handle this ourselves */
960   if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
964       gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
965       &clip_start, &clip_stop);
970   /* if the buffer is only partially in the segment, fix up stamps */
971   if (clip_start != start || (stop != -1 && clip_stop != stop)) {
972     GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
973     buffer = gst_buffer_make_writable (buffer);
974     GST_BUFFER_TIMESTAMP (buffer) = clip_start;
976       GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
979   /* now, after we've done the clipping, fix up end time if there's no
980    * duration (we only use those estimated values internally though, we
981    * don't want to set bogus values on the buffer itself) */
983     if (render->info.fps_n && render->info.fps_d) {
984       GST_DEBUG_OBJECT (render, "estimating duration based on framerate");
986           start + gst_util_uint64_scale_int (GST_SECOND, render->info.fps_d,
989       GST_WARNING_OBJECT (render, "no duration, assuming minimal duration");
990       stop = start + 1;         /* we need to assume some interval */
996   GST_ASS_RENDER_LOCK (render);
998   if (render->video_flushing)
1001   if (render->video_eos)
1004   if (render->renderer_init_ok && render->track_init_ok && render->enable) {
1005     /* Text pad linked, check if we have a text buffer queued */
1006     if (render->subtitle_pending) {
1007       gboolean pop_text = FALSE, valid_text_time = TRUE;
1008       GstClockTime text_start = GST_CLOCK_TIME_NONE;
1009       GstClockTime text_end = GST_CLOCK_TIME_NONE;
1010       GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
1011       GstClockTime text_running_time_end = GST_CLOCK_TIME_NONE;
1012       GstClockTime vid_running_time, vid_running_time_end;
1014       /* if the text buffer isn't stamped right, pop it off the
1015        * queue and display it for the current video frame only */
1016       if (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending) ||
1017           !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending)) {
1018         GST_WARNING_OBJECT (render,
1019             "Got text buffer with invalid timestamp or duration");
1020         valid_text_time = FALSE;
1022         text_start = GST_BUFFER_TIMESTAMP (render->subtitle_pending);
1023         text_end = text_start + GST_BUFFER_DURATION (render->subtitle_pending);
/* Convert video buffer start/end to running time for comparison. */
1027           gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1029       vid_running_time_end =
1030           gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1033       /* If timestamp and duration are valid */
1034       if (valid_text_time) {
1036             gst_segment_to_running_time (&render->video_segment,
1037             GST_FORMAT_TIME, text_start);
1038         text_running_time_end =
1039             gst_segment_to_running_time (&render->video_segment,
1040             GST_FORMAT_TIME, text_end);
1043       GST_LOG_OBJECT (render, "T: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1044           GST_TIME_ARGS (text_running_time),
1045           GST_TIME_ARGS (text_running_time_end));
1046       GST_LOG_OBJECT (render, "V: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1047           GST_TIME_ARGS (vid_running_time),
1048           GST_TIME_ARGS (vid_running_time_end));
1050       /* Text too old or in the future */
1051       if (valid_text_time && text_running_time_end <= vid_running_time) {
1052         /* text buffer too old, get rid of it and do nothing */
1053         GST_DEBUG_OBJECT (render, "text buffer too old, popping");
1055         gst_buffer_unref (render->subtitle_pending);
1056         render->subtitle_pending = NULL;
/* Wake the text chain so it can queue the next subtitle. */
1057         GST_ASS_RENDER_BROADCAST (render);
1058         GST_ASS_RENDER_UNLOCK (render);
1059         goto wait_for_text_buf;
1060       } else if (valid_text_time && vid_running_time_end <= text_running_time) {
/* Subtitle lies entirely in the future: render the frame with whatever
 * libass already holds, keep the pending buffer queued. */
1063         GST_ASS_RENDER_UNLOCK (render);
/* libass needs timestamps in ms */
1065         timestamp = vid_running_time / GST_MSECOND;
1067         g_mutex_lock (&render->ass_mutex);
1069         /* not sure what the last parameter to this call is for (detect_change) */
1070         ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
1073         g_mutex_unlock (&render->ass_mutex);
1075         if (ass_image != NULL) {
1076           GstVideoFrame frame;
1078           buffer = gst_buffer_make_writable (buffer);
1079           gst_video_frame_map (&frame, &render->info, buffer, GST_MAP_WRITE);
1080           render->blit (render, ass_image, &frame);
1081           gst_video_frame_unmap (&frame);
1083           GST_LOG_OBJECT (render, "nothing to render right now");
1086         /* Push the video frame */
1087         ret = gst_pad_push (render->srcpad, buffer);
/* Overlapping subtitle: feed it to libass (consumes the buffer), then
 * render this frame. */
1091         gst_ass_render_process_text (render, render->subtitle_pending,
1092             text_running_time, text_running_time_end - text_running_time);
1093         render->subtitle_pending = NULL;
1094         GST_ASS_RENDER_BROADCAST (render);
1095         GST_ASS_RENDER_UNLOCK (render);
1097         /* libass needs timestamps in ms */
1098         timestamp = vid_running_time / GST_MSECOND;
1100         g_mutex_lock (&render->ass_mutex);
1101         /* not sure what the last parameter to this call is for (detect_change) */
1102         ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
1104         g_mutex_unlock (&render->ass_mutex);
1106         if (ass_image != NULL) {
1107           GstVideoFrame frame;
1109           buffer = gst_buffer_make_writable (buffer);
1110           gst_video_frame_map (&frame, &render->info, buffer, GST_MAP_WRITE);
1111           render->blit (render, ass_image, &frame);
1112           gst_video_frame_unmap (&frame);
1114           GST_DEBUG_OBJECT (render, "nothing to render right now");
1117         ret = gst_pad_push (render->srcpad, buffer);
1119         if (valid_text_time && text_running_time_end <= vid_running_time_end) {
1120           GST_LOG_OBJECT (render, "text buffer not needed any longer");
/* Drop the consumed subtitle and unblock the text chain. */
1125           GST_ASS_RENDER_LOCK (render);
1126           if (render->subtitle_pending)
1127             gst_buffer_unref (render->subtitle_pending);
1128           render->subtitle_pending = NULL;
1129           GST_ASS_RENDER_BROADCAST (render);
1130           GST_ASS_RENDER_UNLOCK (render);
/* No subtitle queued: decide whether to wait for one. */
1133       gboolean wait_for_text_buf = TRUE;
1135       if (render->subtitle_eos)
1136         wait_for_text_buf = FALSE;
1138       /* Text pad linked, but no text buffer available - what now? */
1139       if (render->subtitle_segment.format == GST_FORMAT_TIME) {
1140         GstClockTime text_start_running_time, text_last_stop_running_time;
1141         GstClockTime vid_running_time;
1144             gst_segment_to_running_time (&render->video_segment,
1145             GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
1146         text_start_running_time =
1147             gst_segment_to_running_time (&render->subtitle_segment,
1148             GST_FORMAT_TIME, render->subtitle_segment.start);
1149         text_last_stop_running_time =
1150             gst_segment_to_running_time (&render->subtitle_segment,
1151             GST_FORMAT_TIME, render->subtitle_segment.position);
/* Don't wait if the subtitle stream is known to be ahead of this frame. */
1153         if ((GST_CLOCK_TIME_IS_VALID (text_start_running_time) &&
1154                 vid_running_time < text_start_running_time) ||
1155             (GST_CLOCK_TIME_IS_VALID (text_last_stop_running_time) &&
1156                 vid_running_time < text_last_stop_running_time)) {
1157           wait_for_text_buf = FALSE;
1161       if (wait_for_text_buf) {
1162         GST_DEBUG_OBJECT (render, "no text buffer, need to wait for one");
/* Blocks until the text chain queues a buffer or a flush broadcasts. */
1163         GST_ASS_RENDER_WAIT (render);
1164         GST_DEBUG_OBJECT (render, "resuming");
1165         GST_ASS_RENDER_UNLOCK (render);
1166         goto wait_for_text_buf;
1168         GST_ASS_RENDER_UNLOCK (render);
1169         GST_LOG_OBJECT (render, "no need to wait for a text buffer");
1170         ret = gst_pad_push (render->srcpad, buffer);
/* Rendering disabled or not initialized: plain passthrough. */
1174     GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");
1176     GST_ASS_RENDER_UNLOCK (render);
1177     ret = gst_pad_push (render->srcpad, buffer);
1181   GST_DEBUG_OBJECT (render, "leaving chain for buffer %p ret=%d", buffer, ret);
1183   /* Update last_stop */
1184   render->video_segment.position = clip_start;
/* Error/exit paths below: each drops the buffer with an explanation. */
1190   GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
1191   gst_buffer_unref (buffer);
1196   GST_ASS_RENDER_UNLOCK (render);
1197   GST_DEBUG_OBJECT (render, "flushing, discarding buffer");
1198   gst_buffer_unref (buffer);
1199   return GST_FLOW_FLUSHING;
1203   GST_ASS_RENDER_UNLOCK (render);
1204   GST_DEBUG_OBJECT (render, "eos, discarding buffer");
1205   gst_buffer_unref (buffer);
1206   return GST_FLOW_EOS;
1210   GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
1211   gst_buffer_unref (buffer);
1216 static GstFlowReturn
1217 gst_ass_render_chain_text (GstPad * pad, GstObject * parent, GstBuffer * buffer)
1219 GstFlowReturn ret = GST_FLOW_OK;
1220 GstAssRender *render = GST_ASS_RENDER (parent);
1221 gboolean in_seg = FALSE;
1222 guint64 clip_start = 0, clip_stop = 0;
1224 GST_DEBUG_OBJECT (render, "entering chain for buffer %p", buffer);
1226 GST_ASS_RENDER_LOCK (render);
1228 if (render->subtitle_flushing) {
1229 GST_ASS_RENDER_UNLOCK (render);
1230 ret = GST_FLOW_FLUSHING;
1231 GST_LOG_OBJECT (render, "text flushing");
1235 if (render->subtitle_eos) {
1236 GST_ASS_RENDER_UNLOCK (render);
1238 GST_LOG_OBJECT (render, "text EOS");
1242 if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
1245 if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
1246 stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
1248 stop = GST_CLOCK_TIME_NONE;
1250 in_seg = gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME,
1251 GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
1257 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1258 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1259 else if (GST_BUFFER_DURATION_IS_VALID (buffer))
1260 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1262 if (render->subtitle_pending
1263 && (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending)
1264 || !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending))) {
1265 gst_buffer_unref (render->subtitle_pending);
1266 render->subtitle_pending = NULL;
1267 GST_ASS_RENDER_BROADCAST (render);
1269 /* Wait for the previous buffer to go away */
1270 while (render->subtitle_pending != NULL) {
1271 GST_DEBUG ("Pad %s:%s has a buffer queued, waiting",
1272 GST_DEBUG_PAD_NAME (pad));
1273 GST_ASS_RENDER_WAIT (render);
1274 GST_DEBUG ("Pad %s:%s resuming", GST_DEBUG_PAD_NAME (pad));
1275 if (render->subtitle_flushing) {
1276 GST_ASS_RENDER_UNLOCK (render);
1277 ret = GST_FLOW_FLUSHING;
1283 if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1284 render->subtitle_segment.position = clip_start;
1286 GST_DEBUG_OBJECT (render,
1287 "New buffer arrived for timestamp %" GST_TIME_FORMAT,
1288 GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
1289 render->subtitle_pending = gst_buffer_ref (buffer);
1291 /* in case the video chain is waiting for a text buffer, wake it up */
1292 GST_ASS_RENDER_BROADCAST (render);
1295 GST_ASS_RENDER_UNLOCK (render);
1298 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p", buffer);
1300 gst_buffer_unref (buffer);
1305 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1307 static const gchar *mimetypes[] = {
1308 "application/x-font-ttf",
1309 "application/x-font-otf",
1310 "application/x-truetype-font"
1312 static const gchar *extensions[] = {
1321 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1322 if (tag_size > 0 && render->embeddedfonts) {
1325 const GstStructure *structure;
1326 gboolean valid_mimetype, valid_extension;
1328 const gchar *filename;
1332 GST_DEBUG_OBJECT (render, "TAG event has attachments");
1334 for (index = 0; index < tag_size; index++) {
1335 if (!gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, index,
1338 buf = gst_sample_get_buffer (sample);
1339 structure = gst_sample_get_info (sample);
1340 if (!buf || !structure)
1343 valid_mimetype = FALSE;
1344 valid_extension = FALSE;
1346 for (j = 0; j < G_N_ELEMENTS (mimetypes); j++) {
1347 if (gst_structure_has_name (structure, mimetypes[j])) {
1348 valid_mimetype = TRUE;
1352 filename = gst_structure_get_string (structure, "filename");
1356 if (!valid_mimetype) {
1357 guint len = strlen (filename);
1358 const gchar *extension = filename + len - 4;
1359 for (j = 0; j < G_N_ELEMENTS (extensions); j++) {
1360 if (g_ascii_strcasecmp (extension, extensions[j]) == 0) {
1361 valid_extension = TRUE;
1367 if (valid_mimetype || valid_extension) {
1368 g_mutex_lock (&render->ass_mutex);
1369 gst_buffer_map (buf, &map, GST_MAP_READ);
1370 ass_add_font (render->ass_library, (gchar *) filename,
1371 (gchar *) map.data, map.size);
1372 gst_buffer_unmap (buf, &map);
1373 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1374 g_mutex_unlock (&render->ass_mutex);
1381 gst_ass_render_event_video (GstPad * pad, GstObject * parent, GstEvent * event)
1383 gboolean ret = FALSE;
1384 GstAssRender *render = GST_ASS_RENDER (parent);
1386 GST_DEBUG_OBJECT (pad, "received video event %" GST_PTR_FORMAT, event);
1388 switch (GST_EVENT_TYPE (event)) {
1389 case GST_EVENT_CAPS:
1393 gst_event_parse_caps (event, &caps);
1394 ret = gst_ass_render_setcaps_video (pad, caps);
1395 gst_event_unref (event);
1398 case GST_EVENT_SEGMENT:
1402 GST_DEBUG_OBJECT (render, "received new segment");
1404 gst_event_copy_segment (event, &segment);
1406 if (segment.format == GST_FORMAT_TIME) {
1407 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1408 &render->video_segment);
1410 render->video_segment = segment;
1412 GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
1413 &render->video_segment);
1414 ret = gst_pad_event_default (pad, parent, event);
1416 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1417 ("received non-TIME newsegment event on video input"));
1419 gst_event_unref (event);
1425 GstTagList *taglist = NULL;
1427 /* tag events may contain attachments which might be fonts */
1428 GST_DEBUG_OBJECT (render, "got TAG event");
1430 gst_event_parse_tag (event, &taglist);
1431 gst_ass_render_handle_tags (render, taglist);
1432 ret = gst_pad_event_default (pad, parent, event);
1436 GST_ASS_RENDER_LOCK (render);
1437 GST_INFO_OBJECT (render, "video EOS");
1438 render->video_eos = TRUE;
1439 GST_ASS_RENDER_UNLOCK (render);
1440 ret = gst_pad_event_default (pad, parent, event);
1442 case GST_EVENT_FLUSH_START:
1443 GST_ASS_RENDER_LOCK (render);
1444 GST_INFO_OBJECT (render, "video flush start");
1445 render->video_flushing = TRUE;
1446 GST_ASS_RENDER_BROADCAST (render);
1447 GST_ASS_RENDER_UNLOCK (render);
1448 ret = gst_pad_event_default (pad, parent, event);
1450 case GST_EVENT_FLUSH_STOP:
1451 GST_ASS_RENDER_LOCK (render);
1452 GST_INFO_OBJECT (render, "video flush stop");
1453 render->video_flushing = FALSE;
1454 render->video_eos = FALSE;
1455 gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
1456 GST_ASS_RENDER_UNLOCK (render);
1457 ret = gst_pad_event_default (pad, parent, event);
1460 ret = gst_pad_event_default (pad, parent, event);
1468 gst_ass_render_query_video (GstPad * pad, GstObject * parent, GstQuery * query)
1470 gboolean res = FALSE;
1472 switch (GST_QUERY_TYPE (query)) {
1473 case GST_QUERY_CAPS:
1475 GstCaps *filter, *caps;
1477 gst_query_parse_caps (query, &filter);
1478 caps = gst_ass_render_getcaps (pad, filter);
1479 gst_query_set_caps_result (query, caps);
1480 gst_caps_unref (caps);
1485 res = gst_pad_query_default (pad, parent, query);
1493 gst_ass_render_event_text (GstPad * pad, GstObject * parent, GstEvent * event)
1496 gboolean ret = FALSE;
1497 GstAssRender *render = GST_ASS_RENDER (parent);
1499 GST_DEBUG_OBJECT (pad, "received text event %" GST_PTR_FORMAT, event);
1501 switch (GST_EVENT_TYPE (event)) {
1502 case GST_EVENT_CAPS:
1506 gst_event_parse_caps (event, &caps);
1507 ret = gst_ass_render_setcaps_text (pad, caps);
1508 gst_event_unref (event);
1511 case GST_EVENT_SEGMENT:
1515 GST_ASS_RENDER_LOCK (render);
1516 render->subtitle_eos = FALSE;
1517 GST_ASS_RENDER_UNLOCK (render);
1519 gst_event_copy_segment (event, &segment);
1521 GST_ASS_RENDER_LOCK (render);
1522 if (segment.format == GST_FORMAT_TIME) {
1523 GST_DEBUG_OBJECT (render, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
1524 &render->subtitle_segment);
1526 render->subtitle_segment = segment;
1528 GST_DEBUG_OBJECT (render,
1529 "TEXT SEGMENT after: %" GST_SEGMENT_FORMAT,
1530 &render->subtitle_segment);
1532 GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
1533 ("received non-TIME newsegment event on subtitle input"));
1536 gst_event_unref (event);
1539 /* wake up the video chain, it might be waiting for a text buffer or
1540 * a text segment update */
1541 GST_ASS_RENDER_BROADCAST (render);
1542 GST_ASS_RENDER_UNLOCK (render);
1545 case GST_EVENT_GAP:{
1546 GstClockTime start, duration;
1548 gst_event_parse_gap (event, &start, &duration);
1549 if (GST_CLOCK_TIME_IS_VALID (duration))
1551 /* we do not expect another buffer until after gap,
1552 * so that is our position now */
1553 GST_ASS_RENDER_LOCK (render);
1554 render->subtitle_segment.position = start;
1556 /* wake up the video chain, it might be waiting for a text buffer or
1557 * a text segment update */
1558 GST_ASS_RENDER_BROADCAST (render);
1559 GST_ASS_RENDER_UNLOCK (render);
1562 case GST_EVENT_FLUSH_STOP:
1563 GST_ASS_RENDER_LOCK (render);
1564 GST_INFO_OBJECT (render, "text flush stop");
1565 render->subtitle_flushing = FALSE;
1566 render->subtitle_eos = FALSE;
1567 if (render->subtitle_pending)
1568 gst_buffer_unref (render->subtitle_pending);
1569 render->subtitle_pending = NULL;
1570 GST_ASS_RENDER_BROADCAST (render);
1571 gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
1572 GST_ASS_RENDER_UNLOCK (render);
1573 gst_event_unref (event);
1576 case GST_EVENT_FLUSH_START:
1577 GST_DEBUG_OBJECT (render, "text flush start");
1578 g_mutex_lock (&render->ass_mutex);
1579 if (render->ass_track) {
1580 /* delete any events on the ass_track */
1581 for (i = 0; i < render->ass_track->n_events; i++) {
1582 GST_DEBUG_OBJECT (render, "deleted event with eid %i", i);
1583 ass_free_event (render->ass_track, i);
1585 render->ass_track->n_events = 0;
1586 GST_DEBUG_OBJECT (render, "done flushing");
1588 g_mutex_unlock (&render->ass_mutex);
1589 GST_ASS_RENDER_LOCK (render);
1590 render->subtitle_flushing = TRUE;
1591 GST_ASS_RENDER_BROADCAST (render);
1592 GST_ASS_RENDER_UNLOCK (render);
1593 gst_event_unref (event);
1597 GST_ASS_RENDER_LOCK (render);
1598 render->subtitle_eos = TRUE;
1599 GST_INFO_OBJECT (render, "text EOS");
1600 /* wake up the video chain, it might be waiting for a text buffer or
1601 * a text segment update */
1602 GST_ASS_RENDER_BROADCAST (render);
1603 GST_ASS_RENDER_UNLOCK (render);
1604 gst_event_unref (event);
1609 GstTagList *taglist = NULL;
1611 /* tag events may contain attachments which might be fonts */
1612 GST_DEBUG_OBJECT (render, "got TAG event");
1614 gst_event_parse_tag (event, &taglist);
1615 gst_ass_render_handle_tags (render, taglist);
1616 ret = gst_pad_event_default (pad, parent, event);
1620 ret = gst_pad_event_default (pad, parent, event);
1628 plugin_init (GstPlugin * plugin)
1630 GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender",
1631 0, "ASS/SSA subtitle renderer");
1632 GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",
1633 0, "ASS/SSA subtitle renderer library");
1635 /* FIXME: fix unit tests before upping rank again */
1636 return gst_element_register (plugin, "assrender",
1637 GST_RANK_NONE, GST_TYPE_ASS_RENDER);
1640 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1643 "ASS/SSA subtitle renderer",
1644 plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)