*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
*/
/**
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mp3parse ! mad ! audioconvert ! autoaudiosink d. ! queue ! ffdec_h264 ! ffmpegcolorspace ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! ffmpegcolorspace ! autovideosink
+ * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! avdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
* ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
* </refsect2>
*/
-
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
+#include <gst/video/gstvideometa.h>
+
#include "gstassrender.h"
#include <string.h>
{
PROP_0,
PROP_ENABLE,
- PROP_EMBEDDEDFONTS
+ PROP_EMBEDDEDFONTS,
+ PROP_WAIT_TEXT
};
-#define FORMATS "{ RGB, BGR, xRGB, xBGR, RGBx, BGRx, I420 }"
+/* FIXME: video-blend.c doesn't support formats with more than 8 bits per
+ * component (which get unpacked into ARGB64 or AYUV64) yet, such as:
+ * v210, v216, UYVP, GRAY16_LE, GRAY16_BE */
+#define FORMATS "{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, \
+ I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, \
+ NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }"
+
+#define ASSRENDER_CAPS GST_VIDEO_CAPS_MAKE(FORMATS)
+
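+/* Pad template caps: the raw formats we can blend into in software, plus
+ * all raw formats with any caps features for the case where downstream
+ * supports GstVideoOverlayCompositionMeta and does the blending itself. */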
+#define ASSRENDER_ALL_CAPS ASSRENDER_CAPS ";" \
+ GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
+
+static GstStaticCaps sw_template_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
+ GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
);
static GstStaticPadTemplate video_sink_factory =
GST_STATIC_PAD_TEMPLATE ("video_sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
+ GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
);
static GstStaticPadTemplate text_sink_factory =
GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
);
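+/* The render lock and cond protect the pending subtitle buffer and the
+ * flushing/EOS flags shared between the video and text chain functions:
+ * the text chain waits on the cond until the video chain has consumed the
+ * pending buffer, and the video chain can wait on it for a text buffer. */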
+#define GST_ASS_RENDER_GET_LOCK(ass) (&GST_ASS_RENDER (ass)->lock)
+#define GST_ASS_RENDER_GET_COND(ass) (&GST_ASS_RENDER (ass)->cond)
+#define GST_ASS_RENDER_LOCK(ass) (g_mutex_lock (GST_ASS_RENDER_GET_LOCK (ass)))
+#define GST_ASS_RENDER_UNLOCK(ass) (g_mutex_unlock (GST_ASS_RENDER_GET_LOCK (ass)))
+#define GST_ASS_RENDER_WAIT(ass) (g_cond_wait (GST_ASS_RENDER_GET_COND (ass), GST_ASS_RENDER_GET_LOCK (ass)))
+#define GST_ASS_RENDER_SIGNAL(ass) (g_cond_signal (GST_ASS_RENDER_GET_COND (ass)))
+#define GST_ASS_RENDER_BROADCAST(ass) (g_cond_broadcast (GST_ASS_RENDER_GET_COND (ass)))
+
static void gst_ass_render_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_ass_render_get_property (GObject * object, guint prop_id,
#define gst_ass_render_parent_class parent_class
G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);
-static GstCaps *gst_ass_render_getcaps (GstPad * pad, GstCaps * filter);
+static GstCaps *gst_ass_render_get_videosink_caps (GstPad * pad,
+ GstAssRender * render, GstCaps * filter);
+static GstCaps *gst_ass_render_get_src_caps (GstPad * pad,
+ GstAssRender * render, GstCaps * filter);
-static gboolean gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps);
-static gboolean gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps);
+static gboolean gst_ass_render_setcaps_video (GstPad * pad,
+ GstAssRender * render, GstCaps * caps);
+static gboolean gst_ass_render_setcaps_text (GstPad * pad,
+ GstAssRender * render, GstCaps * caps);
static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
GstObject * parent, GstBuffer * buf);
g_param_spec_boolean ("enable", "Enable",
"Enable rendering of subtitles", TRUE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
"Extract and use fonts embedded in the stream", TRUE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_WAIT_TEXT,
+ g_param_spec_boolean ("wait-text", "Wait Text",
+ "Whether to wait for subtitles", TRUE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&text_sink_factory));
- gst_element_class_set_details_simple (gstelement_class, "ASS/SSA Render",
+ gst_element_class_set_static_metadata (gstelement_class, "ASS/SSA Render",
"Mixer/Video/Overlay/Subtitle",
"Renders ASS/SSA subtitles with libass",
"Benjamin Schmitz <vortex@wolpzone.de>, "
gst_pad_set_query_function (render->video_sinkpad,
GST_DEBUG_FUNCPTR (gst_ass_render_query_video));
+ GST_PAD_SET_PROXY_ALLOCATION (render->video_sinkpad);
+
gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);
gst_video_info_init (&render->info);
- g_mutex_init (&render->subtitle_mutex);
- g_cond_init (&render->subtitle_cond);
+ g_mutex_init (&render->lock);
+ g_cond_init (&render->cond);
render->renderer_init_ok = FALSE;
render->track_init_ok = FALSE;
render->enable = TRUE;
render->embeddedfonts = TRUE;
+ render->wait_text = FALSE;
gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
{
GstAssRender *render = GST_ASS_RENDER (object);
- g_mutex_clear (&render->subtitle_mutex);
- g_cond_clear (&render->subtitle_cond);
+ g_mutex_clear (&render->lock);
+ g_cond_clear (&render->cond);
if (render->ass_track) {
ass_free_track (render->ass_track);
{
GstAssRender *render = GST_ASS_RENDER (object);
+ GST_ASS_RENDER_LOCK (render);
switch (prop_id) {
case PROP_ENABLE:
render->enable = g_value_get_boolean (value);
ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
g_mutex_unlock (&render->ass_mutex);
break;
+ case PROP_WAIT_TEXT:
+ render->wait_text = g_value_get_boolean (value);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
+ GST_ASS_RENDER_UNLOCK (render);
}
static void
{
GstAssRender *render = GST_ASS_RENDER (object);
+ GST_ASS_RENDER_LOCK (render);
switch (prop_id) {
case PROP_ENABLE:
g_value_set_boolean (value, render->enable);
case PROP_EMBEDDEDFONTS:
g_value_set_boolean (value, render->embeddedfonts);
break;
+ case PROP_WAIT_TEXT:
+ g_value_set_boolean (value, render->wait_text);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
+ GST_ASS_RENDER_UNLOCK (render);
+}
+
+/* Called with lock held */
+static void
+gst_ass_render_pop_text (GstAssRender * render)
+{
+ if (render->subtitle_pending) {
+ GST_DEBUG_OBJECT (render, "releasing text buffer %p",
+ render->subtitle_pending);
+ gst_buffer_unref (render->subtitle_pending);
+ render->subtitle_pending = NULL;
+ }
+
+ /* Let the text task know we used that buffer */
+ GST_ASS_RENDER_BROADCAST (render);
}
static GstStateChangeReturn
GstStateChangeReturn ret;
switch (transition) {
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- render->subtitle_flushing = FALSE;
- gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
- gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
- break;
- case GST_STATE_CHANGE_NULL_TO_READY:
- case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
- default:
- break;
-
case GST_STATE_CHANGE_PAUSED_TO_READY:
- g_mutex_lock (&render->subtitle_mutex);
+ GST_ASS_RENDER_LOCK (render);
render->subtitle_flushing = TRUE;
- if (render->subtitle_pending)
- gst_buffer_unref (render->subtitle_pending);
- render->subtitle_pending = NULL;
- g_cond_signal (&render->subtitle_cond);
- g_mutex_unlock (&render->subtitle_mutex);
+ render->video_flushing = TRUE;
+ gst_ass_render_pop_text (render);
+ GST_ASS_RENDER_UNLOCK (render);
+ break;
+ default:
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ return ret;
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
if (render->ass_track)
ass_free_track (render->ass_track);
render->ass_track = NULL;
- g_mutex_unlock (&render->ass_mutex);
+ if (render->composition) {
+ gst_video_overlay_composition_unref (render->composition);
+ render->composition = NULL;
+ }
render->track_init_ok = FALSE;
render->renderer_init_ok = FALSE;
+ g_mutex_unlock (&render->ass_mutex);
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ GST_ASS_RENDER_LOCK (render);
+ render->subtitle_flushing = FALSE;
+ render->video_flushing = FALSE;
+ render->video_eos = FALSE;
+ render->subtitle_eos = FALSE;
+ gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
+ gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
+ GST_ASS_RENDER_UNLOCK (render);
break;
- case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
- case GST_STATE_CHANGE_READY_TO_NULL:
default:
break;
}
GstCaps *filter, *caps;
gst_query_parse_caps (query, &filter);
- caps = gst_ass_render_getcaps (pad, filter);
+ caps = gst_ass_render_get_src_caps (pad, (GstAssRender *) parent, filter);
gst_query_set_caps_result (query, caps);
gst_caps_unref (caps);
res = TRUE;
GstAssRender *render = GST_ASS_RENDER (parent);
gboolean ret = FALSE;
+ GST_DEBUG_OBJECT (render, "received src event %" GST_PTR_FORMAT, event);
+
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:{
GstSeekFlags flags;
+ if (!render->track_init_ok) {
+ GST_DEBUG_OBJECT (render, "seek received, pushing upstream");
+ ret = gst_pad_push_event (render->video_sinkpad, event);
+ return ret;
+ }
+
GST_DEBUG_OBJECT (render, "seek received, driving from here");
gst_event_parse_seek (event, NULL, NULL, &flags, NULL, NULL, NULL, NULL);
gst_pad_push_event (render->srcpad, gst_event_new_flush_start ());
/* Mark subtitle as flushing, unblocks chains */
- g_mutex_lock (&render->subtitle_mutex);
- if (render->subtitle_pending)
- gst_buffer_unref (render->subtitle_pending);
- render->subtitle_pending = NULL;
+ GST_ASS_RENDER_LOCK (render);
render->subtitle_flushing = TRUE;
- g_cond_signal (&render->subtitle_cond);
- g_mutex_unlock (&render->subtitle_mutex);
+ render->video_flushing = TRUE;
+ gst_ass_render_pop_text (render);
+ GST_ASS_RENDER_UNLOCK (render);
/* Seek on each sink pad */
gst_event_ref (event);
break;
}
default:
- gst_event_ref (event);
- ret = gst_pad_push_event (render->video_sinkpad, event);
- gst_pad_push_event (render->text_sinkpad, event);
+ if (render->track_init_ok) {
+ gst_event_ref (event);
+ ret = gst_pad_push_event (render->video_sinkpad, event);
+ gst_pad_push_event (render->text_sinkpad, event);
+ } else {
+ ret = gst_pad_push_event (render->video_sinkpad, event);
+ }
break;
}
return ret;
}
+/**
+ * gst_ass_render_add_feature_and_intersect:
+ *
+ * Creates a new #GstCaps containing (the given caps with the given
+ * caps feature added) appended with (the given caps intersected
+ * with the given filter).
+ *
+ * Returns: the new #GstCaps
+ */
static GstCaps *
-gst_ass_render_getcaps (GstPad * pad, GstCaps * filter)
+gst_ass_render_add_feature_and_intersect (GstCaps * caps,
+ const gchar * feature, GstCaps * filter)
{
- GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
- GstPad *otherpad;
- GstCaps *caps;
+ int i, caps_size;
+ GstCaps *new_caps;
- if (pad == render->srcpad)
- otherpad = render->video_sinkpad;
- else
- otherpad = render->srcpad;
+ new_caps = gst_caps_copy (caps);
- /* we can do what the peer can */
- caps = gst_pad_peer_query_caps (otherpad, filter);
- if (caps) {
- GstCaps *temp;
- const GstCaps *templ;
+ caps_size = gst_caps_get_size (new_caps);
+ for (i = 0; i < caps_size; i++) {
+ GstCapsFeatures *features = gst_caps_get_features (new_caps, i);
+ if (!gst_caps_features_is_any (features)) {
+ gst_caps_features_add (features, feature);
+ }
+ }
+
+ gst_caps_append (new_caps, gst_caps_intersect_full (caps,
+ filter, GST_CAPS_INTERSECT_FIRST));
+
+ return new_caps;
+}
+
+/**
+ * gst_ass_render_intersect_by_feature:
+ *
+ * Creates a new #GstCaps based on the following filtering rule.
+ *
+ * For each individual caps contained in the given caps, if the
+ * caps use the given caps feature, keep a version of the caps
+ * with the feature and another one without it. Otherwise, intersect
+ * the caps with the given filter.
+ *
+ * Returns: the new #GstCaps
+ */
+static GstCaps *
+gst_ass_render_intersect_by_feature (GstCaps * caps,
+ const gchar * feature, GstCaps * filter)
+{
+ int i, caps_size;
+ GstCaps *new_caps;
+
+ new_caps = gst_caps_new_empty ();
+
+ caps_size = gst_caps_get_size (caps);
+ for (i = 0; i < caps_size; i++) {
+ GstStructure *caps_structure = gst_caps_get_structure (caps, i);
+ GstCapsFeatures *caps_features =
+ gst_caps_features_copy (gst_caps_get_features (caps, i));
+ GstCaps *filtered_caps;
+ GstCaps *simple_caps =
+ gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
+ gst_caps_set_features (simple_caps, 0, caps_features);
+
+ if (gst_caps_features_contains (caps_features, feature)) {
+ gst_caps_append (new_caps, gst_caps_copy (simple_caps));
+
+ gst_caps_features_remove (caps_features, feature);
+ filtered_caps = gst_caps_ref (simple_caps);
+ } else {
+ filtered_caps = gst_caps_intersect_full (simple_caps, filter,
+ GST_CAPS_INTERSECT_FIRST);
+ }
+
+ gst_caps_unref (simple_caps);
+ gst_caps_append (new_caps, filtered_caps);
+ }
+
+ return new_caps;
+}
+
+static GstCaps *
+gst_ass_render_get_videosink_caps (GstPad * pad, GstAssRender * render,
+ GstCaps * filter)
+{
+ GstPad *srcpad = render->srcpad;
+ GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
+
+ if (filter) {
+ /* filter caps + composition feature + filter caps
+ * filtered by the software caps. */
+ GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
+ assrender_filter = gst_ass_render_add_feature_and_intersect (filter,
+ GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
+ gst_caps_unref (sw_caps);
+
+ GST_DEBUG_OBJECT (render, "assrender filter %" GST_PTR_FORMAT,
+ assrender_filter);
+ }
+
+ peer_caps = gst_pad_peer_query_caps (srcpad, assrender_filter);
+
+ if (assrender_filter)
+ gst_caps_unref (assrender_filter);
+
+ if (peer_caps) {
+
+ GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
+
+ if (gst_caps_is_any (peer_caps)) {
+
+ /* if peer returns ANY caps, return filtered src pad template caps */
+ caps = gst_caps_copy (gst_pad_get_pad_template_caps (srcpad));
+ } else {
+
+ /* duplicate caps which contain the composition into one version with
+ * the meta and one without. Filter the other caps by the software caps */
+ GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
+ caps = gst_ass_render_intersect_by_feature (peer_caps,
+ GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
+ gst_caps_unref (sw_caps);
+ }
+
+ gst_caps_unref (peer_caps);
- /* filtered against our padtemplate */
- templ = gst_pad_get_pad_template_caps (otherpad);
- temp = gst_caps_intersect (caps, templ);
- gst_caps_unref (caps);
- /* this is what we can do */
- caps = temp;
} else {
/* no peer, our padtemplate is enough then */
- caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
+ caps = gst_pad_get_pad_template_caps (pad);
+ }
+
+ if (filter) {
+ GstCaps *intersection = gst_caps_intersect_full (filter, caps,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = intersection;
}
- gst_object_unref (render);
+ GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
return caps;
}
-#define CREATE_RGB_BLIT_FUNCTION(name,bpp,R,G,B) \
-static void \
-blit_##name (GstAssRender * render, ASS_Image * ass_image, GstVideoFrame * frame) \
-{ \
- guint counter = 0; \
- gint alpha, r, g, b, k; \
- const guint8 *src; \
- guint8 *dst, *data; \
- gint x, y, w, h; \
- gint width; \
- gint height; \
- gint dst_stride; \
- gint dst_skip; \
- gint src_skip; \
- \
- width = GST_VIDEO_FRAME_WIDTH (frame); \
- height = GST_VIDEO_FRAME_HEIGHT (frame); \
- dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0); \
- data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
- \
- while (ass_image) { \
- if (ass_image->dst_y > height || ass_image->dst_x > width) \
- goto next; \
- \
- /* blend subtitles onto the video frame */ \
- alpha = 255 - ((ass_image->color) & 0xff); \
- r = ((ass_image->color) >> 24) & 0xff; \
- g = ((ass_image->color) >> 16) & 0xff; \
- b = ((ass_image->color) >> 8) & 0xff; \
- src = ass_image->bitmap; \
- dst = data + ass_image->dst_y * dst_stride + ass_image->dst_x * bpp; \
- \
- w = MIN (ass_image->w, width - ass_image->dst_x); \
- h = MIN (ass_image->h, height - ass_image->dst_y); \
- src_skip = ass_image->stride - w; \
- dst_skip = dst_stride - w * bpp; \
- \
- for (y = 0; y < h; y++) { \
- for (x = 0; x < w; x++) { \
- k = src[0] * alpha / 255; \
- dst[R] = (k * r + (255 - k) * dst[R]) / 255; \
- dst[G] = (k * g + (255 - k) * dst[G]) / 255; \
- dst[B] = (k * b + (255 - k) * dst[B]) / 255; \
- src++; \
- dst += bpp; \
- } \
- src += src_skip; \
- dst += dst_skip; \
- } \
-next: \
- counter++; \
- ass_image = ass_image->next; \
- } \
- GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter); \
-}
+static GstCaps *
+gst_ass_render_get_src_caps (GstPad * pad, GstAssRender * render,
+ GstCaps * filter)
+{
+ GstPad *sinkpad = render->video_sinkpad;
+ GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;
+
+ if (filter) {
+ /* duplicate filter caps which contain the composition into one version
+ * with the meta and one without. Filter the other caps by the software
+ * caps */
+ GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
+ assrender_filter =
+ gst_ass_render_intersect_by_feature (filter,
+ GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
+ gst_caps_unref (sw_caps);
+ }
-CREATE_RGB_BLIT_FUNCTION (rgb, 3, 0, 1, 2);
-CREATE_RGB_BLIT_FUNCTION (bgr, 3, 2, 1, 0);
-CREATE_RGB_BLIT_FUNCTION (xrgb, 4, 1, 2, 3);
-CREATE_RGB_BLIT_FUNCTION (xbgr, 4, 3, 2, 1);
-CREATE_RGB_BLIT_FUNCTION (rgbx, 4, 0, 1, 2);
-CREATE_RGB_BLIT_FUNCTION (bgrx, 4, 2, 1, 0);
+ peer_caps = gst_pad_peer_query_caps (sinkpad, assrender_filter);
-#undef CREATE_RGB_BLIT_FUNCTION
+ if (assrender_filter)
+ gst_caps_unref (assrender_filter);
-static inline gint
-rgb_to_y (gint r, gint g, gint b)
-{
- gint ret;
+ if (peer_caps) {
- ret = (gint) (((19595 * r) >> 16) + ((38470 * g) >> 16) + ((7471 * b) >> 16));
- ret = CLAMP (ret, 0, 255);
- return ret;
-}
+ GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
-static inline gint
-rgb_to_u (gint r, gint g, gint b)
-{
- gint ret;
+ if (gst_caps_is_any (peer_caps)) {
- ret =
- (gint) (-((11059 * r) >> 16) - ((21709 * g) >> 16) + ((32768 * b) >> 16) +
- 128);
- ret = CLAMP (ret, 0, 255);
- return ret;
-}
+ /* if peer returns ANY caps, return filtered sink pad template caps */
+ caps = gst_caps_copy (gst_pad_get_pad_template_caps (sinkpad));
-static inline gint
-rgb_to_v (gint r, gint g, gint b)
-{
- gint ret;
+ } else {
- ret =
- (gint) (((32768 * r) >> 16) - ((27439 * g) >> 16) - ((5329 * b) >> 16) +
- 128);
- ret = CLAMP (ret, 0, 255);
- return ret;
+ /* return upstream caps + composition feature + upstream caps
+ * filtered by the software caps. */
+ GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
+ caps = gst_ass_render_add_feature_and_intersect (peer_caps,
+ GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
+ gst_caps_unref (sw_caps);
+ }
+
+ gst_caps_unref (peer_caps);
+
+ } else {
+ /* no peer, our padtemplate is enough then */
+ caps = gst_pad_get_pad_template_caps (pad);
+ }
+
+ if (filter) {
+ GstCaps *intersection;
+
+ intersection =
+ gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = intersection;
+ }
+
+ GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);
+
+ return caps;
}
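+/* Blend the list of ASS images into a single premultiplied BGRA buffer;
+ * x_off/y_off shift the image positions so the buffer only needs to cover
+ * the bounding box of all images. */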
static void
-blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstVideoFrame * frame)
+blit_bgra_premultiplied (GstAssRender * render, ASS_Image * ass_image,
+ guint8 * data, gint width, gint height, gint stride, gint x_off, gint y_off)
{
guint counter = 0;
- gint alpha, r, g, b, k, k2;
- gint Y, U, V;
+ gint alpha, r, g, b, k;
const guint8 *src;
- guint8 *dst_y, *dst_u, *dst_v;
+ guint8 *dst;
gint x, y, w, h;
-/* FIXME ignoring source image stride might be wrong here */
-#if 0
- gint w2;
- gint src_stride;
-#endif
- gint width, height;
- guint8 *y_data, *u_data, *v_data;
- gint y_stride, u_stride, v_stride;
-
- width = GST_VIDEO_FRAME_WIDTH (frame);
- height = GST_VIDEO_FRAME_HEIGHT (frame);
+ gint dst_skip;
+ gint src_skip;
+ gint dst_x, dst_y;
- y_data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
- u_data = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
- v_data = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
-
- y_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
- u_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);
- v_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 2);
+ memset (data, 0, stride * height);
while (ass_image) {
- if (ass_image->dst_y > height || ass_image->dst_x > width)
+ dst_x = ass_image->dst_x + x_off;
+ dst_y = ass_image->dst_y + y_off;
+
+ if (dst_y >= height || dst_x >= width)
goto next;
- /* blend subtitles onto the video frame */
- alpha = 255 - ((ass_image->color) & 0xff);
+ alpha = 255 - (ass_image->color & 0xff);
r = ((ass_image->color) >> 24) & 0xff;
g = ((ass_image->color) >> 16) & 0xff;
b = ((ass_image->color) >> 8) & 0xff;
-
- Y = rgb_to_y (r, g, b);
- U = rgb_to_u (r, g, b);
- V = rgb_to_v (r, g, b);
-
- w = MIN (ass_image->w, width - ass_image->dst_x);
- h = MIN (ass_image->h, height - ass_image->dst_y);
-
-#if 0
- w2 = (w + 1) / 2;
-
- src_stride = ass_image->stride;
-#endif
-
src = ass_image->bitmap;
-#if 0
- dst_y = y_data + ass_image->dst_y * y_stride + ass_image->dst_x;
- dst_u = u_data + (ass_image->dst_y / 2) * u_stride + ass_image->dst_x / 2;
- dst_v = v_data + (ass_image->dst_y / 2) * v_stride + ass_image->dst_x / 2;
-#endif
+ dst = data + dst_y * stride + dst_x * 4;
+
+ w = MIN (ass_image->w, width - dst_x);
+ h = MIN (ass_image->h, height - dst_y);
+ src_skip = ass_image->stride - w;
+ dst_skip = stride - w * 4;
for (y = 0; y < h; y++) {
- dst_y = y_data + (ass_image->dst_y + y) * y_stride + ass_image->dst_x;
for (x = 0; x < w; x++) {
- k = src[y * ass_image->w + x] * alpha / 255;
- dst_y[x] = (k * Y + (255 - k) * dst_y[x]) / 255;
- }
- }
-
- y = 0;
- if (ass_image->dst_y & 1) {
- dst_u = u_data + (ass_image->dst_y / 2) * u_stride + ass_image->dst_x / 2;
- dst_v = v_data + (ass_image->dst_y / 2) * v_stride + ass_image->dst_x / 2;
- x = 0;
- if (ass_image->dst_x & 1) {
- k2 = src[y * ass_image->w + x] * alpha / 255;
- k2 = (k2 + 2) >> 2;
- dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
- dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
- x++;
- dst_u++;
- dst_v++;
- }
- for (; x < w - 1; x += 2) {
- k2 = src[y * ass_image->w + x] * alpha / 255;
- k2 += src[y * ass_image->w + x + 1] * alpha / 255;
- k2 = (k2 + 2) >> 2;
- dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
- dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
- dst_u++;
- dst_v++;
- }
- if (x < w) {
- k2 = src[y * ass_image->w + x] * alpha / 255;
- k2 = (k2 + 2) >> 2;
- dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
- dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
- }
- }
-
- for (; y < h - 1; y += 2) {
- dst_u = u_data + ((ass_image->dst_y + y) / 2) * u_stride +
- ass_image->dst_x / 2;
- dst_v = v_data + ((ass_image->dst_y + y) / 2) * v_stride +
- ass_image->dst_x / 2;
- x = 0;
- if (ass_image->dst_x & 1) {
- k2 = src[y * ass_image->w + x] * alpha / 255;
- k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
- k2 = (k2 + 2) >> 2;
- dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
- dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
- x++;
- dst_u++;
- dst_v++;
- }
- for (; x < w - 1; x += 2) {
- k2 = src[y * ass_image->w + x] * alpha / 255;
- k2 += src[y * ass_image->w + x + 1] * alpha / 255;
- k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
- k2 += src[(y + 1) * ass_image->w + x + 1] * alpha / 255;
- k2 = (k2 + 2) >> 2;
- dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
- dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
- dst_u++;
- dst_v++;
- }
- if (x < w) {
- k2 = src[y * ass_image->w + x] * alpha / 255;
- k2 += src[(y + 1) * ass_image->w + x] * alpha / 255;
- k2 = (k2 + 2) >> 2;
- dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
- dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
- }
- }
-
- if (y < h) {
- dst_u = u_data + (ass_image->dst_y / 2) * u_stride + ass_image->dst_x / 2;
- dst_v = v_data + (ass_image->dst_y / 2) * v_stride + ass_image->dst_x / 2;
- x = 0;
- if (ass_image->dst_x & 1) {
- k2 = src[y * ass_image->w + x] * alpha / 255;
- k2 = (k2 + 2) >> 2;
- dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
- dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
- x++;
- dst_u++;
- dst_v++;
- }
- for (; x < w - 1; x += 2) {
- k2 = src[y * ass_image->w + x] * alpha / 255;
- k2 += src[y * ass_image->w + x + 1] * alpha / 255;
- k2 = (k2 + 2) >> 2;
- dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
- dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
- dst_u++;
- dst_v++;
- }
- if (x < w) {
- k2 = src[y * ass_image->w + x] * alpha / 255;
- k2 = (k2 + 2) >> 2;
- dst_u[0] = (k2 * U + (255 - k2) * dst_u[0]) / 255;
- dst_v[0] = (k2 * V + (255 - k2) * dst_v[0]) / 255;
+ k = src[0] * alpha / 255;
+ if (dst[3] == 0) {
+ dst[3] = k;
+ dst[2] = (k * r) / 255;
+ dst[1] = (k * g) / 255;
+ dst[0] = (k * b) / 255;
+ } else {
+ dst[3] = k + (255 - k) * dst[3] / 255;
+ dst[2] = (k * r + (255 - k) * dst[2]) / 255;
+ dst[1] = (k * g + (255 - k) * dst[1]) / 255;
+ dst[0] = (k * b + (255 - k) * dst[0]) / 255;
+ }
+ src++;
+ dst += 4;
}
+ src += src_skip;
+ dst += dst_skip;
}
-
-
-
next:
counter++;
ass_image = ass_image->next;
}
-
GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
}
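+/* Check whether the caps describe raw video in a format we can blend
+ * into in software (i.e. a subset of the software template caps). */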
static gboolean
-gst_ass_render_setcaps_video (GstPad * pad, GstCaps * caps)
+gst_ass_render_can_handle_caps (GstCaps * incaps)
{
- GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
+ static GstStaticCaps static_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
+ gboolean ret;
+ GstCaps *caps;
+
+ caps = gst_static_caps_get (&static_caps);
+ ret = gst_caps_is_subset (incaps, caps);
+ gst_caps_unref (caps);
+
+ return ret;
+}
+
+static gboolean
+gst_ass_render_setcaps_video (GstPad * pad, GstAssRender * render,
+ GstCaps * caps)
+{
+ GstQuery *query;
gboolean ret = FALSE;
gint par_n = 1, par_d = 1;
gdouble dar;
GstVideoInfo info;
+ gboolean attach = FALSE;
+ gboolean caps_has_meta = TRUE;
+ GstCapsFeatures *f;
+ GstCaps *original_caps = caps;
if (!gst_video_info_from_caps (&info, caps))
goto invalid_caps;
render->info = info;
+ gst_caps_ref (caps);
+
+ /* Try to use the overlay meta if possible */
+ f = gst_caps_get_features (caps, 0);
+
+ /* if the caps don't have the overlay meta, query whether downstream
+ * accepts it before trying the version without the meta.
+ * If upstream is already using the meta then we can only use it */
+ if (!f
+ || !gst_caps_features_contains (f,
+ GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION)) {
+ GstCaps *overlay_caps;
+
+ /* In this case we added the meta, but we can work without it,
+ * so preserve the original caps as a fallback */
+ overlay_caps = gst_caps_copy (caps);
+
+ f = gst_caps_get_features (overlay_caps, 0);
+ gst_caps_features_add (f,
+ GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
+
+ ret = gst_pad_peer_query_accept_caps (render->srcpad, overlay_caps);
+ GST_DEBUG_OBJECT (render, "Downstream accepts the overlay meta: %d", ret);
+ if (ret) {
+ gst_caps_unref (caps);
+ caps = overlay_caps;
+ } else {
+ /* fallback to the original */
+ gst_caps_unref (overlay_caps);
+ caps_has_meta = FALSE;
+ }
+
+ }
+ GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, caps);
ret = gst_pad_set_caps (render->srcpad, caps);
+ gst_caps_unref (caps);
+
if (!ret)
goto out;
- switch (GST_VIDEO_INFO_FORMAT (&info)) {
- case GST_VIDEO_FORMAT_RGB:
- render->blit = blit_rgb;
- break;
- case GST_VIDEO_FORMAT_BGR:
- render->blit = blit_bgr;
- break;
- case GST_VIDEO_FORMAT_xRGB:
- render->blit = blit_xrgb;
- break;
- case GST_VIDEO_FORMAT_xBGR:
- render->blit = blit_xbgr;
- break;
- case GST_VIDEO_FORMAT_RGBx:
- render->blit = blit_rgbx;
- break;
- case GST_VIDEO_FORMAT_BGRx:
- render->blit = blit_bgrx;
- break;
- case GST_VIDEO_FORMAT_I420:
- render->blit = blit_i420;
- break;
- default:
- ret = FALSE;
- goto out;
+ render->width = info.width;
+ render->height = info.height;
+
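+ /* ask downstream via an allocation query whether it supports
+ * GstVideoOverlayCompositionMeta; only then can we attach the
+ * composition to buffers instead of blending it ourselves */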
+ query = gst_query_new_allocation (caps, FALSE);
+ if (caps_has_meta && gst_pad_peer_query (render->srcpad, query)) {
+ if (gst_query_find_allocation_meta (query,
+ GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL))
+ attach = TRUE;
+ }
+ gst_query_unref (query);
+
+ render->attach_compo_to_buffer = attach;
+
+ if (!attach) {
+ if (caps_has_meta) {
+ /* Some elements (fakesink) claim to accept the meta on caps but won't
+ put it in the allocation query result, which causes the check below
+ to fail. Prevent this by removing the meta from the caps again */
+ caps = original_caps;
+ ret = gst_pad_set_caps (render->srcpad, caps);
+ if (!ret)
+ goto out;
+ }
+ if (!gst_ass_render_can_handle_caps (caps))
+ goto unsupported_caps;
}
g_mutex_lock (&render->ass_mutex);
- ass_set_frame_size (render->ass_renderer, info.width, info.height);
+ ass_set_frame_size (render->ass_renderer, render->width, render->height);
- dar = (((gdouble) par_n) * ((gdouble) info.width))
- / (((gdouble) par_d) * ((gdouble) info.height));
+ dar = (((gdouble) par_n) * ((gdouble) render->width))
+ / (((gdouble) par_d) * ((gdouble) render->height));
#if !defined(LIBASS_VERSION) || LIBASS_VERSION < 0x00907000
ass_set_aspect_ratio (render->ass_renderer, dar);
#else
ass_set_aspect_ratio (render->ass_renderer,
- dar, ((gdouble) info.width) / ((gdouble) info.height));
+ dar, ((gdouble) render->width) / ((gdouble) render->height));
#endif
ass_set_font_scale (render->ass_renderer, 1.0);
ass_set_hinting (render->ass_renderer, ASS_HINTING_LIGHT);
GST_DEBUG_OBJECT (render, "ass renderer setup complete");
out:
- gst_object_unref (render);
return ret;
ret = FALSE;
goto out;
}
+unsupported_caps:
+ {
+ GST_ERROR_OBJECT (render, "Unsupported caps: %" GST_PTR_FORMAT, caps);
+ ret = FALSE;
+ goto out;
+ }
}
static gboolean
-gst_ass_render_setcaps_text (GstPad * pad, GstCaps * caps)
+gst_ass_render_setcaps_text (GstPad * pad, GstAssRender * render,
+ GstCaps * caps)
{
- GstAssRender *render = GST_ASS_RENDER (gst_pad_get_parent (pad));
GstStructure *structure;
const GValue *value;
GstBuffer *priv;
- gchar *codec_private;
- gsize codec_private_size;
+ GstMapInfo map;
gboolean ret = FALSE;
structure = gst_caps_get_structure (caps, 0);
priv = gst_value_get_buffer (value);
g_return_val_if_fail (priv != NULL, FALSE);
- codec_private =
- gst_buffer_map (priv, &codec_private_size, NULL, GST_MAP_READ);
+ gst_buffer_map (priv, &map, GST_MAP_READ);
if (!render->ass_track)
render->ass_track = ass_new_track (render->ass_library);
- ass_process_codec_private (render->ass_track,
- codec_private, codec_private_size);
+ ass_process_codec_private (render->ass_track, (char *) map.data, map.size);
- gst_buffer_unmap (priv, codec_private, codec_private_size);
+ gst_buffer_unmap (priv, &map);
GST_DEBUG_OBJECT (render, "ass track created");
}
g_mutex_unlock (&render->ass_mutex);
- gst_object_unref (render);
-
return ret;
}
gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
GstClockTime running_time, GstClockTime duration)
{
- gchar *data;
- gsize size;
+ GstMapInfo map;
gdouble pts_start, pts_end;
pts_start = running_time;
" and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
GST_TIME_ARGS (duration));
- data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
g_mutex_lock (&render->ass_mutex);
- ass_process_chunk (render->ass_track, data, size, pts_start, pts_end);
+ ass_process_chunk (render->ass_track, (gchar *) map.data, map.size,
+ pts_start, pts_end);
g_mutex_unlock (&render->ass_mutex);
- gst_buffer_unmap (buffer, data, size);
+ gst_buffer_unmap (buffer, &map);
+}
+
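+/* Render the ASS image list into one overlay rectangle covering the
+ * bounding box of all images and wrap it in a GstVideoOverlayComposition
+ * positioned at (min_x, min_y) within the video frame. */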
+static GstVideoOverlayComposition *
+gst_ass_render_composite_overlay (GstAssRender * render, ASS_Image * images)
+{
+ GstVideoOverlayComposition *composition;
+ GstVideoOverlayRectangle *rectangle;
+ GstVideoMeta *vmeta;
+ GstMapInfo map;
+ GstBuffer *buffer;
+ ASS_Image *image;
+ gint min_x, min_y;
+ gint max_x, max_y;
+ gint width, height;
+ gint stride;
+ gpointer data;
+
+ min_x = G_MAXINT;
+ min_y = G_MAXINT;
+ max_x = 0;
+ max_y = 0;
+
+ /* find bounding box of all images, to limit the overlay rectangle size */
+ for (image = images; image; image = image->next) {
+ if (min_x > image->dst_x)
+ min_x = image->dst_x;
+ if (min_y > image->dst_y)
+ min_y = image->dst_y;
+ if (max_x < image->dst_x + image->w)
+ max_x = image->dst_x + image->w;
+ if (max_y < image->dst_y + image->h)
+ max_y = image->dst_y + image->h;
+ }
+
+ width = MIN (max_x - min_x, render->width);
+ height = MIN (max_y - min_y, render->height);
+
+ GST_DEBUG_OBJECT (render, "render overlay rectangle %dx%d%+d%+d",
+ width, height, min_x, min_y);
+
+ buffer = gst_buffer_new_and_alloc (4 * width * height);
+ if (!buffer) {
+ GST_ERROR_OBJECT (render, "Failed to allocate overlay buffer");
+ return NULL;
+ }
+
+ vmeta = gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
+ GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, width, height);
+
+ if (!gst_video_meta_map (vmeta, 0, &map, &data, &stride, GST_MAP_READWRITE)) {
+ GST_ERROR_OBJECT (render, "Failed to map overlay buffer");
+ gst_buffer_unref (buffer);
+ return NULL;
+ }
+
+ blit_bgra_premultiplied (render, images, data, width, height, stride,
+ -min_x, -min_y);
+ gst_video_meta_unmap (vmeta, 0, &map);
+
+ rectangle = gst_video_overlay_rectangle_new_raw (buffer, min_x, min_y,
+ width, height, GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
+
gst_buffer_unref (buffer);
+
+ composition = gst_video_overlay_composition_new (rectangle);
+ gst_video_overlay_rectangle_unref (rectangle);
+
+ return composition;
+}
+
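+/* Push a video buffer downstream, attaching the current composition as
+ * buffer meta if downstream supports it, otherwise blending it into the
+ * frame in software first. */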
+static gboolean
+gst_ass_render_push_frame (GstAssRender * render, GstBuffer * video_frame)
+{
+ GstVideoFrame frame;
+
+ if (!render->composition)
+ goto done;
+
+ video_frame = gst_buffer_make_writable (video_frame);
+
+ if (render->attach_compo_to_buffer) {
+ gst_buffer_add_video_overlay_composition_meta (video_frame,
+ render->composition);
+ goto done;
+ }
+
+ if (!gst_video_frame_map (&frame, &render->info, video_frame,
+ GST_MAP_READWRITE)) {
+ GST_WARNING_OBJECT (render, "failed to map video frame for blending");
+ goto done;
+ }
+
+ gst_video_overlay_composition_blend (render->composition, &frame);
+ gst_video_frame_unmap (&frame);
+
+done:
+ return gst_pad_push (render->srcpad, video_frame);
}
static GstFlowReturn
guint64 start, stop, clip_start = 0, clip_stop = 0;
ASS_Image *ass_image;
- if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
- GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
- gst_buffer_unref (buffer);
- return GST_FLOW_OK;
- }
+ if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
+ goto missing_timestamp;
/* ignore buffers that are outside of the current segment */
start = GST_BUFFER_TIMESTAMP (buffer);
GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
}
- render->video_segment.position = clip_start;
-
- g_mutex_lock (&render->subtitle_mutex);
- if (render->subtitle_pending) {
- GstClockTime sub_running_time, vid_running_time;
- GstClockTime sub_running_time_end, vid_running_time_end;
-
- sub_running_time =
- gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (render->subtitle_pending));
- sub_running_time_end =
- gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (render->subtitle_pending) +
- GST_BUFFER_DURATION (render->subtitle_pending));
- vid_running_time =
- gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (buffer));
- vid_running_time_end =
- gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer));
-
- if (sub_running_time_end < vid_running_time) {
- gst_buffer_unref (render->subtitle_pending);
- GST_DEBUG_OBJECT (render,
- "Too late text buffer, dropping (%" GST_TIME_FORMAT " < %"
- GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time_end),
- GST_TIME_ARGS (vid_running_time));
- render->subtitle_pending = NULL;
- g_cond_signal (&render->subtitle_cond);
- } else if (sub_running_time <= vid_running_time_end + GST_SECOND / 2) {
- gst_ass_render_process_text (render, render->subtitle_pending,
- sub_running_time, sub_running_time_end - sub_running_time);
- render->subtitle_pending = NULL;
- g_cond_signal (&render->subtitle_cond);
+ /* now, after we've done the clipping, fix up end time if there's no
+ * duration (we only use those estimated values internally though, we
+ * don't want to set bogus values on the buffer itself) */
+ if (stop == -1) {
+ if (render->info.fps_n && render->info.fps_d) {
+ GST_DEBUG_OBJECT (render, "estimating duration based on framerate");
+ stop =
+ start + gst_util_uint64_scale_int (GST_SECOND, render->info.fps_d,
+ render->info.fps_n);
+ } else {
+ GST_WARNING_OBJECT (render, "no duration, assuming minimal duration");
+ stop = start + 1; /* we need to assume some interval */
}
}
- g_mutex_unlock (&render->subtitle_mutex);
- /* now start rendering subtitles, if all conditions are met */
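+/* The video chain loops back here whenever it drops a text buffer or is
+ * woken up while waiting for one, re-checking the flushing/EOS state and
+ * the currently pending text buffer. */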
+wait_for_text_buf:
+
+ GST_ASS_RENDER_LOCK (render);
+
+ if (render->video_flushing)
+ goto flushing;
+
+ if (render->video_eos)
+ goto have_eos;
+
if (render->renderer_init_ok && render->track_init_ok && render->enable) {
- GstClockTime running_time;
- gdouble timestamp;
-#ifndef GST_DISABLE_GST_DEBUG
- gdouble step;
-#endif
+ /* Text pad linked, check if we have a text buffer queued */
+ if (render->subtitle_pending) {
+ GstClockTime text_start = GST_CLOCK_TIME_NONE;
+ GstClockTime text_end = GST_CLOCK_TIME_NONE;
+ GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
+ GstClockTime text_running_time_end = GST_CLOCK_TIME_NONE;
+ GstClockTime vid_running_time, vid_running_time_end;
+ gdouble timestamp;
+ gint changed = 0;
+
+ /* if the text buffer isn't stamped right, pop it off the
+ * queue and move on to the next one */
+ if (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending) ||
+ !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending)) {
+ GST_WARNING_OBJECT (render,
+ "Got text buffer with invalid timestamp or duration");
+ gst_ass_render_pop_text (render);
+ GST_ASS_RENDER_UNLOCK (render);
+ goto wait_for_text_buf;
+ }
- running_time =
- gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (buffer));
- GST_DEBUG_OBJECT (render,
- "rendering frame for running time %" GST_TIME_FORMAT,
- GST_TIME_ARGS (running_time));
- /* libass needs timestamps in ms */
- timestamp = running_time / GST_MSECOND;
-
- g_mutex_lock (&render->ass_mutex);
-#ifndef GST_DISABLE_GST_DEBUG
- /* only for testing right now. could possibly be used for optimizations? */
- step = ass_step_sub (render->ass_track, timestamp, 1);
- GST_DEBUG_OBJECT (render, "Current running time: %" GST_TIME_FORMAT
- " // Next event: %" GST_TIME_FORMAT,
- GST_TIME_ARGS (running_time), GST_TIME_ARGS (step * GST_MSECOND));
-#endif
+ text_start = GST_BUFFER_TIMESTAMP (render->subtitle_pending);
+ text_end = text_start + GST_BUFFER_DURATION (render->subtitle_pending);
+
+ vid_running_time =
+ gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
+ start);
+ vid_running_time_end =
+ gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
+ stop);
+
+ /* If timestamp and duration are valid */
+ text_running_time =
+ gst_segment_to_running_time (&render->video_segment,
+ GST_FORMAT_TIME, text_start);
+ text_running_time_end =
+ gst_segment_to_running_time (&render->video_segment,
+ GST_FORMAT_TIME, text_end);
+
+ GST_LOG_OBJECT (render, "T: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (text_running_time),
+ GST_TIME_ARGS (text_running_time_end));
+ GST_LOG_OBJECT (render, "V: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (vid_running_time),
+ GST_TIME_ARGS (vid_running_time_end));
+
+ /* Text too old */
+ if (text_running_time_end <= vid_running_time) {
+ GST_DEBUG_OBJECT (render, "text buffer too old, popping");
+ gst_ass_render_pop_text (render);
+ GST_ASS_RENDER_UNLOCK (render);
+ goto wait_for_text_buf;
+ }
+
+ if (render->need_process) {
+ GST_DEBUG_OBJECT (render, "process text buffer");
+ gst_ass_render_process_text (render, render->subtitle_pending,
+ text_running_time, text_running_time_end - text_running_time);
+ render->need_process = FALSE;
+ }
- /* not sure what the last parameter to this call is for (detect_change) */
- ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
- timestamp, NULL);
- g_mutex_unlock (&render->ass_mutex);
+ GST_ASS_RENDER_UNLOCK (render);
- if (ass_image != NULL) {
- GstVideoFrame frame;
+ /* libass needs timestamps in ms */
+ timestamp = vid_running_time / GST_MSECOND;
- buffer = gst_buffer_make_writable (buffer);
+ g_mutex_lock (&render->ass_mutex);
+ ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
+ timestamp, &changed);
+ g_mutex_unlock (&render->ass_mutex);
- gst_video_frame_map (&frame, &render->info, buffer, GST_MAP_WRITE);
- render->blit (render, ass_image, &frame);
- gst_video_frame_unmap (&frame);
+ if ((!ass_image || changed) && render->composition) {
+ GST_DEBUG_OBJECT (render, "release overlay (changed %d)", changed);
+ gst_video_overlay_composition_unref (render->composition);
+ render->composition = NULL;
+ }
+
+ if (ass_image != NULL) {
+ if (!render->composition)
+ render->composition = gst_ass_render_composite_overlay (render,
+ ass_image);
+ } else {
+ GST_DEBUG_OBJECT (render, "nothing to render right now");
+ }
+
+ /* Push the video frame */
+ ret = gst_ass_render_push_frame (render, buffer);
+
+ if (text_running_time_end <= vid_running_time_end) {
+ GST_ASS_RENDER_LOCK (render);
+ gst_ass_render_pop_text (render);
+ GST_ASS_RENDER_UNLOCK (render);
+ }
} else {
- GST_LOG_OBJECT (render, "nothing to render right now");
+ gboolean wait_for_text_buf = TRUE;
+
+ if (render->subtitle_eos)
+ wait_for_text_buf = FALSE;
+
+ if (!render->wait_text)
+ wait_for_text_buf = FALSE;
+
+ /* Text pad linked, but no text buffer available - what now? */
+ if (render->subtitle_segment.format == GST_FORMAT_TIME) {
+ GstClockTime text_start_running_time, text_last_stop_running_time;
+ GstClockTime vid_running_time;
+
+ vid_running_time =
+ gst_segment_to_running_time (&render->video_segment,
+ GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
+ text_start_running_time =
+ gst_segment_to_running_time (&render->subtitle_segment,
+ GST_FORMAT_TIME, render->subtitle_segment.start);
+ text_last_stop_running_time =
+ gst_segment_to_running_time (&render->subtitle_segment,
+ GST_FORMAT_TIME, render->subtitle_segment.position);
+
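+ /* don't wait if this video frame lies before the text segment starts
+ * or before the position the subtitle stream has already advanced to
+ * (e.g. via a GAP event); no text buffer can arrive for it then */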
+ if ((GST_CLOCK_TIME_IS_VALID (text_start_running_time) &&
+ vid_running_time < text_start_running_time) ||
+ (GST_CLOCK_TIME_IS_VALID (text_last_stop_running_time) &&
+ vid_running_time < text_last_stop_running_time)) {
+ wait_for_text_buf = FALSE;
+ }
+ }
+
+ if (wait_for_text_buf) {
+ GST_DEBUG_OBJECT (render, "no text buffer, need to wait for one");
+ GST_ASS_RENDER_WAIT (render);
+ GST_DEBUG_OBJECT (render, "resuming");
+ GST_ASS_RENDER_UNLOCK (render);
+ goto wait_for_text_buf;
+ } else {
+ GST_ASS_RENDER_UNLOCK (render);
+ GST_LOG_OBJECT (render, "no need to wait for a text buffer");
+ ret = gst_pad_push (render->srcpad, buffer);
+ }
}
} else {
GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");
+
+ GST_ASS_RENDER_UNLOCK (render);
+ ret = gst_pad_push (render->srcpad, buffer);
+ return ret;
}
- ret = gst_pad_push (render->srcpad, buffer);
+ GST_DEBUG_OBJECT (render, "leaving chain for buffer %p ret=%d", buffer, ret);
+
+ /* Update last_stop */
+ render->video_segment.position = clip_start;
return ret;
+missing_timestamp:
+ {
+ GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+flushing:
+ {
+ GST_ASS_RENDER_UNLOCK (render);
+ GST_DEBUG_OBJECT (render, "flushing, discarding buffer");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_FLUSHING;
+ }
+have_eos:
+ {
+ GST_ASS_RENDER_UNLOCK (render);
+ GST_DEBUG_OBJECT (render, "eos, discarding buffer");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_EOS;
+ }
out_of_segment:
{
GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
{
GstFlowReturn ret = GST_FLOW_OK;
GstAssRender *render = GST_ASS_RENDER (parent);
- GstClockTime timestamp, duration;
- GstClockTime sub_running_time, vid_running_time;
- GstClockTime sub_running_time_end;
- guint64 cstart, cstop;
- gboolean in_seg;
+ gboolean in_seg = FALSE;
+ guint64 clip_start = 0, clip_stop = 0;
+
+ GST_DEBUG_OBJECT (render, "entering chain for buffer %p", buffer);
+
+ GST_ASS_RENDER_LOCK (render);
if (render->subtitle_flushing) {
- gst_buffer_unref (buffer);
- return GST_FLOW_WRONG_STATE;
+ GST_ASS_RENDER_UNLOCK (render);
+ ret = GST_FLOW_FLUSHING;
+ GST_LOG_OBJECT (render, "text flushing");
+ goto beach;
}
- timestamp = GST_BUFFER_TIMESTAMP (buffer);
- duration = GST_BUFFER_DURATION (buffer);
-
- if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp)
- || !GST_CLOCK_TIME_IS_VALID (duration))) {
- GST_WARNING_OBJECT (render,
- "Text buffer without valid timestamp" " or duration, dropping");
- gst_buffer_unref (buffer);
- return GST_FLOW_OK;
+ if (render->subtitle_eos) {
+ GST_ASS_RENDER_UNLOCK (render);
+ ret = GST_FLOW_EOS;
+ GST_LOG_OBJECT (render, "text EOS");
+ goto beach;
}
- in_seg =
- gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME, timestamp,
- timestamp + duration, &cstart, &cstop);
- if (!in_seg) {
- GST_DEBUG_OBJECT (render,
- "Text buffer before segment start (%" GST_TIME_FORMAT " < %"
- GST_TIME_FORMAT ")", GST_TIME_ARGS (timestamp),
- GST_TIME_ARGS (render->subtitle_segment.start));
- gst_buffer_unref (buffer);
- return GST_FLOW_OK;
+ if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
+ GstClockTime stop;
+
+ if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
+ stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
+ else
+ stop = GST_CLOCK_TIME_NONE;
+
+ in_seg = gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
+ } else {
+ in_seg = TRUE;
}
- GST_BUFFER_TIMESTAMP (buffer) = timestamp = cstart;
- GST_BUFFER_DURATION (buffer) = duration = cstop - cstart;
-
- render->subtitle_segment.position = GST_BUFFER_TIMESTAMP (buffer);
-
- sub_running_time =
- gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
- timestamp);
- sub_running_time_end =
- gst_segment_to_running_time (&render->subtitle_segment, GST_FORMAT_TIME,
- timestamp + duration);
- vid_running_time =
- gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
- render->video_segment.position);
-
- if (render->info.fps_n && render->info.fps_d)
- vid_running_time +=
- gst_util_uint64_scale (GST_SECOND, render->info.fps_d,
- render->info.fps_n);
-
- if (sub_running_time > vid_running_time + GST_SECOND / 2) {
- g_assert (render->subtitle_pending == NULL);
- g_mutex_lock (&render->subtitle_mutex);
- if (G_UNLIKELY (render->subtitle_flushing)) {
- GST_DEBUG_OBJECT (render, "Text pad flushing");
- gst_buffer_unref (buffer);
- g_mutex_unlock (&render->subtitle_mutex);
- return GST_FLOW_WRONG_STATE;
+ if (in_seg) {
+ if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
+ GST_BUFFER_TIMESTAMP (buffer) = clip_start;
+ else if (GST_BUFFER_DURATION_IS_VALID (buffer))
+ GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
+
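+ /* a pending buffer without usable timestamp/duration can simply be
+ * replaced; otherwise wait until the video chain has consumed it */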
+ if (render->subtitle_pending
+ && (!GST_BUFFER_TIMESTAMP_IS_VALID (render->subtitle_pending)
+ || !GST_BUFFER_DURATION_IS_VALID (render->subtitle_pending))) {
+ gst_buffer_unref (render->subtitle_pending);
+ render->subtitle_pending = NULL;
+ GST_ASS_RENDER_BROADCAST (render);
+ } else {
+ /* Wait for the previous buffer to go away */
+ while (render->subtitle_pending != NULL) {
+ GST_DEBUG ("Pad %s:%s has a buffer queued, waiting",
+ GST_DEBUG_PAD_NAME (pad));
+ GST_ASS_RENDER_WAIT (render);
+ GST_DEBUG ("Pad %s:%s resuming", GST_DEBUG_PAD_NAME (pad));
+ if (render->subtitle_flushing) {
+ GST_ASS_RENDER_UNLOCK (render);
+ ret = GST_FLOW_FLUSHING;
+ goto beach;
+ }
+ }
}
+
+ if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
+ render->subtitle_segment.position = clip_start;
+
GST_DEBUG_OBJECT (render,
- "Too early text buffer, waiting (%" GST_TIME_FORMAT " > %"
- GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time),
- GST_TIME_ARGS (vid_running_time));
- render->subtitle_pending = buffer;
- g_cond_wait (&render->subtitle_cond, &render->subtitle_mutex);
- g_mutex_unlock (&render->subtitle_mutex);
- } else if (sub_running_time_end < vid_running_time) {
- GST_DEBUG_OBJECT (render,
- "Too late text buffer, dropping (%" GST_TIME_FORMAT " < %"
- GST_TIME_FORMAT, GST_TIME_ARGS (sub_running_time_end),
- GST_TIME_ARGS (vid_running_time));
- gst_buffer_unref (buffer);
- ret = GST_FLOW_OK;
- } else {
- gst_ass_render_process_text (render, buffer, sub_running_time,
- sub_running_time_end - sub_running_time);
- ret = GST_FLOW_OK;
+ "New buffer arrived for timestamp %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
+ render->subtitle_pending = gst_buffer_ref (buffer);
+ render->need_process = TRUE;
+
+ /* in case the video chain is waiting for a text buffer, wake it up */
+ GST_ASS_RENDER_BROADCAST (render);
}
- GST_DEBUG_OBJECT (render,
- "processed text packet with timestamp %" GST_TIME_FORMAT
- " and duration %" GST_TIME_FORMAT,
- GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration));
+ GST_ASS_RENDER_UNLOCK (render);
+
+beach:
+ GST_DEBUG_OBJECT (render, "leaving chain for buffer %p", buffer);
+ gst_buffer_unref (buffer);
return ret;
}
static void
gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
{
-#if 0
static const gchar *mimetypes[] = {
"application/x-font-ttf",
"application/x-font-otf",
".otf",
".ttf"
};
-#endif
guint tag_size;
if (!taglist)
tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
if (tag_size > 0 && render->embeddedfonts) {
-#if 0
- const GValue *value;
+ GstSample *sample;
GstBuffer *buf;
- GstCaps *caps;
- GstStructure *structure;
+ const GstStructure *structure;
gboolean valid_mimetype, valid_extension;
guint j;
const gchar *filename;
-#endif
guint index;
+ GstMapInfo map;
GST_DEBUG_OBJECT (render, "TAG event has attachments");
for (index = 0; index < tag_size; index++) {
-#if 0
- value = gst_tag_list_get_value_index (taglist, GST_TAG_ATTACHMENT, index);
- buf = gst_value_get_buffer (value);
- if (!buf || !GST_BUFFER_CAPS (buf))
+ if (!gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, index,
+ &sample))
+ continue;
+ buf = gst_sample_get_buffer (sample);
+ structure = gst_sample_get_info (sample);
+ if (!buf || !structure)
continue;
-
- caps = GST_BUFFER_CAPS (buf);
- structure = gst_caps_get_structure (caps, 0);
valid_mimetype = FALSE;
valid_extension = FALSE;
if (valid_mimetype || valid_extension) {
g_mutex_lock (&render->ass_mutex);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
ass_add_font (render->ass_library, (gchar *) filename,
- (gchar *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ (gchar *) map.data, map.size);
+ gst_buffer_unmap (buf, &map);
GST_DEBUG_OBJECT (render, "registered new font %s", filename);
g_mutex_unlock (&render->ass_mutex);
}
-#endif
}
}
}
gboolean ret = FALSE;
GstAssRender *render = GST_ASS_RENDER (parent);
- GST_DEBUG_OBJECT (pad, "received video event %s",
- GST_EVENT_TYPE_NAME (event));
+ GST_DEBUG_OBJECT (pad, "received video event %" GST_PTR_FORMAT, event);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
GstCaps *caps;
gst_event_parse_caps (event, &caps);
- ret = gst_ass_render_setcaps_video (pad, caps);
+ ret = gst_ass_render_setcaps_video (pad, render, caps);
gst_event_unref (event);
break;
}
GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
&render->video_segment);
- ret = gst_pad_push_event (render->srcpad, event);
+ ret = gst_pad_event_default (pad, parent, event);
} else {
GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
("received non-TIME newsegment event on video input"));
gst_event_parse_tag (event, &taglist);
gst_ass_render_handle_tags (render, taglist);
- ret = gst_pad_push_event (render->srcpad, event);
+ ret = gst_pad_event_default (pad, parent, event);
break;
}
+ case GST_EVENT_EOS:
+ GST_ASS_RENDER_LOCK (render);
+ GST_INFO_OBJECT (render, "video EOS");
+ render->video_eos = TRUE;
+ GST_ASS_RENDER_UNLOCK (render);
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ case GST_EVENT_FLUSH_START:
+ GST_ASS_RENDER_LOCK (render);
+ GST_INFO_OBJECT (render, "video flush start");
+ render->video_flushing = TRUE;
+ GST_ASS_RENDER_BROADCAST (render);
+ GST_ASS_RENDER_UNLOCK (render);
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
case GST_EVENT_FLUSH_STOP:
+ GST_ASS_RENDER_LOCK (render);
+ GST_INFO_OBJECT (render, "video flush stop");
+ render->video_flushing = FALSE;
+ render->video_eos = FALSE;
gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
+ GST_ASS_RENDER_UNLOCK (render);
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
default:
- ret = gst_pad_push_event (render->srcpad, event);
+ ret = gst_pad_event_default (pad, parent, event);
break;
}
GstCaps *filter, *caps;
gst_query_parse_caps (query, &filter);
- caps = gst_ass_render_getcaps (pad, filter);
+ caps =
+ gst_ass_render_get_videosink_caps (pad, (GstAssRender *) parent,
+ filter);
gst_query_set_caps_result (query, caps);
gst_caps_unref (caps);
res = TRUE;
gboolean ret = FALSE;
GstAssRender *render = GST_ASS_RENDER (parent);
- GST_DEBUG_OBJECT (pad, "received text event %s", GST_EVENT_TYPE_NAME (event));
+ GST_DEBUG_OBJECT (pad, "received text event %" GST_PTR_FORMAT, event);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
GstCaps *caps;
gst_event_parse_caps (event, &caps);
- ret = gst_ass_render_setcaps_text (pad, caps);
+ ret = gst_ass_render_setcaps_text (pad, render, caps);
gst_event_unref (event);
break;
}
{
GstSegment segment;
- GST_DEBUG_OBJECT (render, "received new segment");
+ GST_ASS_RENDER_LOCK (render);
+ render->subtitle_eos = FALSE;
+ GST_ASS_RENDER_UNLOCK (render);
gst_event_copy_segment (event, &segment);
+ GST_ASS_RENDER_LOCK (render);
if (segment.format == GST_FORMAT_TIME) {
- GST_DEBUG_OBJECT (render, "SUBTITLE SEGMENT now: %" GST_SEGMENT_FORMAT,
+ GST_DEBUG_OBJECT (render, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
&render->subtitle_segment);
render->subtitle_segment = segment;
GST_DEBUG_OBJECT (render,
- "SUBTITLE SEGMENT after: %" GST_SEGMENT_FORMAT,
+ "TEXT SEGMENT after: %" GST_SEGMENT_FORMAT,
&render->subtitle_segment);
- ret = TRUE;
- gst_event_unref (event);
} else {
GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
("received non-TIME newsegment event on subtitle input"));
- ret = FALSE;
- gst_event_unref (event);
}
+
+ gst_event_unref (event);
+ ret = TRUE;
+
+ /* wake up the video chain, it might be waiting for a text buffer or
+ * a text segment update */
+ GST_ASS_RENDER_BROADCAST (render);
+ GST_ASS_RENDER_UNLOCK (render);
+ break;
+ }
+ case GST_EVENT_GAP:{
+ GstClockTime start, duration;
+
+ gst_event_parse_gap (event, &start, &duration);
+ if (GST_CLOCK_TIME_IS_VALID (duration))
+ start += duration;
+ /* we do not expect another buffer until after gap,
+ * so that is our position now */
+ GST_ASS_RENDER_LOCK (render);
+ render->subtitle_segment.position = start;
+
+ /* wake up the video chain, it might be waiting for a text buffer or
+ * a text segment update */
+ GST_ASS_RENDER_BROADCAST (render);
+ GST_ASS_RENDER_UNLOCK (render);
+
+ gst_event_unref (event);
+ ret = TRUE;
break;
}
case GST_EVENT_FLUSH_STOP:
- gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
+ GST_ASS_RENDER_LOCK (render);
+ GST_INFO_OBJECT (render, "text flush stop");
render->subtitle_flushing = FALSE;
+ render->subtitle_eos = FALSE;
+ gst_ass_render_pop_text (render);
+ gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
+ GST_ASS_RENDER_UNLOCK (render);
gst_event_unref (event);
ret = TRUE;
break;
case GST_EVENT_FLUSH_START:
- GST_DEBUG_OBJECT (render, "begin flushing");
+ GST_DEBUG_OBJECT (render, "text flush start");
g_mutex_lock (&render->ass_mutex);
if (render->ass_track) {
/* delete any events on the ass_track */
GST_DEBUG_OBJECT (render, "done flushing");
}
g_mutex_unlock (&render->ass_mutex);
- g_mutex_lock (&render->subtitle_mutex);
- if (render->subtitle_pending)
- gst_buffer_unref (render->subtitle_pending);
- render->subtitle_pending = NULL;
+ GST_ASS_RENDER_LOCK (render);
render->subtitle_flushing = TRUE;
- g_cond_signal (&render->subtitle_cond);
- g_mutex_unlock (&render->subtitle_mutex);
+ GST_ASS_RENDER_BROADCAST (render);
+ GST_ASS_RENDER_UNLOCK (render);
gst_event_unref (event);
ret = TRUE;
break;
case GST_EVENT_EOS:
- GST_OBJECT_LOCK (render);
+ GST_ASS_RENDER_LOCK (render);
+ render->subtitle_eos = TRUE;
GST_INFO_OBJECT (render, "text EOS");
- GST_OBJECT_UNLOCK (render);
+ /* wake up the video chain, it might be waiting for a text buffer or
+ * a text segment update */
+ GST_ASS_RENDER_BROADCAST (render);
+ GST_ASS_RENDER_UNLOCK (render);
gst_event_unref (event);
ret = TRUE;
break;
gst_event_parse_tag (event, &taglist);
gst_ass_render_handle_tags (render, taglist);
- ret = gst_pad_push_event (render->srcpad, event);
+ ret = gst_pad_event_default (pad, parent, event);
break;
}
default:
- ret = gst_pad_push_event (render->srcpad, event);
+ ret = gst_pad_event_default (pad, parent, event);
break;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
- "assrender",
+ assrender,
"ASS/SSA subtitle renderer",
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)