#define I420_SIZE(w,h) (I420_V_OFFSET(w,h)+(I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
+#define GST_TEXT_OVERLAY_GET_COND(ov) (((GstTextOverlay *)ov)->cond)
+#define GST_TEXT_OVERLAY_WAIT(ov) (g_cond_wait (GST_TEXT_OVERLAY_GET_COND (ov), GST_OBJECT_GET_LOCK (ov)))
+#define GST_TEXT_OVERLAY_SIGNAL(ov) (g_cond_signal (GST_TEXT_OVERLAY_GET_COND (ov)))
+#define GST_TEXT_OVERLAY_BROADCAST(ov) (g_cond_broadcast (GST_TEXT_OVERLAY_GET_COND (ov)))
static GstStateChangeReturn gst_text_overlay_change_state (GstElement * element,
GstStateChange transition);
+
static GstCaps *gst_text_overlay_getcaps (GstPad * pad);
static gboolean gst_text_overlay_setcaps (GstPad * pad, GstCaps * caps);
static gboolean gst_text_overlay_src_event (GstPad * pad, GstEvent * event);
-static GstFlowReturn gst_text_overlay_collected (GstCollectPads * pads,
- gpointer data);
+
+static gboolean gst_text_overlay_video_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_text_overlay_video_chain (GstPad * pad,
+ GstBuffer * buffer);
+
+static gboolean gst_text_overlay_text_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_text_overlay_text_chain (GstPad * pad,
+ GstBuffer * buffer);
+static GstPadLinkReturn gst_text_overlay_text_pad_link (GstPad * pad,
+ GstPad * peer);
+static void gst_text_overlay_text_pad_unlink (GstPad * pad);
+static void gst_text_overlay_pop_text (GstTextOverlay * overlay);
+
static void gst_text_overlay_finalize (GObject * object);
static void gst_text_overlay_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
{
GstTextOverlay *overlay = GST_TEXT_OVERLAY (object);
- gst_object_unref (overlay->collect);
-
g_free (overlay->default_text);
g_free (overlay->bitmap.buffer);
if (overlay->layout)
g_object_unref (overlay->layout);
+ if (overlay->segment) {
+ gst_segment_free (overlay->segment);
+ overlay->segment = NULL;
+ }
+
+ if (overlay->cond) {
+ g_cond_free (overlay->cond);
+ overlay->cond = NULL;
+ }
+
G_OBJECT_CLASS (parent_class)->finalize (object);
}
GST_DEBUG_FUNCPTR (gst_text_overlay_getcaps));
gst_pad_set_setcaps_function (overlay->video_sinkpad,
GST_DEBUG_FUNCPTR (gst_text_overlay_setcaps));
+ gst_pad_set_event_function (overlay->video_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_text_overlay_video_event));
+ gst_pad_set_chain_function (overlay->video_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_text_overlay_video_chain));
gst_element_add_pad (GST_ELEMENT (overlay), overlay->video_sinkpad);
if (!GST_IS_TIME_OVERLAY_CLASS (klass) && !GST_IS_CLOCK_OVERLAY_CLASS (klass)) {
overlay->text_sinkpad =
gst_pad_new_from_template (gst_static_pad_template_get
(&text_sink_template_factory), "text_sink");
+ gst_pad_set_event_function (overlay->text_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_text_overlay_text_event));
+ gst_pad_set_chain_function (overlay->text_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_text_overlay_text_chain));
+ gst_pad_set_link_function (overlay->text_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_text_overlay_text_pad_link));
+ gst_pad_set_unlink_function (overlay->text_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_text_overlay_text_pad_unlink));
gst_element_add_pad (GST_ELEMENT (overlay), overlay->text_sinkpad);
}
overlay->fps_n = 0;
overlay->fps_d = 1;
- overlay->collect = gst_collect_pads_new ();
-
- gst_collect_pads_set_function (overlay->collect,
- GST_DEBUG_FUNCPTR (gst_text_overlay_collected), overlay);
-
- overlay->video_collect_data = gst_collect_pads_add_pad (overlay->collect,
- overlay->video_sinkpad, sizeof (GstCollectData));
- if (overlay->text_sinkpad) {
- overlay->text_collect_data = gst_collect_pads_add_pad (overlay->collect,
- overlay->text_sinkpad, sizeof (GstCollectData));
+ overlay->text_buffer = NULL;
+ overlay->text_linked = FALSE;
+ overlay->cond = g_cond_new ();
+ overlay->segment = gst_segment_new ();
+ if (overlay->segment) {
+ gst_segment_init (overlay->segment, GST_FORMAT_TIME);
+ } else {
+ GST_WARNING_OBJECT (overlay, "segment creation failed");
+ g_assert_not_reached ();
}
}
GST_OBJECT_UNLOCK (overlay);
}
+ gst_object_unref (overlay);
+
return ret;
}
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:
/* We don't handle seek if we have not text pad */
- if (!overlay->text_sinkpad) {
+ if (!overlay->text_linked) {
ret = gst_pad_push_event (overlay->video_sinkpad, event);
goto beach;
}
/* Flush downstream */
gst_pad_push_event (overlay->srcpad, gst_event_new_flush_start ());
- /* Stopping collect pads */
- gst_collect_pads_stop (overlay->collect);
+ /* Mark our sink pads as flushing to acquire stream lock */
+ GST_OBJECT_LOCK (overlay->video_sinkpad);
+ GST_PAD_SET_FLUSHING (overlay->video_sinkpad);
+ GST_OBJECT_UNLOCK (overlay->video_sinkpad);
+ GST_OBJECT_LOCK (overlay->text_sinkpad);
+ GST_PAD_SET_FLUSHING (overlay->text_sinkpad);
+ GST_OBJECT_UNLOCK (overlay->text_sinkpad);
+
+  /* Unblock the text chain if it's waiting; pop_text must be
+   * called with the object lock held (it touches text_buffer,
+   * which text_chain accesses under the same lock) */
+  GST_OBJECT_LOCK (overlay);
+  gst_text_overlay_pop_text (overlay);
+  GST_OBJECT_UNLOCK (overlay);
- /* Acquire stream lock */
+ /* Take the stream locks */
GST_PAD_STREAM_LOCK (overlay->video_sinkpad);
GST_PAD_STREAM_LOCK (overlay->text_sinkpad);
gst_event_unref (event);
}
- /* Start collect pads again */
- gst_collect_pads_start (overlay->collect);
-
- /* Release stream lock */
+ /* Release the locks */
GST_PAD_STREAM_UNLOCK (overlay->video_sinkpad);
GST_PAD_STREAM_UNLOCK (overlay->text_sinkpad);
break;
default:
gst_event_ref (event);
ret = gst_pad_push_event (overlay->video_sinkpad, event);
- if (overlay->text_sinkpad) {
+ if (overlay->text_linked) {
ret = gst_pad_push_event (overlay->text_sinkpad, event);
}
}
PangoRectangle ink_rect, logical_rect;
gchar *string;
- if (textlen < 0)
+ /* -1 is the whole string */
+ if (text != NULL && textlen < 0) {
textlen = strlen (text);
+ }
- string = g_strndup (text, textlen);
+ if (text != NULL) {
+ string = g_strndup (text, textlen);
+ } else { /* empty string */
+ string = g_strdup (" ");
+ }
g_strdelimit (string, "\n\r\t", ' ');
textlen = strlen (string);
return gst_pad_push (overlay->srcpad, video_frame);
}
-static void
-gst_text_overlay_pop_video (GstTextOverlay * overlay)
+static GstPadLinkReturn
+gst_text_overlay_text_pad_link (GstPad * pad, GstPad * peer)
{
- GstBuffer *buf;
+ GstTextOverlay *overlay;
+
+ overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
- buf = gst_collect_pads_pop (overlay->collect, overlay->video_collect_data);
- g_return_if_fail (buf != NULL);
- gst_buffer_unref (buf);
+ GST_DEBUG_OBJECT (overlay, "Text pad linked");
+
+ overlay->text_linked = TRUE;
+
+ gst_object_unref (overlay);
+
+ return GST_PAD_LINK_OK;
}
static void
-gst_text_overlay_pop_text (GstTextOverlay * overlay)
+gst_text_overlay_text_pad_unlink (GstPad * pad)
{
- GstBuffer *buf;
+ GstTextOverlay *overlay;
- if (overlay->text_collect_data) {
- buf = gst_collect_pads_pop (overlay->collect, overlay->text_collect_data);
- g_return_if_fail (buf != NULL);
- gst_buffer_unref (buf);
- }
+ /* don't use gst_pad_get_parent() here, will deadlock */
+ overlay = GST_TEXT_OVERLAY (GST_PAD_PARENT (pad));
- overlay->need_render = TRUE;
+ GST_DEBUG_OBJECT (overlay, "Text pad unlinked");
+
+ overlay->text_linked = FALSE;
}
-/* This function is called when there is data on all pads */
-static GstFlowReturn
-gst_text_overlay_collected (GstCollectPads * pads, gpointer data)
+static gboolean
+gst_text_overlay_text_event (GstPad * pad, GstEvent * event)
{
- GstTextOverlayClass *klass;
- GstTextOverlay *overlay;
- GstFlowReturn ret = GST_FLOW_OK;
- GstClockTime now, txt_end, frame_end;
- GstBuffer *video_frame = NULL;
- GstBuffer *text_buf = NULL;
- gchar *text;
+ gboolean ret = FALSE;
+ GstTextOverlay *overlay = NULL;
+
+ overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_FLUSH_START:
+ case GST_EVENT_FLUSH_STOP:
+      /* We just ignore those events from the text pad.
+       * NOTE(review): FLUSH_START is dropped too, so a text_chain
+       * blocked in GST_TEXT_OVERLAY_WAIT is not woken from here —
+       * verify an upstream flush on the text pad cannot deadlock us */
+ gst_event_unref (event);
+ ret = TRUE;
+ break;
+
+ default:
+ ret = gst_pad_event_default (pad, event);
+ goto beach;
+ }
- overlay = GST_TEXT_OVERLAY (data);
- klass = GST_TEXT_OVERLAY_GET_CLASS (data);
+beach:
+ gst_object_unref (overlay);
- GST_DEBUG ("Collecting");
+ return ret;
+}
- if (overlay->video_collect_data->abidata.ABI.new_segment) {
+static gboolean
+gst_text_overlay_video_event (GstPad * pad, GstEvent * event)
+{
+ gboolean ret = FALSE;
+ GstTextOverlay *overlay = NULL;
- GST_DEBUG ("generating newsegment, start %" GST_TIME_FORMAT
- ", stop %" GST_TIME_FORMAT,
- GST_TIME_ARGS (overlay->video_collect_data->segment.start),
- GST_TIME_ARGS (overlay->video_collect_data->segment.stop));
+ overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
- gst_pad_push_event (overlay->srcpad, gst_event_new_new_segment (FALSE,
- overlay->video_collect_data->segment.rate, GST_FORMAT_TIME,
- overlay->video_collect_data->segment.start,
- overlay->video_collect_data->segment.stop,
- overlay->video_collect_data->segment.last_stop));
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_NEWSEGMENT:
+ {
+ GstFormat format;
+ gdouble rate;
+ gint64 start, stop, time;
+ gboolean update;
- overlay->video_collect_data->abidata.ABI.new_segment = FALSE;
- }
+ GST_DEBUG_OBJECT (overlay, "received new segment");
- video_frame = gst_collect_pads_peek (overlay->collect,
- overlay->video_collect_data);
+ gst_event_parse_new_segment (event, &update, &rate, &format, &start,
+ &stop, &time);
- /* send EOS if video stream EOSed regardless of text stream */
- if (video_frame == NULL) {
- GST_DEBUG ("Video stream at EOS");
- if (overlay->text_collect_data) {
- text_buf = gst_collect_pads_pop (overlay->collect,
- overlay->text_collect_data);
+ /* now copy over the values */
+ gst_segment_set_newsegment (overlay->segment, update, rate, format,
+ start, stop, time);
+
+ ret = gst_pad_event_default (pad, event);
+ break;
}
- gst_pad_push_event (overlay->srcpad, gst_event_new_eos ());
- ret = GST_FLOW_UNEXPECTED;
- goto done;
+ case GST_EVENT_FLUSH_START:
+ case GST_EVENT_FLUSH_STOP:
+ case GST_EVENT_EOS:
+ default:
+ ret = gst_pad_event_default (pad, event);
}
- if (GST_BUFFER_TIMESTAMP (video_frame) == GST_CLOCK_TIME_NONE) {
- g_warning ("%s: video frame has invalid timestamp", G_STRLOC);
- }
+ gst_object_unref (overlay);
- now = GST_BUFFER_TIMESTAMP (video_frame);
+ return ret;
+}
- if (GST_BUFFER_DURATION (video_frame) != GST_CLOCK_TIME_NONE) {
- frame_end = now + GST_BUFFER_DURATION (video_frame);
- } else if (overlay->fps_n > 0) {
- frame_end = now + gst_util_uint64_scale_int (GST_SECOND,
- overlay->fps_d, overlay->fps_n);
- } else {
- /* magic value, does not really matter since texts
- * tend to span quite a few frames in practice anyway */
- frame_end = now + GST_SECOND / 25;
+/* Called with lock held */
+static void
+gst_text_overlay_pop_text (GstTextOverlay * overlay)
+{
+ g_return_if_fail (GST_IS_TEXT_OVERLAY (overlay));
+
+ if (overlay->text_buffer) {
+ GST_DEBUG_OBJECT (overlay, "releasing text buffer %p",
+ overlay->text_buffer);
+ gst_buffer_unref (overlay->text_buffer);
+ overlay->text_buffer = NULL;
}
- GST_DEBUG ("Got video frame: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
- GST_TIME_ARGS (now), GST_TIME_ARGS (frame_end));
+ /* Let the text task know we used that buffer */
+ GST_TEXT_OVERLAY_BROADCAST (overlay);
+}
- /* text pad not linked? */
- if (overlay->text_collect_data == NULL) {
- gchar *txt;
+/* We receive text buffers here. If they are out of segment we just ignore them.
+ If the buffer is in our segment we keep it internally except if another one
+ is already waiting here, in that case we wait that it gets kicked out */
+static GstFlowReturn
+gst_text_overlay_text_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstTextOverlay *overlay = NULL;
+ gboolean in_seg = FALSE;
+ gint64 clip_start = 0, clip_stop = 0;
- if (klass->get_text)
- txt = klass->get_text (overlay, video_frame);
- else
- txt = g_strdup (overlay->default_text);
+ overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
- GST_DEBUG ("Text pad not linked, rendering default text: '%s'",
- GST_STR_NULL (txt));
- if (txt != NULL && *txt != '\0') {
- gst_text_overlay_render_text (overlay, txt, -1);
- ret = gst_text_overlay_push_frame (overlay, video_frame);
- } else {
- ret = gst_pad_push (overlay->srcpad, video_frame);
+ GST_OBJECT_LOCK (overlay);
+
+  /* FIXME: the clip range is bogus when timestamp or duration is
+   * GST_CLOCK_TIME_NONE (the addition below wraps) — validate first */
+  in_seg = gst_segment_clip (overlay->segment, GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (buffer),
+ GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer),
+ &clip_start, &clip_stop);
+
+ if (in_seg) {
+ GST_BUFFER_TIMESTAMP (buffer) = clip_start;
+ GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
+
+ /* Wait for the previous buffer to go away */
+ while (overlay->text_buffer != NULL) {
+ GST_DEBUG ("Pad %s:%s has a buffer queued, waiting",
+ GST_DEBUG_PAD_NAME (pad));
+ GST_TEXT_OVERLAY_WAIT (overlay);
+ GST_DEBUG ("Pad %s:%s resuming", GST_DEBUG_PAD_NAME (pad));
}
- gst_text_overlay_pop_video (overlay);
- video_frame = NULL;
- goto done;
+
+ overlay->text_buffer = buffer;
+ /* That's a new text buffer we need to render */
+ overlay->need_render = TRUE;
}
- text_buf = gst_collect_pads_peek (overlay->collect,
- overlay->text_collect_data);
+ GST_OBJECT_UNLOCK (overlay);
- /* just push the video frame if the text stream has EOSed */
- if (text_buf == NULL) {
- GST_DEBUG ("Text pad EOSed, just pushing video frame as is");
- ret = gst_pad_push (overlay->srcpad, video_frame);
- gst_text_overlay_pop_video (overlay);
- video_frame = NULL;
- goto done;
- }
+ gst_object_unref (overlay);
- /* if the text buffer isn't stamped right, pop it off the
- * queue and display it for the current video frame only */
- if (GST_BUFFER_TIMESTAMP (text_buf) == GST_CLOCK_TIME_NONE ||
- GST_BUFFER_DURATION (text_buf) == GST_CLOCK_TIME_NONE) {
- GST_WARNING ("Got text buffer with invalid time stamp or duration");
- gst_text_overlay_pop_text (overlay);
- GST_BUFFER_TIMESTAMP (text_buf) = now;
- GST_BUFFER_DURATION (text_buf) = frame_end - now;
- }
+ return ret;
+}
- txt_end = GST_BUFFER_TIMESTAMP (text_buf) + GST_BUFFER_DURATION (text_buf);
+static GstFlowReturn
+gst_text_overlay_video_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstTextOverlay *overlay = NULL;
+ gboolean in_seg = FALSE;
+ gint64 clip_start = 0, clip_stop = 0;
+ GstTextOverlayClass *klass = NULL;
- GST_DEBUG ("Got text buffer: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (text_buf)), GST_TIME_ARGS (txt_end));
+ overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
+ klass = GST_TEXT_OVERLAY_GET_CLASS (overlay);
- /* if the text buffer is too old, pop it off the
- * queue and return so we get a new one next time */
- if (txt_end < now) {
- GST_DEBUG ("Text buffer too old, popping off the queue");
- gst_text_overlay_pop_text (overlay);
- ret = GST_FLOW_OK;
- goto done;
- }
+ GST_OBJECT_LOCK (overlay);
- /* if the video frame ends before the text even starts,
- * just push it out as is and pop it off the queue */
- if (frame_end < GST_BUFFER_TIMESTAMP (text_buf)) {
- GST_DEBUG ("Video buffer before text, pushing out and popping off queue");
- ret = gst_pad_push (overlay->srcpad, video_frame);
- gst_text_overlay_pop_video (overlay);
- video_frame = NULL;
- goto done;
- }
+  /* FIXME: the clip range is bogus when timestamp or duration is
+   * GST_CLOCK_TIME_NONE (the addition below wraps) — validate first */
+  in_seg = gst_segment_clip (overlay->segment, GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (buffer),
+ GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer),
+ &clip_start, &clip_stop);
- /* text duration overlaps video frame duration */
- text = g_strndup ((gchar *) GST_BUFFER_DATA (text_buf),
- GST_BUFFER_SIZE (text_buf));
+ if (in_seg) {
+ gchar *text = NULL;
- if (text != NULL && *text != '\0') {
- gint text_len = strlen (text);
+ GST_BUFFER_TIMESTAMP (buffer) = clip_start;
+ GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
- while (text_len > 0 && (text[text_len - 1] == '\n' ||
- text[text_len - 1] == '\r')) {
- --text_len;
- }
- GST_DEBUG ("Rendering text '%*s'", text_len, text);;
- gst_text_overlay_render_text (overlay, text, text_len);
- } else {
- GST_DEBUG ("No text to render (empty buffer)");
- gst_text_overlay_render_text (overlay, " ", 1);
- }
+ /* Text pad not linked, rendering internal text */
+ if (!overlay->text_linked) {
+ if (klass->get_text) {
+ text = klass->get_text (overlay, buffer);
+ } else {
+ text = g_strdup (overlay->default_text);
+ }
+
+ GST_DEBUG_OBJECT (overlay, "Text pad not linked, rendering default "
+ "text: '%s'", GST_STR_NULL (text));
- g_free (text);
+ GST_OBJECT_UNLOCK (overlay);
- gst_text_overlay_pop_video (overlay);
- ret = gst_text_overlay_push_frame (overlay, video_frame);
- video_frame = NULL;
- goto done;
+ if (text != NULL && *text != '\0') {
+ /* Render and push */
+ gst_text_overlay_render_text (overlay, text, -1);
+ ret = gst_text_overlay_push_frame (overlay, buffer);
+ } else {
+ /* Invalid or empty string */
+ ret = gst_pad_push (overlay->srcpad, buffer);
+ }
+ } else {
+ if (overlay->text_buffer) {
+ gboolean pop_text = FALSE;
+ gint64 text_end = 0;
+
+ /* if the text buffer isn't stamped right, pop it off the
+ * queue and display it for the current video frame only */
+ if (GST_BUFFER_TIMESTAMP (overlay->text_buffer) == GST_CLOCK_TIME_NONE
+ || GST_BUFFER_DURATION (overlay->text_buffer) ==
+ GST_CLOCK_TIME_NONE) {
+ GST_WARNING_OBJECT (overlay,
+ "Got text buffer with invalid time " "stamp or duration");
+ gst_buffer_stamp (overlay->text_buffer, buffer);
+ pop_text = TRUE;
+ }
-done:
- {
- if (text_buf)
- gst_buffer_unref (text_buf);
+ text_end = GST_BUFFER_TIMESTAMP (overlay->text_buffer) +
+ GST_BUFFER_DURATION (overlay->text_buffer);
+
+ /* Text too old or in the future */
+ if ((text_end < clip_start) ||
+ (clip_stop < GST_BUFFER_TIMESTAMP (overlay->text_buffer))) {
+ if (text_end < clip_start) {
+ /* Get rid of it, if it's too old only */
+ pop_text = FALSE;
+ gst_text_overlay_pop_text (overlay);
+ }
+ GST_OBJECT_UNLOCK (overlay);
+ /* Push the video frame */
+ ret = gst_pad_push (overlay->srcpad, buffer);
+ } else {
+ /* Get the string */
+ text = g_strndup ((gchar *) GST_BUFFER_DATA (overlay->text_buffer),
+ GST_BUFFER_SIZE (overlay->text_buffer));
+
+ if (text != NULL && *text != '\0') {
+ gint text_len = strlen (text);
+
+ while (text_len > 0 && (text[text_len - 1] == '\n' ||
+ text[text_len - 1] == '\r')) {
+ --text_len;
+ }
+ GST_DEBUG_OBJECT (overlay, "Rendering text '%*s'", text_len, text);
+ gst_text_overlay_render_text (overlay, text, text_len);
+ } else {
+ GST_DEBUG_OBJECT (overlay, "No text to render (empty buffer)");
+ gst_text_overlay_render_text (overlay, " ", 1);
+          }
+
+          if (pop_text) {
+            /* text had invalid stamps: show it for this frame only,
+             * then release it so text_chain can queue the next one */
+            gst_text_overlay_pop_text (overlay);
+          }
+          GST_OBJECT_UNLOCK (overlay);
+          ret = gst_text_overlay_push_frame (overlay, buffer);
+ }
+ } else {
+ /* No text to overlay, push the frame as is */
+ GST_OBJECT_UNLOCK (overlay);
+ ret = gst_pad_push (overlay->srcpad, buffer);
+ }
+ }
- if (video_frame)
- gst_buffer_unref (video_frame);
+ g_free (text);
- return ret;
+  /* Update last_stop. Use clip_start: the buffer has already been
+   * pushed downstream on every path above and must not be
+   * dereferenced any more (clip_start was written into its
+   * timestamp before the push, so the value is identical) */
+  gst_segment_set_last_stop (overlay->segment, GST_FORMAT_TIME, clip_start);
+ } else { /* Out of segment */
+ GST_OBJECT_UNLOCK (overlay);
+ GST_DEBUG_OBJECT (overlay, "buffer out of segment discarding");
+ gst_buffer_unref (buffer);
}
+
+ gst_object_unref (overlay);
+
+ return ret;
}
static GstStateChangeReturn
gst_text_overlay_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstTextOverlay *overlay = GST_TEXT_OVERLAY (element);
+
+ /*GstTextOverlay *overlay = GST_TEXT_OVERLAY (element); */
switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED:
- gst_collect_pads_start (overlay->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
- /* need to unblock the collectpads before calling the
- * parent change_state so that streaming can finish */
- gst_collect_pads_stop (overlay->collect);
break;
default:
break;