* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
-
- /**
- * SECTION:element-appsink
- *
- * Appsink is a sink plugin that supports many different methods for making
- * the application get a handle on the GStreamer data in a pipeline. Unlike
- * most GStreamer elements, Appsink provides external API functions.
- *
- * For the documentation of the API, please see the
- * <link linkend="gst-plugins-base-libs-appsink">libgstapp</link> section in
- * the GStreamer Plugins Base Libraries documentation.
- *
- * Since: 0.10.22
- */
-
-
/**
* SECTION:gstappsink
* @short_description: Easy way for applications to extract buffers from a
GstBuffer * buffer);
static GstFlowReturn gst_app_sink_render_list (GstBaseSink * psink,
GstBufferList * list);
-static GstCaps *gst_app_sink_getcaps (GstBaseSink * psink);
+static GstCaps *gst_app_sink_getcaps (GstBaseSink * psink, GstCaps * filter);
static GstMiniObject *gst_app_sink_pull_object (GstAppSink * appsink);
static guint gst_app_sink_signals[LAST_SIGNAL] = { 0 };
-static void
-_do_init (GType filesrc_type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_app_sink_uri_handler_init,
- NULL,
- NULL
- };
- g_type_add_interface_static (filesrc_type, GST_TYPE_URI_HANDLER,
- &urihandler_info);
-}
-
-GST_BOILERPLATE_FULL (GstAppSink, gst_app_sink, GstBaseSink, GST_TYPE_BASE_SINK,
- _do_init);
+#define gst_app_sink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstAppSink, gst_app_sink, GST_TYPE_BASE_SINK,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+ gst_app_sink_uri_handler_init));
/* Can't use glib-genmarshal for this, as it doesn't know how to handle
* GstMiniObject-based types, which are a new fundamental type */
}
static void
-gst_app_sink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- GST_DEBUG_CATEGORY_INIT (app_sink_debug, "appsink", 0, "appsink element");
-
- gst_element_class_set_details_simple (element_class, "AppSink",
- "Generic/Sink", "Allow the application to get access to raw buffer",
- "David Schleef <ds@schleef.org>, Wim Taymans <wim.taymans@gmail.com>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_app_sink_template));
-}
-
-static void
gst_app_sink_class_init (GstAppSinkClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *element_class = (GstElementClass *) klass;
GstBaseSinkClass *basesink_class = (GstBaseSinkClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (app_sink_debug, "appsink", 0, "appsink element");
+
gobject_class->dispose = gst_app_sink_dispose;
gobject_class->finalize = gst_app_sink_finalize;
* GstAppSink::new-preroll:
* @appsink: the appsink element that emited the signal
*
- * Signal that a new preroll buffer is available.
+ * Signal that a new preroll buffer is available.
*
* This signal is emited from the steaming thread and only when the
- * "emit-signals" property is %TRUE.
+ * "emit-signals" property is %TRUE.
*
* The new preroll buffer can be retrieved with the "pull-preroll" action
* signal or gst_app_sink_pull_preroll() either from this signal callback
* Signal that a new buffer is available.
*
* This signal is emited from the steaming thread and only when the
- * "emit-signals" property is %TRUE.
+ * "emit-signals" property is %TRUE.
*
* The new buffer can be retrieved with the "pull-buffer" action
* signal or gst_app_sink_pull_buffer() either from this signal callback
* Signal that a new bufferlist is available.
*
* This signal is emited from the steaming thread and only when the
- * "emit-signals" property is %TRUE.
+ * "emit-signals" property is %TRUE.
*
* The new buffer can be retrieved with the "pull-buffer-list" action
* signal or gst_app_sink_pull_buffer_list() either from this signal callback
* when calling gst_app_sink_pull_buffer() or the "pull-buffer" action signal.
*
* If an EOS event was received before any buffers, this function returns
- * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
+ * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
*
* This function blocks until a preroll buffer or EOS is received or the appsink
- * element is set to the READY/NULL state.
+ * element is set to the READY/NULL state.
*
* Returns: a #GstBuffer or NULL when the appsink is stopped or EOS.
*/
* @appsink: the appsink element to emit this signal on
*
* This function blocks until a buffer or EOS becomes available or the appsink
- * element is set to the READY/NULL state.
+ * element is set to the READY/NULL state.
*
* This function will only return buffers when the appsink is in the PLAYING
* state. All rendered buffers will be put in a queue so that the application
- * can pull buffers at its own rate.
+ * can pull buffers at its own rate.
*
* Note that when the application does not pull buffers fast enough, the
* queued buffers could consume a lot of memory, especially when dealing with
* the "drop" and "max-buffers" properties.
*
* If an EOS event was received before any buffers, this function returns
- * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
+ * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
*
* Returns: a #GstBuffer or NULL when the appsink is stopped or EOS.
*/
* @appsink: the appsink element to emit this signal on
*
* This function blocks until a buffer list or EOS becomes available or the appsink
- * element is set to the READY/NULL state.
+ * element is set to the READY/NULL state.
*
* This function will only return bufferlists when the appsink is in the PLAYING
* state. All rendered bufferlists will be put in a queue so that the application
- * can pull bufferlists at its own rate.
+ * can pull bufferlists at its own rate.
*
* Note that when the application does not pull bufferlists fast enough, the
* queued bufferlists could consume a lot of memory, especially when dealing with
* the "drop" and "max-buffers" properties.
*
* If an EOS event was received before any buffers, this function returns
- * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
+ * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
*
* Returns: a #GstBufferList or NULL when the appsink is stopped or EOS.
*/
pull_buffer_list), NULL, NULL, gst_app_marshal_BUFFER__VOID,
GST_TYPE_BUFFER_LIST, 0, G_TYPE_NONE);
+ gst_element_class_set_details_simple (element_class, "AppSink",
+ "Generic/Sink", "Allow the application to get access to raw buffer",
+ "David Schleef <ds@schleef.org>, Wim Taymans <wim.taymans@gmail.com>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_app_sink_template));
+
basesink_class->unlock = gst_app_sink_unlock_start;
basesink_class->unlock_stop = gst_app_sink_unlock_stop;
basesink_class->start = gst_app_sink_start;
}
static void
-gst_app_sink_init (GstAppSink * appsink, GstAppSinkClass * klass)
+gst_app_sink_init (GstAppSink * appsink)
{
GstAppSinkPrivate *priv;
static GstFlowReturn
gst_app_sink_render_list (GstBaseSink * sink, GstBufferList * list)
{
- GstBufferListIterator *it;
GstFlowReturn flow;
GstAppSink *appsink;
- GstBuffer *group;
+ GstBuffer *buffer;
+ guint i, len;
appsink = GST_APP_SINK_CAST (sink);
* then and push them one-by-one */
GST_INFO_OBJECT (sink, "chaining each group in list as a merged buffer");
- it = gst_buffer_list_iterate (list);
+ len = gst_buffer_list_len (list);
- if (gst_buffer_list_iterator_next_group (it)) {
- do {
- group = gst_buffer_list_iterator_merge_group (it);
- if (group == NULL) {
- group = gst_buffer_new ();
- GST_DEBUG_OBJECT (sink, "chaining empty group");
- } else {
- GST_DEBUG_OBJECT (sink, "chaining group");
- }
- flow = gst_app_sink_render (sink, group);
- gst_buffer_unref (group);
- } while (flow == GST_FLOW_OK && gst_buffer_list_iterator_next_group (it));
- } else {
- GST_DEBUG_OBJECT (sink, "chaining empty group");
- group = gst_buffer_new ();
- flow = gst_app_sink_render (sink, group);
- gst_buffer_unref (group);
+ flow = GST_FLOW_OK;
+ for (i = 0; i < len; i++) {
+ buffer = gst_buffer_list_get (list, i);
+ flow = gst_app_sink_render (sink, buffer);
+ if (flow != GST_FLOW_OK)
+ break;
}
- gst_buffer_list_iterator_free (it);
-
return flow;
}
static GstCaps *
-gst_app_sink_getcaps (GstBaseSink * psink)
+gst_app_sink_getcaps (GstBaseSink * psink, GstCaps * filter)
{
GstCaps *caps;
GstAppSink *appsink = GST_APP_SINK_CAST (psink);
GstAppSinkPrivate *priv = appsink->priv;
GST_OBJECT_LOCK (appsink);
- if ((caps = priv->caps))
- gst_caps_ref (caps);
+ if ((caps = priv->caps)) {
+ if (filter)
+ caps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ else
+ gst_caps_ref (caps);
+ }
GST_DEBUG_OBJECT (appsink, "got caps %" GST_PTR_FORMAT, caps);
GST_OBJECT_UNLOCK (appsink);
* Set the capabilities on the appsink element. This function takes
* a copy of the caps structure. After calling this method, the sink will only
* accept caps that match @caps. If @caps is non-fixed, you must check the caps
- * on the buffers to get the actual used caps.
+ * on the buffers to get the actual used caps.
*
* Since: 0.10.22
*/
* when calling gst_app_sink_pull_buffer().
*
* If an EOS event was received before any buffers, this function returns
- * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
+ * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
*
* This function blocks until a preroll buffer or EOS is received or the appsink
- * element is set to the READY/NULL state.
+ * element is set to the READY/NULL state.
*
* Returns: a #GstBuffer or NULL when the appsink is stopped or EOS.
*
* @appsink: a #GstAppSink
*
* This function blocks until a buffer or EOS becomes available or the appsink
- * element is set to the READY/NULL state.
+ * element is set to the READY/NULL state.
*
* This function will only return buffers when the appsink is in the PLAYING
* state. All rendered buffers will be put in a queue so that the application
* especially when dealing with raw video frames.
*
* If an EOS event was received before any buffers, this function returns
- * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
+ * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
*
* Returns: a #GstBuffer or NULL when the appsink is stopped or EOS.
*
* @appsink: a #GstAppSink
*
* This function blocks until a buffer list or EOS becomes available or the
- * appsink element is set to the READY/NULL state.
+ * appsink element is set to the READY/NULL state.
*
* This function will only return buffer lists when the appsink is in the
* PLAYING state. All rendered buffer lists will be put in a queue so that
* video frames.
*
* If an EOS event was received before any buffer lists, this function returns
- * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
+ * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition.
*
* Returns: a #GstBufferList or NULL when the appsink is stopped or EOS.
*/
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
-
- /**
- * SECTION:element-appsrc
- *
- * The appsrc element can be used by applications to insert data into a
- * GStreamer pipeline. Unlike most GStreamer elements, Appsrc provides
- * external API functions.
- *
- * For the documentation of the API, please see the
- * <link linkend="gst-plugins-base-libs-appsrc">libgstapp</link> section in the
- * GStreamer Plugins Base Libraries documentation.
- *
- * Since: 0.10.22
- */
-
/**
* SECTION:gstappsrc
* @short_description: Easy way for applications to inject buffers into a
* For the stream and seekable modes, setting this property is optional but
* recommended.
*
- * When the application is finished pushing data into appsrc, it should call
+ * When the application is finished pushing data into appsrc, it should call
* gst_app_src_end_of_stream() or emit the end-of-stream action signal. After
* this call, no more buffers can be pushed into appsrc until a flushing seek
* happened or the state of the appsrc has gone through READY.
static guint gst_app_src_signals[LAST_SIGNAL] = { 0 };
-static void
-_do_init (GType filesrc_type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_app_src_uri_handler_init,
- NULL,
- NULL
- };
- g_type_add_interface_static (filesrc_type, GST_TYPE_URI_HANDLER,
- &urihandler_info);
-}
-
-GST_BOILERPLATE_FULL (GstAppSrc, gst_app_src, GstBaseSrc, GST_TYPE_BASE_SRC,
- _do_init);
-
-static void
-gst_app_src_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- GST_DEBUG_CATEGORY_INIT (app_src_debug, "appsrc", 0, "appsrc element");
-
- gst_element_class_set_details_simple (element_class, "AppSrc",
- "Generic/Source", "Allow the application to feed buffers to a pipeline",
- "David Schleef <ds@schleef.org>, Wim Taymans <wim.taymans@gmail.com>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_app_src_template));
-}
+#define gst_app_src_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstAppSrc, gst_app_src, GST_TYPE_BASE_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_app_src_uri_handler_init));
static void
gst_app_src_class_init (GstAppSrcClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *element_class = (GstElementClass *) klass;
GstBaseSrcClass *basesrc_class = (GstBaseSrcClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (app_src_debug, "appsrc", 0, "appsrc element");
+
gobject_class->dispose = gst_app_src_dispose;
gobject_class->finalize = gst_app_src_finalize;
* GstAppSrc::end-of-stream:
* @appsrc: the appsrc
*
- * Notify @appsrc that no more buffer are available.
+ * Notify @appsrc that no more buffers are available.
*/
gst_app_src_signals[SIGNAL_END_OF_STREAM] =
g_signal_new ("end-of-stream", G_TYPE_FROM_CLASS (klass),
end_of_stream), NULL, NULL, __gst_app_marshal_ENUM__VOID,
GST_TYPE_FLOW_RETURN, 0, G_TYPE_NONE);
+ gst_element_class_set_details_simple (element_class, "AppSrc",
+ "Generic/Source", "Allow the application to feed buffers to a pipeline",
+ "David Schleef <ds@schleef.org>, Wim Taymans <wim.taymans@gmail.com>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_app_src_template));
+
basesrc_class->create = gst_app_src_create;
basesrc_class->start = gst_app_src_start;
basesrc_class->stop = gst_app_src_stop;
}
static void
-gst_app_src_init (GstAppSrc * appsrc, GstAppSrcClass * klass)
+gst_app_src_init (GstAppSrc * appsrc)
{
GstAppSrcPrivate *priv;
gint64 desired_position;
gboolean res = FALSE;
- desired_position = segment->last_stop;
+ desired_position = segment->position;
GST_DEBUG_OBJECT (appsrc, "seeking to %" G_GINT64_FORMAT ", format %s",
desired_position, gst_format_get_name (segment->format));
GST_DEBUG_OBJECT (appsrc,
"Size changed from %" G_GINT64_FORMAT " to %" G_GINT64_FORMAT,
bsrc->segment.duration, priv->size);
- gst_segment_set_duration (&bsrc->segment, GST_FORMAT_BYTES, priv->size);
+ bsrc->segment.duration = priv->size;
GST_OBJECT_UNLOCK (appsrc);
gst_element_post_message (GST_ELEMENT (appsrc),
guint buf_size;
*buf = g_queue_pop_head (priv->queue);
- buf_size = GST_BUFFER_SIZE (*buf);
+ buf_size = gst_buffer_get_size (*buf);
GST_DEBUG_OBJECT (appsrc, "we have buffer %p of size %u", *buf, buf_size);
/* only update the offset when in random_access mode */
if (priv->stream_type == GST_APP_STREAM_TYPE_RANDOM_ACCESS)
priv->offset += buf_size;
- if (caps) {
- *buf = gst_buffer_make_metadata_writable (*buf);
- gst_buffer_set_caps (*buf, caps);
- }
/* signal that we removed an item */
g_cond_broadcast (priv->cond);
* a copy of the caps structure. After calling this method, the source will
* only produce caps that match @caps. @caps must be fixed and the caps on the
* buffers must match the caps or left NULL.
- *
+ *
* Since: 0.10.22
*/
void
* Get the configured caps on @appsrc.
*
* Returns: the #GstCaps produced by the source. gst_caps_unref() after usage.
- *
+ *
* Since: 0.10.22
*/
GstCaps *
* @size: the size to set
*
* Set the size of the stream in bytes. A value of -1 means that the size is
- * not known.
- *
+ * not known.
+ *
* Since: 0.10.22
*/
void
* @appsrc: a #GstAppSrc
*
* Get the size of the stream in bytes. A value of -1 means that the size is
- * not known.
+ * not known.
*
* Returns: the size of the stream previously set with gst_app_src_set_size();
- *
+ *
* Since: 0.10.22
*/
gint64
* Set the stream type on @appsrc. For seekable streams, the "seek" signal must
* be connected to.
*
- * A stream_type stream
- *
+ * A stream_type stream
+ *
* Since: 0.10.22
*/
void
* with gst_app_src_set_stream_type().
*
* Returns: the stream type.
- *
+ *
* Since: 0.10.22
*/
GstAppStreamType
* Set the maximum amount of bytes that can be queued in @appsrc.
* After the maximum amount of bytes are queued, @appsrc will emit the
* "enough-data" signal.
- *
+ *
* Since: 0.10.22
*/
void
* Get the maximum amount of bytes that can be queued in @appsrc.
*
* Returns: The maximum amount of bytes that can be queued.
- *
+ *
* Since: 0.10.22
*/
guint64
*
* Configure the @min and @max latency in @src. If @min is set to -1, the
* default latency calculations for pseudo-live sources will be used.
- *
+ *
* Since: 0.10.22
*/
void
* @max: the min latency
*
* Retrieve the min and max latencies in @min and @max respectively.
- *
+ *
* Since: 0.10.22
*/
void
if (!steal_ref)
gst_buffer_ref (buffer);
g_queue_push_tail (priv->queue, buffer);
- priv->queued_bytes += GST_BUFFER_SIZE (buffer);
+ priv->queued_bytes += gst_buffer_get_size (buffer);
g_cond_broadcast (priv->cond);
g_mutex_unlock (priv->mutex);
* Returns: #GST_FLOW_OK when the buffer was successfuly queued.
* #GST_FLOW_WRONG_STATE when @appsrc is not PAUSED or PLAYING.
* #GST_FLOW_UNEXPECTED when EOS occured.
- *
+ *
* Since: 0.10.22
*/
GstFlowReturn
*
* Returns: #GST_FLOW_OK when the EOS was successfuly queued.
* #GST_FLOW_WRONG_STATE when @appsrc is not PAUSED or PLAYING.
- *
+ *
* Since: 0.10.22
*/
GstFlowReturn
priv = appsrc->priv;
g_mutex_lock (priv->mutex);
- /* can't accept buffers when we are flushing. We can accept them when we are
+ /* can't accept buffers when we are flushing. We can accept them when we are
* EOS although it will not do anything. */
if (priv->flushing)
goto flushing;
/**
* GstAudioFilter:
- * @basetransform: Element parent class
*
* Base class for audio filters with the same format for input and output.
*
GType gst_audio_filter_get_type (void);
void gst_audio_filter_class_add_pad_templates (GstAudioFilterClass * klass,
- const GstCaps * allowed_caps);
+ GstCaps * allowed_caps);
G_END_DECLS
GstClockTime eos_time;
- gboolean do_time_offset;
/* number of microseconds we alow timestamps or clock slaving to drift
* before resyncing */
guint64 drift_tolerance;
}
-#define _do_init(bla) \
+#define _do_init \
GST_DEBUG_CATEGORY_INIT (gst_base_audio_sink_debug, "baseaudiosink", 0, "baseaudiosink element");
-
-GST_BOILERPLATE_FULL (GstBaseAudioSink, gst_base_audio_sink, GstBaseSink,
+#define gst_base_audio_sink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstBaseAudioSink, gst_base_audio_sink,
GST_TYPE_BASE_SINK, _do_init);
static void gst_base_audio_sink_dispose (GObject * object);
static void gst_base_audio_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
+#if 0
static GstStateChangeReturn gst_base_audio_sink_async_play (GstBaseSink *
basesink);
+#endif
static GstStateChangeReturn gst_base_audio_sink_change_state (GstElement *
element, GstStateChange transition);
static gboolean gst_base_audio_sink_activate_pull (GstBaseSink * basesink,
/* static guint gst_base_audio_sink_signals[LAST_SIGNAL] = { 0 }; */
static void
-gst_base_audio_sink_base_init (gpointer g_class)
-{
-}
-
-static void
gst_base_audio_sink_class_init (GstBaseAudioSinkClass * klass)
{
GObjectClass *gobject_class;
GST_DEBUG_FUNCPTR (gst_base_audio_sink_get_times);
gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_base_audio_sink_setcaps);
gstbasesink_class->fixate = GST_DEBUG_FUNCPTR (gst_base_audio_sink_fixate);
+#if 0
gstbasesink_class->async_play =
GST_DEBUG_FUNCPTR (gst_base_audio_sink_async_play);
+#endif
gstbasesink_class->activate_pull =
GST_DEBUG_FUNCPTR (gst_base_audio_sink_activate_pull);
}
static void
-gst_base_audio_sink_init (GstBaseAudioSink * baseaudiosink,
- GstBaseAudioSinkClass * g_class)
+gst_base_audio_sink_init (GstBaseAudioSink * baseaudiosink)
{
- GstPluginFeature *feature;
GstBaseSink *basesink;
baseaudiosink->priv = GST_BASE_AUDIO_SINK_GET_PRIVATE (baseaudiosink);
/* install some custom pad_query functions */
gst_pad_set_query_function (GST_BASE_SINK_PAD (baseaudiosink),
GST_DEBUG_FUNCPTR (gst_base_audio_sink_query_pad));
-
- baseaudiosink->priv->do_time_offset = TRUE;
-
- /* check the factory, pulsesink < 0.10.17 does the timestamp offset itself so
- * we should not do ourselves */
- feature =
- GST_PLUGIN_FEATURE_CAST (GST_ELEMENT_CLASS (g_class)->elementfactory);
- GST_DEBUG ("created from factory %p", feature);
-
- /* HACK for old pulsesink that did the time_offset themselves */
- if (feature) {
- if (strcmp (gst_plugin_feature_get_name (feature), "pulsesink") == 0) {
- if (!gst_plugin_feature_check_version (feature, 0, 10, 17)) {
- /* we're dealing with an old pulsesink, we need to disable time corection */
- GST_DEBUG ("disable time offset");
- baseaudiosink->priv->do_time_offset = FALSE;
- }
- }
- }
}
static void
GstBaseAudioSink *sink = GST_BASE_AUDIO_SINK (bsink);
GstRingBufferSpec *spec;
GstClockTime now;
+ GstClockTime crate_num, crate_denom;
if (!sink->ringbuffer)
return FALSE;
GST_DEBUG_OBJECT (sink, "release old ringbuffer");
- /* get current time, updates the last_time */
+ /* get current time, updates the last_time. When the subclass has a clock that
+ * restarts from 0 when a new format is negotiated, it will call
+ * gst_audio_clock_reset() which will use this last_time to create an offset
+ * so that time from the clock keeps on increasing monotonically. */
now = gst_clock_get_time (sink->provided_clock);
GST_DEBUG_OBJECT (sink, "time was %" GST_TIME_FORMAT, GST_TIME_ARGS (now));
gst_ring_buffer_activate (sink->ringbuffer, TRUE);
}
+ /* due to possible changes in the spec file we should recalibrate the clock */
+ gst_clock_get_calibration (sink->provided_clock, NULL, NULL,
+ &crate_num, &crate_denom);
+ gst_clock_set_calibration (sink->provided_clock,
+ gst_clock_get_internal_time (sink->provided_clock), now, crate_num,
+ crate_denom);
+
/* calculate actual latency and buffer times.
* FIXME: In 0.11, store the latency_time internally in ns */
spec->latency_time = gst_util_uint64_scale (spec->segsize,
/* now wait till we played everything */
gst_base_audio_sink_drain (sink);
break;
- case GST_EVENT_NEWSEGMENT:
- {
- gdouble rate;
-
- /* we only need the rate */
- gst_event_parse_new_segment_full (event, NULL, &rate, NULL, NULL,
- NULL, NULL, NULL);
-
- GST_DEBUG_OBJECT (sink, "new segment rate of %f", rate);
- break;
- }
default:
break;
}
G_GINT64_FORMAT, align, maxdrift);
} else {
/* calculate sample diff in seconds for error message */
- gint64 diff_s = gst_util_uint64_scale_int (diff, GST_SECOND,
- ringbuf->spec.rate);
+ gint64 diff_s =
+ gst_util_uint64_scale_int (diff, GST_SECOND, ringbuf->spec.rate);
/* timestamps drifted apart from previous samples too much, we need to
* resync. We log this as an element warning. */
GST_WARNING_OBJECT (sink,
GstBaseAudioSinkClass *bclass;
GstBaseAudioSink *sink;
GstRingBuffer *ringbuf;
- gint64 diff, align, ctime, cstop;
+ gint64 diff, align;
+ guint64 ctime, cstop;
+ gsize offset;
guint8 *data;
- guint size;
+ gsize size;
guint samples, written;
gint bps;
gint accum;
bps = ringbuf->spec.bytes_per_sample;
- size = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
if (G_UNLIKELY (size % bps) != 0)
goto wrong_size;
GST_TIME_FORMAT ", samples %u", GST_TIME_ARGS (time), in_offset,
GST_TIME_ARGS (bsink->segment.start), samples);
- data = GST_BUFFER_DATA (buf);
+ offset = 0;
/* if not valid timestamp or we can't clip or sync, try to play
* sample ASAP */
render_stop = render_start + samples;
GST_DEBUG_OBJECT (sink,
"Buffer of size %u has no time. Using render_start=%" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (buf), render_start);
+ size, render_start);
/* we don't have a start so we don't know stop either */
stop = -1;
goto no_sync;
GST_DEBUG_OBJECT (sink, "clipping start to %" GST_TIME_FORMAT " %"
G_GUINT64_FORMAT " samples", GST_TIME_ARGS (ctime), diff);
samples -= diff;
- data += diff * bps;
+ offset += diff * bps;
time = ctime;
}
diff = stop - cstop;
GST_TIME_ARGS (render_start), GST_TIME_ARGS (render_stop));
/* bring to position in the ringbuffer */
- if (sink->priv->do_time_offset) {
- time_offset =
- GST_AUDIO_CLOCK_CAST (sink->provided_clock)->abidata.ABI.time_offset;
- GST_DEBUG_OBJECT (sink,
- "time offset %" GST_TIME_FORMAT, GST_TIME_ARGS (time_offset));
- if (render_start > time_offset)
- render_start -= time_offset;
- else
- render_start = 0;
- if (render_stop > time_offset)
- render_stop -= time_offset;
- else
- render_stop = 0;
- }
+ time_offset =
+ GST_AUDIO_CLOCK_CAST (sink->provided_clock)->abidata.ABI.time_offset;
+ GST_DEBUG_OBJECT (sink,
+ "time offset %" GST_TIME_FORMAT, GST_TIME_ARGS (time_offset));
+ if (render_start > time_offset)
+ render_start -= time_offset;
+ else
+ render_start = 0;
+ if (render_stop > time_offset)
+ render_stop -= time_offset;
+ else
+ render_stop = 0;
/* and bring the time to the rate corrected offset in the buffer */
render_start = gst_util_uint64_scale_int (render_start,
/* we need to accumulate over different runs for when we get interrupted */
accum = 0;
align_next = TRUE;
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
do {
written =
- gst_ring_buffer_commit_full (ringbuf, &sample_offset, data, samples,
- out_samples, &accum);
+ gst_ring_buffer_commit_full (ringbuf, &sample_offset, data + offset,
+ samples, out_samples, &accum);
GST_DEBUG_OBJECT (sink, "wrote %u of %u", written, samples);
/* if we wrote all, we're done */
break;
samples -= written;
- data += written * bps;
+ offset += written * bps;
} while (TRUE);
+ gst_buffer_unmap (buf, data, size);
if (align_next)
sink->next_sample = sample_offset;
{
GST_DEBUG_OBJECT (sink, "preroll got interrupted: %d (%s)", ret,
gst_flow_get_name (ret));
+ gst_buffer_unmap (buf, data, size);
goto done;
}
sync_latency_failed:
GstBaseAudioSink *sink;
GstBuffer *buf;
GstFlowReturn ret;
+ gsize size;
basesink = GST_BASE_SINK (user_data);
sink = GST_BASE_AUDIO_SINK (user_data);
GST_LOG_OBJECT (basesink, "pulling %d bytes offset %" G_GUINT64_FORMAT
" to fill audio buffer", len, basesink->offset);
ret =
- gst_pad_pull_range (basesink->sinkpad, basesink->segment.last_stop, len,
+ gst_pad_pull_range (basesink->sinkpad, basesink->segment.position, len,
&buf);
if (ret != GST_FLOW_OK) {
goto error;
}
- GST_PAD_PREROLL_LOCK (basesink->sinkpad);
+ GST_BASE_SINK_PREROLL_LOCK (basesink);
if (basesink->flushing)
goto flushing;
if (ret != GST_FLOW_OK)
goto preroll_error;
- if (len != GST_BUFFER_SIZE (buf)) {
+ size = gst_buffer_get_size (buf);
+
+ if (len != size) {
GST_INFO_OBJECT (basesink,
- "got different size than requested from sink pad: %u != %u", len,
- GST_BUFFER_SIZE (buf));
- len = MIN (GST_BUFFER_SIZE (buf), len);
+ "got different size than requested from sink pad: %u != %u", len, size);
+ len = MIN (size, len);
}
- basesink->segment.last_stop += len;
+ basesink->segment.position += len;
- memcpy (data, GST_BUFFER_DATA (buf), len);
- GST_PAD_PREROLL_UNLOCK (basesink->sinkpad);
+ gst_buffer_extract (buf, 0, data, len);
+ GST_BASE_SINK_PREROLL_UNLOCK (basesink);
GST_PAD_STREAM_UNLOCK (basesink->sinkpad);
{
GST_DEBUG_OBJECT (sink, "we are flushing");
gst_ring_buffer_pause (rbuf);
- GST_PAD_PREROLL_UNLOCK (basesink->sinkpad);
+ GST_BASE_SINK_PREROLL_UNLOCK (basesink);
GST_PAD_STREAM_UNLOCK (basesink->sinkpad);
return;
}
{
GST_DEBUG_OBJECT (sink, "error %s", gst_flow_get_name (ret));
gst_ring_buffer_pause (rbuf);
- GST_PAD_PREROLL_UNLOCK (basesink->sinkpad);
+ GST_BASE_SINK_PREROLL_UNLOCK (basesink);
GST_PAD_STREAM_UNLOCK (basesink->sinkpad);
return;
}
return ret;
}
+#if 0
/* should be called with the LOCK */
static GstStateChangeReturn
gst_base_audio_sink_async_play (GstBaseSink * basesink)
return GST_STATE_CHANGE_SUCCESS;
}
+#endif
static GstStateChangeReturn
gst_base_audio_sink_change_state (GstElement * element,
GST_BUFTYPE_MPEG4_AAC,
} GstBufferFormatType;
+ /**
+ * GstBufferFormat:
+ * @GST_UNKNOWN: unspecified
+ * @GST_S8: integer signed 8 bit
+ * @GST_U8: integer unsigned 8 bit
+ * @GST_S16_LE: integer signed 16 bit little endian
+ * @GST_S16_BE: integer signed 16 bit big endian
+ * @GST_U16_LE: integer unsigned 16 bit little endian
+ * @GST_U16_BE: integer unsigned 16 bit big endian
+ * @GST_S24_LE: integer signed 24 bit little endian
+ * @GST_S24_BE: integer signed 24 bit big endian
+ * @GST_U24_LE: integer unsigned 24 bit little endian
+ * @GST_U24_BE: integer unsigned 24 bit big endian
+ * @GST_S32_LE: integer signed 32 bit little endian
+ * @GST_S32_BE: integer signed 32 bit big endian
+ * @GST_U32_LE: integer unsigned 32 bit little endian
+ * @GST_U32_BE: integer unsigned 32 bit big endian
+ * @GST_S24_3LE: integer signed 24 bit little endian packed in 3 bytes
+ * @GST_S24_3BE: integer signed 24 bit big endian packed in 3 bytes
+ * @GST_U24_3LE: integer unsigned 24 bit little endian packed in 3 bytes
+ * @GST_U24_3BE: integer unsigned 24 bit big endian packed in 3 bytes
+ * @GST_S20_3LE: integer signed 20 bit little endian packed in 3 bytes
+ * @GST_S20_3BE: integer signed 20 bit big endian packed in 3 bytes
+ * @GST_U20_3LE: integer unsigned 20 bit little endian packed in 3 bytes
+ * @GST_U20_3BE: integer unsigned 20 bit big endian packed in 3 bytes
+ * @GST_S18_3LE: integer signed 18 bit little endian packed in 3 bytes
+ * @GST_S18_3BE: integer signed 18 bit big endian packed in 3 bytes
+ * @GST_U18_3LE: integer unsigned 18 bit little endian packed in 3 bytes
+ * @GST_U18_3BE: integer unsigned 18 bit big endian packed in 3 bytes
+ * @GST_FLOAT32_LE: floating 32 bit little endian
+ * @GST_FLOAT32_BE: floating 32 bit big endian
+ * @GST_FLOAT64_LE: floating 64 bit little endian
+ * @GST_FLOAT64_BE: floating 64 bit big endian
+ * @GST_MU_LAW: mu-law
+ * @GST_A_LAW: a-law
+ * @GST_IMA_ADPCM: ima adpcm
+ * @GST_MPEG: mpeg audio (but not aac)
+ * @GST_GSM: gsm
+ * @GST_IEC958: IEC958 frames
+ * @GST_AC3: ac3
+ * @GST_EAC3: eac3
+ * @GST_DTS: dts
+ * @GST_MPEG2_AAC: mpeg-2 aac
+ * @GST_MPEG4_AAC: mpeg-4 aac
+ *
+ * The detailed format of the samples in the ringbuffer.
+ */
typedef enum
{
GST_UNKNOWN,
GCond *cond;
gboolean open;
gboolean acquired;
- GstBuffer *data;
+ guint8 *memory;
+ gsize size;
GstRingBufferSpec spec;
GstRingBufferSegState *segstate;
gint samples_per_seg;
* SECTION:gstbasertpaudiopayload
* @short_description: Base class for audio RTP payloader
*
- * <refsect2>
- * <para>
* Provides a base class for audio RTP payloaders for frame or sample based
* audio codecs (constant bitrate)
- * </para>
- * <para>
+ *
* This class derives from GstBaseRTPPayload. It can be used for payloading
* audio codecs. It will only work with constant bitrate codecs. It supports
* both frame based and sample based codecs. It takes care of packing up the
* equal to min-ptime (if set). If min-ptime is not set, any residual data is
* sent in a last RTP packet. In the case of frame based codecs, the resulting
* RTP packets always contain full frames.
- * </para>
+ *
+ * <refsect2>
* <title>Usage</title>
* <para>
* To use this base class, your child element needs to call either
static gboolean gst_base_rtp_payload_audio_handle_event (GstPad * pad,
GstEvent * event);
-GST_BOILERPLATE (GstBaseRTPAudioPayload, gst_base_rtp_audio_payload,
- GstBaseRTPPayload, GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_base_rtp_audio_payload_base_init (gpointer klass)
-{
-}
+#define gst_base_rtp_audio_payload_parent_class parent_class
+G_DEFINE_TYPE (GstBaseRTPAudioPayload, gst_base_rtp_audio_payload,
+ GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_base_rtp_audio_payload_class_init (GstBaseRTPAudioPayloadClass * klass)
}
static void
-gst_base_rtp_audio_payload_init (GstBaseRTPAudioPayload * payload,
- GstBaseRTPAudioPayloadClass * klass)
+gst_base_rtp_audio_payload_init (GstBaseRTPAudioPayload * payload)
{
payload->priv = GST_BASE_RTP_AUDIO_PAYLOAD_GET_PRIVATE (payload);
{
GstBaseRTPPayload *basepayload;
GstBaseRTPAudioPayloadPrivate *priv;
+ GstRTPBuffer rtp;
basepayload = GST_BASE_RTP_PAYLOAD_CAST (payload);
priv = payload->priv;
/* set payload type */
- gst_rtp_buffer_set_payload_type (buffer, basepayload->pt);
+ gst_rtp_buffer_map (buffer, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_payload_type (&rtp, basepayload->pt);
/* set marker bit for disconts */
if (priv->discont) {
GST_DEBUG_OBJECT (payload, "Setting marker and DISCONT");
- gst_rtp_buffer_set_marker (buffer, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
priv->discont = FALSE;
}
+ gst_rtp_buffer_unmap (&rtp);
+
GST_BUFFER_TIMESTAMP (buffer) = timestamp;
/* get the offset in RTP time */
GstBuffer *outbuf;
guint8 *payload;
GstFlowReturn ret;
+ GstRTPBuffer rtp;
basepayload = GST_BASE_RTP_PAYLOAD (baseaudiopayload);
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
memcpy (payload, data, payload_len);
+ gst_rtp_buffer_unmap (&rtp);
/* set metadata */
gst_base_rtp_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
priv = baseaudiopayload->priv;
basepayload = GST_BASE_RTP_PAYLOAD (baseaudiopayload);
- payload_len = GST_BUFFER_SIZE (buffer);
+ payload_len = gst_buffer_get_size (buffer);
GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
payload_len, GST_TIME_ARGS (timestamp));
if (priv->buffer_list) {
GstBufferList *list;
- GstBufferListIterator *it;
+ guint i, len;
list = gst_buffer_list_new ();
- it = gst_buffer_list_iterate (list);
+ len = gst_buffer_list_len (list);
- /* add both buffers to the buffer list */
- gst_buffer_list_iterator_add_group (it);
- gst_buffer_list_iterator_add (it, outbuf);
- gst_buffer_list_iterator_add (it, buffer);
-
- gst_buffer_list_iterator_free (it);
+ for (i = 0; i < len; i++) {
+ /* FIXME */
+ g_warning ("bufferlist not implemented");
+ gst_buffer_list_add (list, outbuf);
+ gst_buffer_list_add (list, buffer);
+ }
GST_DEBUG_OBJECT (baseaudiopayload, "Pushing list %p", list);
ret = gst_basertppayload_push_list (basepayload, list);
} else {
+ GstRTPBuffer rtp;
+
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
- memcpy (payload, GST_BUFFER_DATA (buffer), payload_len);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ gst_buffer_extract (buffer, 0, payload, payload_len);
+ gst_rtp_buffer_unmap (&rtp);
+
gst_buffer_unref (buffer);
GST_DEBUG_OBJECT (baseaudiopayload, "Pushing buffer %p", outbuf);
gst_base_rtp_audio_payload_push_buffer (baseaudiopayload, buffer,
timestamp);
} else {
+ GstRTPBuffer rtp;
+
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
gst_adapter_copy (adapter, payload, 0, payload_len);
gst_adapter_flush (adapter, payload_len);
+ gst_rtp_buffer_unmap (&rtp);
/* set metadata */
gst_base_rtp_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
"Calculated min_payload_len %u and max_payload_len %u",
min_payload_len, max_payload_len);
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
/* shortcut, we don't need to use the adapter when the packet can be pushed
* through directly. */
/* GStreamer
- * Copyright (C) <2005> Philippe Khalaf <burger@speedy.org>
+ * Copyright (C) <2005> Philippe Khalaf <burger@speedy.org>
* Copyright (C) <2005> Nokia Corporation <kai.vehmanen@nokia.com>
*
* This library is free software; you can redistribute it and/or
* SECTION:gstbasertpdepayload
* @short_description: Base class for RTP depayloader
*
- * <refsect2>
- * <para>
* Provides a base class for RTP depayloaders
- * </para>
- * </refsect2>
*/
#include "gstbasertpdepayload.h"
-#ifdef GST_DISABLE_DEPRECATED
-#define QUEUE_LOCK_INIT(base) (g_static_rec_mutex_init(&base->queuelock))
-#define QUEUE_LOCK_FREE(base) (g_static_rec_mutex_free(&base->queuelock))
-#define QUEUE_LOCK(base) (g_static_rec_mutex_lock(&base->queuelock))
-#define QUEUE_UNLOCK(base) (g_static_rec_mutex_unlock(&base->queuelock))
-#else
-/* otherwise it's already been defined in the header (FIXME 0.11)*/
-#endif
-
GST_DEBUG_CATEGORY_STATIC (basertpdepayload_debug);
#define GST_CAT_DEFAULT (basertpdepayload_debug)
LAST_SIGNAL
};
-#define DEFAULT_QUEUE_DELAY 0
-
enum
{
PROP_0,
- PROP_QUEUE_DELAY,
PROP_LAST
};
static gboolean gst_base_rtp_depayload_handle_event (GstBaseRTPDepayload *
filter, GstEvent * event);
-GST_BOILERPLATE (GstBaseRTPDepayload, gst_base_rtp_depayload, GstElement,
- GST_TYPE_ELEMENT);
+static GstElementClass *parent_class = NULL;
+static void gst_base_rtp_depayload_class_init (GstBaseRTPDepayloadClass *
+ klass);
+static void gst_base_rtp_depayload_init (GstBaseRTPDepayload * basertppayload,
+ GstBaseRTPDepayloadClass * klass);
-static void
-gst_base_rtp_depayload_base_init (gpointer klass)
+GType
+gst_base_rtp_depayload_get_type (void)
{
- /*GstElementClass *element_class = GST_ELEMENT_CLASS (klass); */
+ static GType base_rtp_depayload_type = 0;
+
+ if (g_once_init_enter ((gsize *) & base_rtp_depayload_type)) {
+ static const GTypeInfo base_rtp_depayload_info = {
+ sizeof (GstBaseRTPDepayloadClass),
+ NULL,
+ NULL,
+ (GClassInitFunc) gst_base_rtp_depayload_class_init,
+ NULL,
+ NULL,
+ sizeof (GstBaseRTPDepayload),
+ 0,
+ (GInstanceInitFunc) gst_base_rtp_depayload_init,
+ };
+
+ g_once_init_leave ((gsize *) & base_rtp_depayload_type,
+ g_type_register_static (GST_TYPE_ELEMENT, "GstBaseRTPDepayload",
+ &base_rtp_depayload_info, G_TYPE_FLAG_ABSTRACT));
+ }
+ return base_rtp_depayload_type;
}
static void
gobject_class->set_property = gst_base_rtp_depayload_set_property;
gobject_class->get_property = gst_base_rtp_depayload_get_property;
- /**
- * GstBaseRTPDepayload::queue-delay
- *
- * Control the amount of packets to buffer.
- *
- * Deprecated: Use a jitterbuffer or RTP session manager to delay packet
- * playback. This property has no effect anymore since 0.10.15.
- */
-#ifndef GST_REMOVE_DEPRECATED
- g_object_class_install_property (gobject_class, PROP_QUEUE_DELAY,
- g_param_spec_uint ("queue-delay", "Queue Delay",
- "Amount of ms to queue/buffer, deprecated", 0, G_MAXUINT,
- DEFAULT_QUEUE_DELAY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-#endif
-
gstelement_class->change_state = gst_base_rtp_depayload_change_state;
klass->set_gst_timestamp = gst_base_rtp_depayload_set_gst_timestamp;
gst_pad_use_fixed_caps (filter->srcpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
- filter->queue = g_queue_new ();
- filter->queue_delay = DEFAULT_QUEUE_DELAY;
-
gst_segment_init (&filter->segment, GST_FORMAT_UNDEFINED);
}
static void
gst_base_rtp_depayload_finalize (GObject * object)
{
- GstBaseRTPDepayload *filter = GST_BASE_RTP_DEPAYLOAD (object);
-
- g_queue_free (filter->queue);
-
G_OBJECT_CLASS (parent_class)->finalize (object);
}
guint32 rtptime;
gboolean discont;
gint gap;
+ GstRTPBuffer rtp;
filter = GST_BASE_RTP_DEPAYLOAD (GST_OBJECT_PARENT (pad));
priv = filter->priv;
priv->timestamp = timestamp;
priv->duration = GST_BUFFER_DURATION (in);
- seqnum = gst_rtp_buffer_get_seq (in);
- rtptime = gst_rtp_buffer_get_timestamp (in);
+ gst_rtp_buffer_map (in, GST_MAP_READ, &rtp);
+ seqnum = gst_rtp_buffer_get_seq (&rtp);
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+
discont = FALSE;
GST_LOG_OBJECT (filter, "discont %d, seqnum %u, rtptime %u, timestamp %"
/* we detected a seqnum discont but the buffer was not flagged with a discont,
* set the discont flag so that the subclass can throw away old data. */
priv->discont = TRUE;
- in = gst_buffer_make_metadata_writable (in);
+ in = gst_buffer_make_writable (in);
GST_BUFFER_FLAG_SET (in, GST_BUFFER_FLAG_DISCONT);
}
not_negotiated:
{
/* this is not fatal but should be filtered earlier */
- if (GST_BUFFER_CAPS (in) == NULL) {
- GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION,
- ("No RTP format was negotiated."),
- ("Input buffers need to have RTP caps set on them. This is usually "
- "achieved by setting the 'caps' property of the upstream source "
- "element (often udpsrc or appsrc), or by putting a capsfilter "
- "element before the depayloader and setting the 'caps' property "
- "on that. Also see http://cgit.freedesktop.org/gstreamer/"
- "gst-plugins-good/tree/gst/rtp/README"));
- } else {
- GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION,
- ("No RTP format was negotiated."),
- ("RTP caps on input buffer were rejected, most likely because they "
- "were incomplete or contained wrong values. Check the debug log "
- "for more information."));
- }
+ GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION,
+ ("No RTP format was negotiated."),
+ ("Input buffers need to have RTP caps set on them. This is usually "
+ "achieved by setting the 'caps' property of the upstream source "
+ "element (often udpsrc or appsrc), or by putting a capsfilter "
+ "element before the depayloader and setting the 'caps' property "
+ "on that. Also see http://cgit.freedesktop.org/gstreamer/"
+ "gst-plugins-good/tree/gst/rtp/README"));
gst_buffer_unref (in);
return GST_FLOW_NOT_NEGOTIATED;
}
filter->need_newsegment = TRUE;
filter->priv->next_seqnum = -1;
break;
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- gboolean update;
- gdouble rate;
- GstFormat fmt;
- gint64 start, stop, position;
-
- gst_event_parse_new_segment (event, &update, &rate, &fmt, &start, &stop,
- &position);
-
- gst_segment_set_newsegment (&filter->segment, update, rate, fmt,
- start, stop, position);
-
+ gst_event_copy_segment (event, &filter->segment);
/* don't pass the event downstream, we generate our own segment including
* the NTP time and other things we receive in caps */
forward = FALSE;
GstEvent *event;
GstClockTime stop;
GstBaseRTPDepayloadPrivate *priv;
+ GstSegment segment;
priv = filter->priv;
else
stop = -1;
- event = gst_event_new_new_segment_full (update, priv->play_speed,
- priv->play_scale, GST_FORMAT_TIME, position, stop,
- position + priv->npt_start);
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ segment.rate = priv->play_speed;
+ segment.applied_rate = priv->play_scale;
+ segment.start = 0;
+ segment.stop = stop;
+ segment.time = priv->npt_start;
+ segment.position = position;
+
+ event = gst_event_new_segment (&segment);
return event;
}
{
GstBaseRTPDepayload *depayload;
GstBaseRTPDepayloadClass *bclass;
- GstCaps *caps;
gboolean do_ts;
gboolean rtptime;
} HeaderData;
-static GstBufferListItem
-set_headers (GstBuffer ** buffer, guint group, guint idx, HeaderData * data)
+static gboolean
+set_headers (GstBuffer ** buffer, guint idx, HeaderData * data)
{
GstBaseRTPDepayload *depayload = data->depayload;
- *buffer = gst_buffer_make_metadata_writable (*buffer);
- gst_buffer_set_caps (*buffer, data->caps);
+ *buffer = gst_buffer_make_writable (*buffer);
/* set the timestamp if we must and can */
if (data->bclass->set_gst_timestamp && data->do_ts)
depayload->priv->discont = FALSE;
}
- return GST_BUFFER_LIST_SKIP_GROUP;
+ return TRUE;
}
static GstFlowReturn
HeaderData data;
data.depayload = filter;
- data.caps = GST_PAD_CAPS (filter->srcpad);
data.rtptime = rtptime;
data.do_ts = do_ts;
data.bclass = GST_BASE_RTP_DEPAYLOAD_GET_CLASS (filter);
gst_buffer_list_foreach (*blist, (GstBufferListFunc) set_headers, &data);
} else {
GstBuffer **buf = obj;
- set_headers (buf, 0, 0, &data);
+ set_headers (buf, 0, &data);
}
/* if this is the first buffer send a NEWSEGMENT */
filter = GST_BASE_RTP_DEPAYLOAD (object);
switch (prop_id) {
- case PROP_QUEUE_DELAY:
- filter->queue_delay = g_value_get_uint (value);
- break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
filter = GST_BASE_RTP_DEPAYLOAD (object);
switch (prop_id) {
- case PROP_QUEUE_DELAY:
- g_value_set_uint (value, filter->queue_delay);
- break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
/* GStreamer
- * Copyright (C) <2005> Philippe Khalaf <burger@speedy.org>
+ * Copyright (C) <2005> Philippe Khalaf <burger@speedy.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#define GST_BASE_RTP_DEPAYLOAD_SINKPAD(depayload) (GST_BASE_RTP_DEPAYLOAD (depayload)->sinkpad)
#define GST_BASE_RTP_DEPAYLOAD_SRCPAD(depayload) (GST_BASE_RTP_DEPAYLOAD (depayload)->srcpad)
-#ifndef GST_DISABLE_DEPRECATED
-/* this was presumably never meant to be public API, or should at least
- * have been prefixed if it was. Don't use. (FIXME: remove in 0.11) */
-#define QUEUE_LOCK_INIT(base) (g_static_rec_mutex_init(&base->queuelock))
-#define QUEUE_LOCK_FREE(base) (g_static_rec_mutex_free(&base->queuelock))
-#define QUEUE_LOCK(base) (g_static_rec_mutex_lock(&base->queuelock))
-#define QUEUE_UNLOCK(base) (g_static_rec_mutex_unlock(&base->queuelock))
-#endif
-
typedef struct _GstBaseRTPDepayload GstBaseRTPDepayload;
typedef struct _GstBaseRTPDepayloadClass GstBaseRTPDepayloadClass;
typedef struct _GstBaseRTPDepayloadPrivate GstBaseRTPDepayloadPrivate;
GstPad *sinkpad, *srcpad;
-#ifndef GST_REMOVE_DEPRECATED
- /* lock to protect the queue, deprecated */
- GStaticRecMutex queuelock;
-
- /* deprecated */
- gboolean thread_running;
- /* the releaser thread, deprecated */
- GThread *thread;
-#endif
-
/* this attribute must be set by the child */
guint clock_rate;
-#ifndef GST_REMOVE_DEPRECATED
- /* this value can be modified by the child if needed, deprecated */
- guint queue_delay;
-#endif
-
- /* we will queue up to RTP_QUEUEDELAY ms of packets,
- * reordering them if necessary
- * dropping any packets that are more than
- * RTP_QUEUEDELAY ms late, deprecated */
- GQueue *queue;
-
GstSegment segment;
gboolean need_newsegment;
gpointer _gst_reserved[GST_PADDING-1];
};
+ /**
+ * GstBaseRTPDepayloadClass:
+ * @parent_class: the parent class
+ * @set_caps: configure the depayloader
+ * @process: process incoming rtp packets
+ * @set_gst_timestamp: convert from RTP timestamp to GST timestamp
+ * @packet_lost: signal the depayloader about packet loss
+ * @handle_event: custom event handling
+ *
+ * Base class for RTP depayloaders.
+ */
struct _GstBaseRTPDepayloadClass
{
GstElementClass parent_class;
/* virtuals, inform the subclass of the caps. */
gboolean (*set_caps) (GstBaseRTPDepayload *filter, GstCaps *caps);
- /* non-pure function, default implementation in base class
- * this does buffering, reordering and dropping, deprecated */
- GstFlowReturn (*add_to_queue) (GstBaseRTPDepayload *filter, GstBuffer *in);
-
/* pure virtual function, child must use this to process incoming
* rtp packets. If the child returns a buffer without a valid timestamp,
* the timestamp of @in will be applied to the result buffer and the
* SECTION:gstbasertppayload
* @short_description: Base class for RTP payloader
*
- * <refsect2>
- * <para>
* Provides a base class for RTP payloaders
- * </para>
- * </refsect2>
*/
#ifdef HAVE_CONFIG_H
static void gst_basertppayload_finalize (GObject * object);
static gboolean gst_basertppayload_sink_setcaps (GstPad * pad, GstCaps * caps);
-static GstCaps *gst_basertppayload_sink_getcaps (GstPad * pad);
+static GstCaps *gst_basertppayload_sink_getcaps (GstPad * pad,
+ GstCaps * filter);
static gboolean gst_basertppayload_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_basertppayload_chain (GstPad * pad,
GstBuffer * buffer);
{
static GType basertppayload_type = 0;
- if (!basertppayload_type) {
+ if (g_once_init_enter ((gsize *) & basertppayload_type)) {
static const GTypeInfo basertppayload_info = {
sizeof (GstBaseRTPPayloadClass),
(GBaseInitFunc) gst_basertppayload_base_init,
(GInstanceInitFunc) gst_basertppayload_init,
};
- basertppayload_type =
+ g_once_init_leave ((gsize *) & basertppayload_type,
g_type_register_static (GST_TYPE_ELEMENT, "GstBaseRTPPayload",
- &basertppayload_info, G_TYPE_FLAG_ABSTRACT);
+ &basertppayload_info, G_TYPE_FLAG_ABSTRACT));
}
return basertppayload_type;
}
}
static GstCaps *
-gst_basertppayload_sink_getcaps (GstPad * pad)
+gst_basertppayload_sink_getcaps (GstPad * pad, GstCaps * filter)
{
GstBaseRTPPayload *basertppayload;
GstBaseRTPPayloadClass *basertppayload_class;
basertppayload_class = GST_BASE_RTP_PAYLOAD_GET_CLASS (basertppayload);
if (basertppayload_class->get_caps)
- caps = basertppayload_class->get_caps (basertppayload, pad);
+ caps = basertppayload_class->get_caps (basertppayload, pad, filter);
if (!caps) {
caps = GST_PAD_TEMPLATE_CAPS (GST_PAD_PAD_TEMPLATE (pad));
"using pad template %p with caps %p %" GST_PTR_FORMAT,
GST_PAD_PAD_TEMPLATE (pad), caps, caps);
- caps = gst_caps_ref (caps);
+ if (filter)
+ caps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ else
+ caps = gst_caps_ref (caps);
}
gst_object_unref (basertppayload);
res = gst_pad_event_default (pad, event);
gst_segment_init (&basertppayload->segment, GST_FORMAT_UNDEFINED);
break;
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- gboolean update;
- gdouble rate, arate;
- GstFormat fmt;
- gint64 start, stop, position;
GstSegment *segment;
segment = &basertppayload->segment;
-
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &fmt,
- &start, &stop, &position);
- gst_segment_set_newsegment_full (segment, update, rate, arate, fmt, start,
- stop, position);
+ gst_event_copy_segment (event, segment);
GST_DEBUG_OBJECT (basertppayload,
- "configured NEWSEGMENT update %d, rate %lf, applied rate %lf, "
- "format %d, "
- "%" G_GINT64_FORMAT " -- %" G_GINT64_FORMAT ", time %"
- G_GINT64_FORMAT ", accum %" G_GINT64_FORMAT, update, rate, arate,
- segment->format, segment->start, segment->stop, segment->time,
- segment->accum);
+ "configured SEGMENT %" GST_SEGMENT_FORMAT, segment);
/* fallthrough */
}
default:
payload->abidata.ABI.ptime = 0;
/* the peer caps can override some of the defaults */
- peercaps = gst_pad_peer_get_caps (payload->srcpad);
+ peercaps = gst_pad_peer_get_caps (payload->srcpad, srccaps);
if (peercaps == NULL) {
/* no peer caps, just add the other properties */
gst_caps_set_simple (srccaps,
gint pt;
guint max_ptime, ptime;
- /* peer provides caps we can use to fixate, intersect. This always returns a
- * writable caps. */
- temp = gst_caps_intersect (srccaps, peercaps);
+ /* peer provides caps we can use to fixate. They are already intersected
+ * with our srccaps, just make them writable */
+ temp = gst_caps_make_writable (peercaps);
gst_caps_unref (srccaps);
- gst_caps_unref (peercaps);
if (gst_caps_is_empty (temp)) {
gst_caps_unref (temp);
guint32 ssrc;
guint16 seqnum;
guint8 pt;
- GstCaps *caps;
GstClockTime timestamp;
guint64 offset;
guint32 rtptime;
} HeaderData;
-static GstBufferListItem
-find_timestamp (GstBuffer ** buffer, guint group, guint idx, HeaderData * data)
+static gboolean
+find_timestamp (GstBuffer ** buffer, guint idx, HeaderData * data)
{
data->timestamp = GST_BUFFER_TIMESTAMP (*buffer);
data->offset = GST_BUFFER_OFFSET (*buffer);
/* stop when we find a timestamp. We take whatever offset is associated with
* the timestamp (if any) to do perfect timestamps when we need to. */
if (data->timestamp != -1)
- return GST_BUFFER_LIST_END;
+ return FALSE;
else
- return GST_BUFFER_LIST_CONTINUE;
+ return TRUE;
}
-static GstBufferListItem
+static gboolean
set_headers (GstBuffer ** buffer, guint group, guint idx, HeaderData * data)
{
- gst_rtp_buffer_set_ssrc (*buffer, data->ssrc);
- gst_rtp_buffer_set_payload_type (*buffer, data->pt);
- gst_rtp_buffer_set_seq (*buffer, data->seqnum);
- gst_rtp_buffer_set_timestamp (*buffer, data->rtptime);
- gst_buffer_set_caps (*buffer, data->caps);
+ GstRTPBuffer rtp;
+
+ gst_rtp_buffer_map (*buffer, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_ssrc (&rtp, data->ssrc);
+ gst_rtp_buffer_set_payload_type (&rtp, data->pt);
+ gst_rtp_buffer_set_seq (&rtp, data->seqnum);
+ gst_rtp_buffer_set_timestamp (&rtp, data->rtptime);
+ gst_rtp_buffer_unmap (&rtp);
+
/* increment the seqnum for each buffer */
data->seqnum++;
- return GST_BUFFER_LIST_SKIP_GROUP;
+ return TRUE;
}
/* Updates the SSRC, payload type, seqnum and timestamp of the RTP buffer
data.seqnum = payload->seqnum;
data.ssrc = payload->current_ssrc;
data.pt = payload->pt;
- data.caps = GST_PAD_CAPS (payload->srcpad);
/* find the first buffer with a timestamp */
if (is_list) {
GST_LOG_OBJECT (payload,
"Preparing to push packet with size %d, seq=%d, rtptime=%u, timestamp %"
GST_TIME_FORMAT, (is_list) ? -1 :
- GST_BUFFER_SIZE (GST_BUFFER (obj)), payload->seqnum, data.rtptime,
+ gst_buffer_get_size (GST_BUFFER (obj)), payload->seqnum, data.rtptime,
GST_TIME_ARGS (data.timestamp));
if (g_atomic_int_compare_and_exchange (&payload->
} abidata;
};
+ /**
+ * GstBaseRTPPayloadClass:
+ * @parent_class: the parent class
+ * @set_caps: configure the payloader
+ * @handle_buffer: process data
+ * @handle_event: custom event handling
+ * @get_caps: get desired caps
+ *
+ * Base class for RTP payloaders.
+ */
struct _GstBaseRTPPayloadClass
{
GstElementClass parent_class;
GstFlowReturn (*handle_buffer) (GstBaseRTPPayload *payload,
GstBuffer *buffer);
gboolean (*handle_event) (GstPad * pad, GstEvent * event);
- GstCaps * (*get_caps) (GstBaseRTPPayload *payload, GstPad * pad);
+ GstCaps * (*get_caps) (GstBaseRTPPayload *payload, GstPad * pad, GstCaps * filter);
/*< private >*/
gpointer _gst_reserved[GST_PADDING-2];
switch (xmptag->type) {
case GstXmpTagTypeSeq:
return "rdf:Seq";
- default:
- g_assert_not_reached ();
case GstXmpTagTypeBag:
return "rdf:Bag";
+ default:
+ g_assert_not_reached ();
}
}
return;
}
- if (value < 0 || value > 100) {
+ if (value > 100) {
GST_WARNING ("Unsupported Rating tag %u (should be from 0 to 100), "
"ignoring", value);
return;
g_return_if_fail (tag != NULL);
- if (xmptag && xmptag->deserialize) {
+ if (xmptag->deserialize) {
xmptag->deserialize (xmptag, list, tag, xmptag->tag_name, v, pending_tags);
return;
}
* Since: 0.10.29
*/
GstTagList *
-gst_tag_list_from_xmp_buffer (const GstBuffer * buffer)
+gst_tag_list_from_xmp_buffer (GstBuffer * buffer)
{
GstTagList *list = NULL;
- const gchar *xps, *xp1, *xp2, *xpe, *ns, *ne;
- guint len, max_ft_len;
+ gchar *xps, *xp1, *xp2, *xpe, *ns, *ne;
+ gsize len, max_ft_len;
gboolean in_tag;
gchar *part, *pp;
guint i;
XmpTag *context_tag = NULL;
GstXmpNamespaceMap ns_map[] = {
- {"dc", NULL},
- {"exif", NULL},
- {"tiff", NULL},
- {"xap", NULL},
- {"photoshop", NULL},
- {"Iptc4xmpCore", NULL},
- {"Iptc4xmpExt", NULL},
+ {"dc", NULL}
+ ,
+ {"exif", NULL}
+ ,
+ {"tiff", NULL}
+ ,
+ {"xap", NULL}
+ ,
+ {"photoshop", NULL}
+ ,
+ {"Iptc4xmpCore", NULL}
+ ,
+ {"Iptc4xmpExt", NULL}
+ ,
{NULL, NULL}
};
xmp_tags_initialize ();
g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) > 0, NULL);
GST_LOG ("Starting xmp parsing");
- xps = (const gchar *) GST_BUFFER_DATA (buffer);
- len = GST_BUFFER_SIZE (buffer);
+ xps = gst_buffer_map (buffer, &len, NULL, GST_MAP_READ);
+ g_return_val_if_fail (len > 0, NULL);
+
xpe = &xps[len + 1];
/* check header and footer */
}
g_free (part);
+ gst_buffer_unmap (buffer, xps, len);
+
return list;
/* Errors */
XmpSerializationData serialization_data;
GString *data;
guint i;
+ gsize bsize;
+ gpointer bdata;
serialization_data.data = g_string_sized_new (4096);
serialization_data.schemas = schemas;
g_string_append_printf (data, "<?xpacket end=\"%c\"?>\n",
(read_only ? 'r' : 'w'));
+ bsize = data->len + 1;
+ bdata = g_string_free (data, FALSE);
+
buffer = gst_buffer_new ();
- GST_BUFFER_SIZE (buffer) = data->len + 1;
- GST_BUFFER_DATA (buffer) = (guint8 *) g_string_free (data, FALSE);
- GST_BUFFER_MALLOCDATA (buffer) = GST_BUFFER_DATA (buffer);
+ gst_buffer_take_memory (buffer,
+ gst_memory_new_wrapped (0, bdata, g_free, bsize, 0, bsize));
return buffer;
}
/**
* gst_video_convert_frame:
* @buf: a #GstBuffer
+ * @from_caps: the #GstCaps to convert from
* @to_caps: the #GstCaps to convert to
* @timeout: the maximum amount of time allowed for the processing.
- * @err: pointer to a #GError. Can be %NULL.
+ * @error: pointer to a #GError. Can be %NULL.
*
* Converts a raw video buffer into the specified output caps.
*
*
*/
GstBuffer *
-gst_video_convert_frame (GstBuffer * buf, const GstCaps * to_caps,
- GstClockTime timeout, GError ** error)
+gst_video_convert_frame (GstBuffer * buf, GstCaps * from_caps,
+ const GstCaps * to_caps, GstClockTime timeout, GError ** error)
{
GstMessage *msg;
GstBuffer *result = NULL;
- GError *error = NULL;
+ GError *err = NULL;
GstBus *bus;
- GstCaps *from_caps, *to_caps_copy = NULL;
+ GstCaps *to_caps_copy = NULL;
GstFlowReturn ret;
GstElement *pipeline, *src, *sink;
guint i, n;
g_return_val_if_fail (buf != NULL, NULL);
g_return_val_if_fail (to_caps != NULL, NULL);
- g_return_val_if_fail (GST_BUFFER_CAPS (buf) != NULL, NULL);
-
- from_caps = GST_BUFFER_CAPS (buf);
+ g_return_val_if_fail (from_caps != NULL, NULL);
to_caps_copy = gst_caps_new_empty ();
n = gst_caps_get_size (to_caps);
}
pipeline =
- build_convert_frame_pipeline (&src, &sink, from_caps, to_caps_copy,
- &error);
+ build_convert_frame_pipeline (&src, &sink, from_caps, to_caps_copy, &err);
if (!pipeline)
goto no_pipeline;
/* feed buffer in appsrc */
GST_DEBUG ("feeding buffer %p, size %u, caps %" GST_PTR_FORMAT,
- buf, GST_BUFFER_SIZE (buf), from_caps);
+ buf, gst_buffer_get_size (buf), from_caps);
g_signal_emit_by_name (src, "push-buffer", buf, &ret);
/* now see what happens. We either got an error somewhere or the pipeline
case GST_MESSAGE_ERROR:{
gchar *dbg = NULL;
- gst_message_parse_error (msg, &error, &dbg);
- if (error) {
- GST_ERROR ("Could not convert video frame: %s", error->message);
- GST_DEBUG ("%s [debug: %s]", error->message, GST_STR_NULL (dbg));
- if (err)
- *err = error;
+ gst_message_parse_error (msg, &err, &dbg);
+ if (err) {
+ GST_ERROR ("Could not convert video frame: %s", err->message);
+ GST_DEBUG ("%s [debug: %s]", err->message, GST_STR_NULL (dbg));
+ if (error)
+ *error = err;
else
- g_error_free (error);
+ g_error_free (err);
}
g_free (dbg);
break;
gst_message_unref (msg);
} else {
GST_ERROR ("Could not convert video frame: timeout during conversion");
- if (err)
- *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
+ if (error)
+ *error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
"Could not convert video frame: timeout during conversion");
}
{
gst_caps_unref (to_caps_copy);
- if (err)
- *err = error;
+ if (error)
+ *error = err;
else
- g_error_free (error);
+ g_error_free (err);
return NULL;
}
* @to_caps: the #GstCaps to convert to
* @timeout: the maximum amount of time allowed for the processing.
* @callback: %GstVideoConvertFrameCallback that will be called after conversion.
+ * @user_data: extra data that will be passed to the @callback
* @destroy_notify: %GDestroyNotify to be called after @user_data is not needed anymore
*
* Converts a raw video buffer into the specified output caps.
*
*/
void
-gst_video_convert_frame_async (GstBuffer * buf, const GstCaps * to_caps,
- GstClockTime timeout, GstVideoConvertFrameCallback callback,
- gpointer user_data, GDestroyNotify destroy_notify)
+gst_video_convert_frame_async (GstBuffer * buf, GstCaps * from_caps,
+ const GstCaps * to_caps, GstClockTime timeout,
+ GstVideoConvertFrameCallback callback, gpointer user_data,
+ GDestroyNotify destroy_notify)
{
GMainContext *context = NULL;
GError *error = NULL;
GstBus *bus;
- GstCaps *from_caps, *to_caps_copy = NULL;
+ GstCaps *to_caps_copy = NULL;
GstElement *pipeline, *src, *sink;
guint i, n;
GSource *source;
g_return_if_fail (buf != NULL);
g_return_if_fail (to_caps != NULL);
- g_return_if_fail (GST_BUFFER_CAPS (buf) != NULL);
+ g_return_if_fail (from_caps != NULL);
g_return_if_fail (callback != NULL);
context = g_main_context_get_thread_default ();
if (!context)
context = g_main_context_default ();
- from_caps = GST_BUFFER_CAPS (buf);
-
to_caps_copy = gst_caps_new_empty ();
n = gst_caps_get_size (to_caps);
for (i = 0; i < n; i++) {
*
* <refsect2>
* <para>
- * This library contains some helper functions and includes the
+ * This library contains some helper functions and includes the
* videosink and videofilter base classes.
* </para>
* </refsect2>
*
* A convenience function to retrieve a GValue holding the framerate
* from the caps on a pad.
- *
+ *
* The pad needs to have negotiated caps containing a framerate property.
*
* Returns: NULL if the pad has no configured caps or the configured caps
{
const GValue *fps;
gchar *fps_string;
-
- const GstCaps *caps = NULL;
+ GstCaps *caps = NULL;
GstStructure *structure;
/* get pad caps */
- caps = GST_PAD_CAPS (pad);
- if (caps == NULL) {
+ caps = gst_pad_get_current_caps (pad);
+ if (caps == NULL)
+ goto no_caps;
+
+ structure = gst_caps_get_structure (caps, 0);
+ if ((fps = gst_structure_get_value (structure, "framerate")) == NULL)
+ goto no_framerate;
+
+ if (!GST_VALUE_HOLDS_FRACTION (fps))
+ goto no_fraction;
+
+ fps_string = gst_value_serialize (fps);
+ GST_DEBUG ("Framerate request on pad %s:%s: %s",
+ GST_DEBUG_PAD_NAME (pad), fps_string);
+ g_free (fps_string);
+
+ gst_caps_unref (caps);
+
+ /* NOTE(review): fps points into the caps' structure; after dropping our
+  * ref it stays valid only while the pad keeps these caps as its current
+  * caps — confirm callers consume it before the caps can change */
+ return fps;
+
+ /* ERRORS */
+no_caps:
+ {
g_warning ("gstvideo: failed to get caps of pad %s:%s",
GST_DEBUG_PAD_NAME (pad));
return NULL;
}
-
- structure = gst_caps_get_structure (caps, 0);
- if ((fps = gst_structure_get_value (structure, "framerate")) == NULL) {
+no_framerate:
+ {
g_warning ("gstvideo: failed to get framerate property of pad %s:%s",
GST_DEBUG_PAD_NAME (pad));
+ gst_caps_unref (caps);
return NULL;
}
- if (!GST_VALUE_HOLDS_FRACTION (fps)) {
+no_fraction:
+ {
g_warning
("gstvideo: framerate property of pad %s:%s is not of type Fraction",
GST_DEBUG_PAD_NAME (pad));
+ gst_caps_unref (caps);
return NULL;
}
-
- fps_string = gst_value_serialize (fps);
- GST_DEBUG ("Framerate request on pad %s:%s: %s",
- GST_DEBUG_PAD_NAME (pad), fps_string);
- g_free (fps_string);
-
- return fps;
}
/**
*
* Inspect the caps of the provided pad and retrieve the width and height of
* the video frames it is configured for.
- *
+ *
* The pad needs to have negotiated caps containing width and height properties.
*
* Returns: TRUE if the width and height could be retrieved.
gboolean
gst_video_get_size (GstPad * pad, gint * width, gint * height)
{
- const GstCaps *caps = NULL;
+ GstCaps *caps = NULL;
GstStructure *structure;
gboolean ret;
g_return_val_if_fail (width != NULL, FALSE);
g_return_val_if_fail (height != NULL, FALSE);
- caps = GST_PAD_CAPS (pad);
-
- if (caps == NULL) {
- g_warning ("gstvideo: failed to get caps of pad %s:%s",
- GST_DEBUG_PAD_NAME (pad));
- return FALSE;
- }
+ caps = gst_pad_get_current_caps (pad);
+ if (caps == NULL)
+ goto no_caps;
structure = gst_caps_get_structure (caps, 0);
ret = gst_structure_get_int (structure, "width", width);
ret &= gst_structure_get_int (structure, "height", height);
+ gst_caps_unref (caps);
- if (!ret) {
- g_warning ("gstvideo: failed to get size properties on pad %s:%s",
- GST_DEBUG_PAD_NAME (pad));
- return FALSE;
- }
+ if (!ret)
+ goto no_size;
GST_DEBUG ("size request on pad %s:%s: %dx%d",
GST_DEBUG_PAD_NAME (pad), width ? *width : -1, height ? *height : -1);
return TRUE;
+
+ /* ERROR */
+no_caps:
+ {
+ g_warning ("gstvideo: failed to get caps of pad %s:%s",
+ GST_DEBUG_PAD_NAME (pad));
+ return FALSE;
+ }
+no_size:
+ {
+ g_warning ("gstvideo: failed to get size properties on pad %s:%s",
+ GST_DEBUG_PAD_NAME (pad));
+ return FALSE;
+ }
}
/**
* @display_par_n: Numerator of the pixel aspect ratio of the display device
* @display_par_d: Denominator of the pixel aspect ratio of the display device
*
- * Given the Pixel Aspect Ratio and size of an input video frame, and the
- * pixel aspect ratio of the intended display device, calculates the actual
+ * Given the Pixel Aspect Ratio and size of an input video frame, and the
+ * pixel aspect ratio of the intended display device, calculates the actual
* display ratio the video will be rendered with.
*
- * Returns: A boolean indicating success and a calculated Display Ratio in the
- * dar_n and dar_d parameters.
- * The return value is FALSE in the case of integer overflow or other error.
+ * Returns: A boolean indicating success and a calculated Display Ratio in the
+ * dar_n and dar_d parameters.
+ * The return value is FALSE in the case of integer overflow or other error.
*
* Since: 0.10.7
*/
* halfway-sited vertically), "jpeg" for JPEG and Theora style
* chroma siting (halfway-sited both horizontally and vertically).
* Other chroma site values are possible, but uncommon.
- *
+ *
* When no chroma site is specified in the caps, it should be assumed
* to be "mpeg2".
*
blue_mask = GST_VIDEO_COMP1_MASK_15_INT;
break;
default:
- return NULL;
+ g_assert_not_reached ();
}
} else if (bpp != 8) {
- return NULL;
+ g_assert_not_reached ();
}
caps = gst_caps_new_simple ("video/x-raw-rgb",
* @blue_mask: blue bit mask
*
* Converts red, green, blue bit masks into the corresponding
- * #GstVideoFormat.
+ * #GstVideoFormat.
*
* Since: 0.10.16
*
/**
* gst_video_format_has_alpha:
* @format: a #GstVideoFormat
- *
+ *
* Returns TRUE or FALSE depending on if the video format provides an
* alpha channel.
*
/**
* gst_video_format_get_component_depth:
* @format: a #GstVideoFormat
- *
+ * @component: the video component (e.g. 0 for 'R' in RGB)
+ *
* Returns the number of bits used to encode an individual pixel of
- * a given component. Typically this is 8, although higher and lower
+ * a given @component. Typically this is 8, although higher and lower
* values are possible for some formats.
*
* Since: 0.10.33
GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2) *
(GST_ROUND_UP_2 (height) / 2);
}
- return 0;
+ break;
case GST_VIDEO_FORMAT_YV12: /* same as I420, but components 1+2 swapped */
if (component == 0)
return 0;
GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2) *
(GST_ROUND_UP_2 (height) / 2);
}
- return 0;
+ break;
case GST_VIDEO_FORMAT_YUY2:
if (component == 0)
return 0;
return 1;
if (component == 2)
return 3;
- return 0;
+ break;
case GST_VIDEO_FORMAT_YVYU:
if (component == 0)
return 0;
return 3;
if (component == 2)
return 1;
- return 0;
+ break;
case GST_VIDEO_FORMAT_UYVY:
if (component == 0)
return 1;
return 0;
if (component == 2)
return 2;
- return 0;
+ break;
case GST_VIDEO_FORMAT_AYUV:
if (component == 0)
return 1;
return 3;
if (component == 3)
return 0;
- return 0;
+ break;
case GST_VIDEO_FORMAT_RGBx:
case GST_VIDEO_FORMAT_RGBA:
if (component == 0)
return 2;
if (component == 3)
return 3;
- return 0;
+ break;
case GST_VIDEO_FORMAT_BGRx:
case GST_VIDEO_FORMAT_BGRA:
if (component == 0)
return 0;
if (component == 3)
return 3;
- return 0;
+ break;
case GST_VIDEO_FORMAT_xRGB:
case GST_VIDEO_FORMAT_ARGB:
if (component == 0)
return 3;
if (component == 3)
return 0;
- return 0;
+ break;
case GST_VIDEO_FORMAT_xBGR:
case GST_VIDEO_FORMAT_ABGR:
if (component == 0)
return 1;
if (component == 3)
return 0;
- return 0;
+ break;
case GST_VIDEO_FORMAT_RGB:
case GST_VIDEO_FORMAT_v308:
if (component == 0)
return 1;
if (component == 2)
return 2;
- return 0;
+ break;
case GST_VIDEO_FORMAT_BGR:
if (component == 0)
return 2;
return 1;
if (component == 2)
return 0;
- return 0;
+ break;
case GST_VIDEO_FORMAT_Y41B:
if (component == 0)
return 0;
if (component == 2)
return (GST_ROUND_UP_4 (width) +
(GST_ROUND_UP_16 (width) / 4)) * height;
- return 0;
+ break;
case GST_VIDEO_FORMAT_Y42B:
if (component == 0)
return 0;
return GST_ROUND_UP_4 (width) * height;
if (component == 2)
return (GST_ROUND_UP_4 (width) + (GST_ROUND_UP_8 (width) / 2)) * height;
- return 0;
+ break;
case GST_VIDEO_FORMAT_Y444:
return GST_ROUND_UP_4 (width) * height * component;
case GST_VIDEO_FORMAT_v210:
return 2;
if (component == 2)
return 6;
- return 0;
+ break;
case GST_VIDEO_FORMAT_NV12:
if (component == 0)
return 0;
return GST_ROUND_UP_4 (width) * GST_ROUND_UP_2 (height);
if (component == 2)
return GST_ROUND_UP_4 (width) * GST_ROUND_UP_2 (height) + 1;
+ break;
case GST_VIDEO_FORMAT_NV21:
if (component == 0)
return 0;
return GST_ROUND_UP_4 (width) * GST_ROUND_UP_2 (height) + 1;
if (component == 2)
return GST_ROUND_UP_4 (width) * GST_ROUND_UP_2 (height);
+ break;
case GST_VIDEO_FORMAT_GRAY8:
case GST_VIDEO_FORMAT_GRAY16_BE:
case GST_VIDEO_FORMAT_GRAY16_LE:
2 * GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2) *
(GST_ROUND_UP_2 (height) / 2);
}
+ break;
case GST_VIDEO_FORMAT_RGB8_PALETTED:
return 0;
case GST_VIDEO_FORMAT_YUV9:
GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4) *
(GST_ROUND_UP_4 (height) / 4);
}
- return 0;
+ break;
case GST_VIDEO_FORMAT_YVU9:
if (component == 0)
return 0;
}
if (component == 2)
return GST_ROUND_UP_4 (width) * height;
- return 0;
+ break;
case GST_VIDEO_FORMAT_IYU1:
if (component == 0)
return 1;
return 0;
if (component == 2)
return 4;
+ break;
case GST_VIDEO_FORMAT_ARGB64:
case GST_VIDEO_FORMAT_AYUV64:
if (component == 0)
return 6;
if (component == 3)
return 0;
- return 0;
+ break;
default:
- return 0;
+ break;
}
+ GST_WARNING ("unhandled format %d or component %d", format, component);
+ return 0;
}
/**
GST_STATIC_CAPS ("text/plain; text/x-pango-markup")
);
-static void gst_sub_parse_base_init (GstSubParseClass * klass);
-static void gst_sub_parse_class_init (GstSubParseClass * klass);
-static void gst_sub_parse_init (GstSubParse * subparse);
static gboolean gst_sub_parse_src_event (GstPad * pad, GstEvent * event);
static gboolean gst_sub_parse_src_query (GstPad * pad, GstQuery * query);
static GstFlowReturn gst_sub_parse_chain (GstPad * sinkpad, GstBuffer * buf);
-static GstElementClass *parent_class = NULL;
-
-GType
-gst_sub_parse_get_type (void)
-{
- static GType sub_parse_type = 0;
-
- if (!sub_parse_type) {
- static const GTypeInfo sub_parse_info = {
- sizeof (GstSubParseClass),
- (GBaseInitFunc) gst_sub_parse_base_init,
- NULL,
- (GClassInitFunc) gst_sub_parse_class_init,
- NULL,
- NULL,
- sizeof (GstSubParse),
- 0,
- (GInstanceInitFunc) gst_sub_parse_init,
- };
-
- sub_parse_type = g_type_register_static (GST_TYPE_ELEMENT,
- "GstSubParse", &sub_parse_info, 0);
- }
-
- return sub_parse_type;
-}
-
-static void
-gst_sub_parse_base_init (GstSubParseClass * klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_templ));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_templ));
- gst_element_class_set_details_simple (element_class,
- "Subtitle parser", "Codec/Parser/Subtitle",
- "Parses subtitle (.sub) files into text streams",
- "Gustavo J. A. M. Carneiro <gjc@inescporto.pt>, "
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-}
+#define gst_sub_parse_parent_class parent_class
+G_DEFINE_TYPE (GstSubParse, gst_sub_parse, GST_TYPE_ELEMENT);
static void
gst_sub_parse_dispose (GObject * object)
GObjectClass *object_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- parent_class = g_type_class_peek_parent (klass);
-
object_class->dispose = gst_sub_parse_dispose;
object_class->set_property = gst_sub_parse_set_property;
object_class->get_property = gst_sub_parse_get_property;
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_templ));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_templ));
+ gst_element_class_set_details_simple (element_class,
+ "Subtitle parser", "Codec/Parser/Subtitle",
+ "Parses subtitle (.sub) files into text streams",
+ "Gustavo J. A. M. Carneiro <gjc@inescporto.pt>, "
+ "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
+
element_class->change_state = gst_sub_parse_change_state;
g_object_class_install_property (object_class, PROP_ENCODING,
ret = gst_pad_peer_query (self->sinkpad, query);
} else {
ret = TRUE;
- gst_query_set_position (query, GST_FORMAT_TIME,
- self->segment.last_stop);
+ gst_query_set_position (query, GST_FORMAT_TIME, self->segment.position);
}
}
case GST_QUERY_SEEKING:
case GST_EVENT_SEEK:
{
GstFormat format;
+ GstSeekFlags flags;
GstSeekType start_type, stop_type;
gint64 start, stop;
gdouble rate;
gboolean update;
- gst_event_parse_seek (event, &rate, &format, &self->segment_flags,
+ gst_event_parse_seek (event, &rate, &format, &flags,
&start_type, &start, &stop_type, &stop);
if (format != GST_FORMAT_TIME) {
/* Convert that seek to a seeking in bytes at position 0,
FIXME: could use an index */
ret = gst_pad_push_event (self->sinkpad,
- gst_event_new_seek (rate, GST_FORMAT_BYTES, self->segment_flags,
+ gst_event_new_seek (rate, GST_FORMAT_BYTES, flags,
GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_NONE, 0));
if (ret) {
/* Apply the seek to our segment */
- gst_segment_set_seek (&self->segment, rate, format, self->segment_flags,
+ gst_segment_do_seek (&self->segment, rate, format, flags,
start_type, start, stop_type, stop, &update);
GST_DEBUG_OBJECT (self, "segment after seek: %" GST_SEGMENT_FORMAT,
const gchar *line_split;
gchar *line_chunk;
guint start_frame, end_frame;
- gint64 clip_start = 0, clip_stop = 0;
+ guint64 clip_start = 0, clip_stop = 0;
gboolean in_seg = FALSE;
GString *markup;
gchar *ret;
case 2:
{
/* No need to parse that text if it's out of segment */
- gint64 clip_start = 0, clip_stop = 0;
+ guint64 clip_start = 0, clip_stop = 0;
gboolean in_seg = FALSE;
/* Check our segment start/stop */
case 1:
{
/* No need to parse that text if it's out of segment */
- gint64 clip_start = 0, clip_stop = 0;
+ guint64 clip_start = 0, clip_stop = 0;
gboolean in_seg = FALSE;
/* Check our segment start/stop */
return NULL;
case 1:
{ /* No need to parse that text if it's out of segment */
- gint64 clip_start = 0, clip_stop = 0;
+ guint64 clip_start = 0, clip_stop = 0;
gboolean in_seg = FALSE;
/* Check our segment start/stop */
return NULL;
case 1:
{
- gint64 clip_start = 0, clip_stop = 0;
+ guint64 clip_start = 0, clip_stop = 0;
gboolean in_seg;
gchar *ret;
switch (regtype) {
case GST_SUB_PARSE_REGEX_MDVDSUB:
result =
- (gpointer) g_regex_new ("^\\{[0-9]+\\}\\{[0-9]+\\}", 0, 0, &gerr);
+ (gpointer) g_regex_new ("^\\{[0-9]+\\}\\{[0-9]+\\}",
+ G_REGEX_RAW | G_REGEX_OPTIMIZE, 0, &gerr);
if (result == NULL) {
g_warning ("Compilation of mdvd regex failed: %s", gerr->message);
g_error_free (gerr);
result = (gpointer) g_regex_new ("^([ 0-9]){0,3}[0-9]\\s*(\x0d)?\x0a"
"[ 0-9][0-9]:[ 0-9][0-9]:[ 0-9][0-9][,.][ 0-9]{0,2}[0-9]"
" +--> +([ 0-9])?[0-9]:[ 0-9][0-9]:[ 0-9][0-9][,.][ 0-9]{0,2}[0-9]",
- 0, 0, &gerr);
+ G_REGEX_RAW | G_REGEX_OPTIMIZE, 0, &gerr);
if (result == NULL) {
g_warning ("Compilation of subrip regex failed: %s", gerr->message);
g_error_free (gerr);
break;
case GST_SUB_PARSE_REGEX_DKS:
result = (gpointer) g_regex_new ("^\\[[0-9]+:[0-9]+:[0-9]+\\].*",
- 0, 0, &gerr);
+ G_REGEX_RAW | G_REGEX_OPTIMIZE, 0, &gerr);
if (result == NULL) {
g_warning ("Compilation of dks regex failed: %s", gerr->message);
g_error_free (gerr);
gboolean discont;
gsize consumed;
gchar *input = NULL;
+ const guint8 *data;
+ gsize avail;
discont = GST_BUFFER_IS_DISCONT (buf);
* subtitles which are discontinuous by nature. */
}
- self->offset = GST_BUFFER_OFFSET (buf) + GST_BUFFER_SIZE (buf);
+ self->offset = GST_BUFFER_OFFSET (buf) + gst_buffer_get_size (buf);
self->next_offset = self->offset;
gst_adapter_push (self->adapter, buf);
- input =
- convert_encoding (self, (const gchar *) gst_adapter_peek (self->adapter,
- gst_adapter_available (self->adapter)),
- (gsize) gst_adapter_available (self->adapter), &consumed);
+ avail = gst_adapter_available (self->adapter);
+ data = gst_adapter_map (self->adapter, avail);
+ input = convert_encoding (self, (const gchar *) data, avail, &consumed);
if (input && consumed > 0) {
self->textbuf = g_string_append (self->textbuf, input);
- gst_adapter_flush (self->adapter, consumed);
+ gst_adapter_unmap (self->adapter, consumed);
+ } else {
+ gst_adapter_unmap (self->adapter, 0);
}
g_free (input);
{
GstFlowReturn ret = GST_FLOW_OK;
GstCaps *caps = NULL;
- gchar *line, *subtitle;
+ gchar *line, *subtitle, *data;
+ gsize size;
if (self->first_buffer) {
- self->detected_encoding =
- detect_encoding ((gchar *) GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ self->detected_encoding = detect_encoding (data, size);
+ gst_buffer_unmap (buf, data, size);
self->first_buffer = FALSE;
self->state.fps_n = self->fps_n;
self->state.fps_d = self->fps_d;
guint subtitle_len = strlen (subtitle);
/* +1 for terminating NUL character */
- ret = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
- GST_BUFFER_OFFSET_NONE, subtitle_len + 1,
- GST_PAD_CAPS (self->srcpad), &buf);
-
- if (ret == GST_FLOW_OK) {
- /* copy terminating NUL character as well */
- memcpy (GST_BUFFER_DATA (buf), subtitle, subtitle_len + 1);
- GST_BUFFER_SIZE (buf) = subtitle_len;
- GST_BUFFER_TIMESTAMP (buf) = self->state.start_time;
- GST_BUFFER_DURATION (buf) = self->state.duration;
-
- /* in some cases (e.g. tmplayer) we can only determine the duration
- * of a text chunk from the timestamp of the next text chunk; in those
- * cases, we probably want to limit the duration to something
- * reasonable, so we don't end up showing some text for e.g. 40 seconds
- * just because nothing else is being said during that time */
- if (self->state.max_duration > 0 && GST_BUFFER_DURATION_IS_VALID (buf)) {
- if (GST_BUFFER_DURATION (buf) > self->state.max_duration)
- GST_BUFFER_DURATION (buf) = self->state.max_duration;
- }
+ buf = gst_buffer_new_and_alloc (subtitle_len + 1);
+
+ /* copy terminating NUL character as well */
+ gst_buffer_fill (buf, 0, subtitle, subtitle_len + 1);
+ gst_buffer_set_size (buf, subtitle_len);
+
+ GST_BUFFER_TIMESTAMP (buf) = self->state.start_time;
+ GST_BUFFER_DURATION (buf) = self->state.duration;
+
+ /* in some cases (e.g. tmplayer) we can only determine the duration
+ * of a text chunk from the timestamp of the next text chunk; in those
+ * cases, we probably want to limit the duration to something
+ * reasonable, so we don't end up showing some text for e.g. 40 seconds
+ * just because nothing else is being said during that time */
+ if (self->state.max_duration > 0 && GST_BUFFER_DURATION_IS_VALID (buf)) {
+ if (GST_BUFFER_DURATION (buf) > self->state.max_duration)
+ GST_BUFFER_DURATION (buf) = self->state.max_duration;
+ }
- gst_segment_set_last_stop (&self->segment, GST_FORMAT_TIME,
- self->state.start_time);
+ self->segment.position = self->state.start_time;
- GST_DEBUG_OBJECT (self, "Sending text '%s', %" GST_TIME_FORMAT " + %"
- GST_TIME_FORMAT, subtitle, GST_TIME_ARGS (self->state.start_time),
- GST_TIME_ARGS (self->state.duration));
+ GST_DEBUG_OBJECT (self, "Sending text '%s', %" GST_TIME_FORMAT " + %"
+ GST_TIME_FORMAT, subtitle, GST_TIME_ARGS (self->state.start_time),
+ GST_TIME_ARGS (self->state.duration));
- ret = gst_pad_push (self->srcpad, buf);
- }
+ ret = gst_pad_push (self->srcpad, buf);
/* move this forward (the tmplayer parser needs this) */
if (self->state.duration != GST_CLOCK_TIME_NONE)
GST_LOG_OBJECT (self, "pushing newsegment event with %" GST_SEGMENT_FORMAT,
&self->segment);
- gst_pad_push_event (self->srcpad, gst_event_new_new_segment (FALSE,
- self->segment.rate, self->segment.format,
- self->segment.last_stop, self->segment.stop, self->segment.time));
+ gst_pad_push_event (self->srcpad, gst_event_new_segment (&self->segment));
self->need_segment = FALSE;
}
self->parser_type == GST_SUB_PARSE_FORMAT_TMPLAYER ||
self->parser_type == GST_SUB_PARSE_FORMAT_MPL2 ||
self->parser_type == GST_SUB_PARSE_FORMAT_QTTEXT) {
+ gchar term_chars[] = { '\n', '\n', '\0' };
GstBuffer *buf = gst_buffer_new_and_alloc (2 + 1);
GST_DEBUG ("EOS. Pushing remaining text (if any)");
- GST_BUFFER_DATA (buf)[0] = '\n';
- GST_BUFFER_DATA (buf)[1] = '\n';
- GST_BUFFER_DATA (buf)[2] = '\0'; /* play it safe */
- GST_BUFFER_SIZE (buf) = 2;
+ gst_buffer_fill (buf, 0, term_chars, 3);
+ gst_buffer_set_size (buf, 2);
+
GST_BUFFER_OFFSET (buf) = self->offset;
gst_sub_parse_chain (pad, buf);
}
ret = gst_pad_event_default (pad, event);
break;
}
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate;
- gint64 start, stop, time;
- gboolean update;
-
- gst_event_parse_new_segment (event, &update, &rate, &format, &start,
- &stop, &time);
-
- GST_DEBUG_OBJECT (self, "newsegment (%s)", gst_format_get_name (format));
-
- if (format == GST_FORMAT_TIME) {
- gst_segment_set_newsegment (&self->segment, update, rate, format,
- start, stop, time);
- } else {
- /* if not time format, we'll either start with a 0 timestamp anyway or
- * it's following a seek in which case we'll have saved the requested
- * seek segment and don't want to overwrite it (remember that on a seek
- * we always just seek back to the start in BYTES format and just throw
- * away all text that's before the requested position; if the subtitles
- * come from an upstream demuxer, it won't be able to handle our BYTES
- * seek request and instead send us a newsegment from the seek request
- * it received via its video pads instead, so all is fine then too) */
- }
-
+ gst_event_copy_segment (event, &self->segment);
+ GST_DEBUG_OBJECT (self, "newsegment (%s)",
+ gst_format_get_name (self->segment.format));
+
+ /* if not time format, we'll either start with a 0 timestamp anyway or
+ * it's following a seek in which case we'll have saved the requested
+ * seek segment and don't want to overwrite it (remember that on a seek
+ * we always just seek back to the start in BYTES format and just throw
+ * away all text that's before the requested position; if the subtitles
+ * come from an upstream demuxer, it won't be able to handle our BYTES
+ * seek request and instead send us a newsegment from the seek request
+ * it received via its video pads instead, so all is fine then too) */
ret = TRUE;
gst_event_unref (event);
break;
break;
}
- ret = parent_class->change_state (element, transition);
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (ret == GST_STATE_CHANGE_FAILURE)
return ret;
}
}
converted_str = gst_convert_to_utf8 (str, 128, enc, &tmp, &err);
- if (converted_str == NULL) {
- GST_DEBUG ("Charset conversion failed: %s", err->message);
- g_error_free (err);
- g_free (str);
- return;
- } else {
+ if (converted_str != NULL) {
g_free (str);
str = converted_str;
}