GSource *source;
guint id;
+ gboolean time_provider;
+ GstNetTimeProvider *nettime;
+
gboolean is_live;
gboolean seekable;
gboolean buffering;
//#define DEFAULT_PROTOCOLS GST_RTSP_LOWER_TRANS_UDP_MCAST
#define DEFAULT_EOS_SHUTDOWN FALSE
#define DEFAULT_BUFFER_SIZE 0x80000
+#define DEFAULT_TIME_PROVIDER FALSE
/* define to dump received RTCP packets */
#undef DUMP_STATS
PROP_EOS_SHUTDOWN,
PROP_BUFFER_SIZE,
PROP_ELEMENT,
+ PROP_TIME_PROVIDER,
PROP_LAST
};
enum
{
SIGNAL_NEW_STREAM,
+ SIGNAL_REMOVED_STREAM,
SIGNAL_PREPARED,
SIGNAL_UNPREPARED,
SIGNAL_NEW_STATE,
GstMessage * message);
static void finish_unprepare (GstRTSPMedia * media);
static gboolean default_unprepare (GstRTSPMedia * media);
+static gboolean
+default_convert_range (GstRTSPMedia * media, GstRTSPTimeRange * range,
+ GstRTSPRangeUnit unit);
static guint gst_rtsp_media_signals[SIGNAL_LAST] = { 0 };
"The GstBin to use for streaming the media", GST_TYPE_ELEMENT,
G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class, PROP_EOS_SHUTDOWN,
+ g_param_spec_boolean ("time-provider", "Time Provider",
+ "Use a NetTimeProvider for clients",
+ DEFAULT_TIME_PROVIDER, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
gst_rtsp_media_signals[SIGNAL_NEW_STREAM] =
g_signal_new ("new-stream", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET (GstRTSPMediaClass, new_stream), NULL, NULL,
g_cclosure_marshal_generic, G_TYPE_NONE, 1, GST_TYPE_RTSP_STREAM);
+ gst_rtsp_media_signals[SIGNAL_REMOVED_STREAM] =
+ g_signal_new ("removed-stream", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPMediaClass, removed_stream),
+ NULL, NULL, g_cclosure_marshal_generic, G_TYPE_NONE, 1,
+ GST_TYPE_RTSP_STREAM);
+
gst_rtsp_media_signals[SIGNAL_PREPARED] =
g_signal_new ("prepared", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET (GstRTSPMediaClass, prepared), NULL, NULL,
klass->handle_message = default_handle_message;
klass->unprepare = default_unprepare;
+ klass->convert_range = default_convert_range;
}
static void
priv->protocols = DEFAULT_PROTOCOLS;
priv->eos_shutdown = DEFAULT_EOS_SHUTDOWN;
priv->buffer_size = DEFAULT_BUFFER_SIZE;
+ priv->time_provider = DEFAULT_TIME_PROVIDER;
}
static void
if (priv->pipeline)
gst_object_unref (priv->pipeline);
+ if (priv->nettime)
+ gst_object_unref (priv->nettime);
gst_object_unref (priv->element);
if (priv->auth)
g_object_unref (priv->auth);
case PROP_BUFFER_SIZE:
g_value_set_uint (value, gst_rtsp_media_get_buffer_size (media));
break;
+ case PROP_TIME_PROVIDER:
+ g_value_set_boolean (value, gst_rtsp_media_is_time_provider (media));
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, propid, pspec);
}
case PROP_BUFFER_SIZE:
gst_rtsp_media_set_buffer_size (media, g_value_get_uint (value));
break;
+ case PROP_TIME_PROVIDER:
+ gst_rtsp_media_use_time_provider (media, g_value_get_boolean (value));
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, propid, pspec);
}
{
GstRTSPMediaPrivate *priv;
GstElement *old;
+ GstNetTimeProvider *nettime;
g_return_if_fail (GST_IS_RTSP_MEDIA (media));
g_return_if_fail (GST_IS_PIPELINE (pipeline));
g_mutex_lock (&priv->lock);
old = priv->pipeline;
priv->pipeline = GST_ELEMENT_CAST (pipeline);
+ nettime = priv->nettime;
+ priv->nettime = NULL;
g_mutex_unlock (&priv->lock);
if (old)
gst_object_unref (old);
+ if (nettime)
+ gst_object_unref (nettime);
+
gst_object_ref (priv->element);
gst_bin_add (GST_BIN_CAST (pipeline), priv->element);
}
}
/**
+ * gst_rtsp_media_use_time_provider:
+ * @media: a #GstRTSPMedia
+ *
+ * Set @media to provide a GstNetTimeProvider.
+ */
+void
+gst_rtsp_media_use_time_provider (GstRTSPMedia * media, gboolean time_provider)
+{
+ GstRTSPMediaPrivate *priv;
+
+ g_return_if_fail (GST_IS_RTSP_MEDIA (media));
+
+ priv = media->priv;
+
+ g_mutex_lock (&priv->lock);
+ priv->time_provider = time_provider;
+ g_mutex_unlock (&priv->lock);
+}
+
+/**
+ * gst_rtsp_media_is_time_provider:
+ * @media: a #GstRTSPMedia
+ *
+ * Check if @media can provide a #GstNetTimeProvider for its pipeline clock.
+ *
+ * Use gst_rtsp_media_get_time_provider() to get the network clock.
+ *
+ * Returns: %TRUE if @media can provide a #GstNetTimeProvider.
+ */
+gboolean
+gst_rtsp_media_is_time_provider (GstRTSPMedia * media)
+{
+ GstRTSPMediaPrivate *priv;
+ gboolean res;
+
+ g_return_val_if_fail (GST_IS_RTSP_MEDIA (media), FALSE);
+
+ priv = media->priv;
+
+ g_mutex_unlock (&priv->lock);
+ res = priv->time_provider;
+ g_mutex_unlock (&priv->lock);
+
+ return res;
+}
+
+/**
* gst_rtsp_media_set_auth:
* @media: a #GstRTSPMedia
* @auth: a #GstRTSPAuth
return stream;
}
+static void
+gst_rtsp_media_remove_stream (GstRTSPMedia * media, GstRTSPStream * stream)
+{
+ GstRTSPMediaPrivate *priv;
+ GstPad *srcpad;
+
+ priv = media->priv;
+
+ g_mutex_lock (&priv->lock);
+ /* remove the ghostpad */
+ srcpad = gst_rtsp_stream_get_srcpad (stream);
+ gst_element_remove_pad (priv->element, srcpad);
+ gst_object_unref (srcpad);
+ /* now remove the stream */
+ g_object_ref (stream);
+ g_ptr_array_remove (priv->streams, stream);
+ g_mutex_unlock (&priv->lock);
+
+ g_signal_emit (media, gst_rtsp_media_signals[SIGNAL_REMOVED_STREAM], 0,
+ stream, NULL);
+
+ g_object_unref (stream);
+}
+
/**
* gst_rtsp_media_n_streams:
* @media: a #GstRTSPMedia
gst_rtsp_media_get_range_string (GstRTSPMedia * media, gboolean play,
GstRTSPRangeUnit unit)
{
+ GstRTSPMediaClass *klass;
GstRTSPMediaPrivate *priv;
gchar *result;
GstRTSPTimeRange range;
+ klass = GST_RTSP_MEDIA_GET_CLASS (media);
g_return_val_if_fail (GST_IS_RTSP_MEDIA (media), NULL);
+ g_return_val_if_fail (klass->convert_range != NULL, FALSE);
priv = media->priv;
g_mutex_unlock (&priv->lock);
g_rec_mutex_unlock (&priv->state_lock);
- gst_rtsp_range_convert_units (&range, unit);
+ if (!klass->convert_range (media, &range, unit)) {
+ goto conversion_failed;
+ }
result = gst_rtsp_range_to_string (&range);
g_rec_mutex_unlock (&priv->state_lock);
return NULL;
}
+conversion_failed:
+ {
+ GST_WARNING ("range conversion to unit %d failed", unit);
+ g_rec_mutex_unlock (&priv->state_lock);
+ return NULL;
+ }
}
/**
gboolean
gst_rtsp_media_seek (GstRTSPMedia * media, GstRTSPTimeRange * range)
{
+ GstRTSPMediaClass *klass;
GstRTSPMediaPrivate *priv;
GstSeekFlags flags;
gboolean res;
GstClockTime start, stop;
GstSeekType start_type, stop_type;
+ klass = GST_RTSP_MEDIA_GET_CLASS (media);
+
g_return_val_if_fail (GST_IS_RTSP_MEDIA (media), FALSE);
g_return_val_if_fail (range != NULL, FALSE);
+ g_return_val_if_fail (klass->convert_range != NULL, FALSE);
priv = media->priv;
start_type = stop_type = GST_SEEK_TYPE_NONE;
- if (!gst_rtsp_range_get_times (range, &start, &stop))
+ if (!klass->convert_range (media, range, GST_RTSP_RANGE_NPT))
goto not_supported;
+ gst_rtsp_range_get_times (range, &start, &stop);
GST_INFO ("got %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
not_supported:
{
g_rec_mutex_unlock (&priv->state_lock);
- GST_WARNING ("seek unit %d not supported", range->unit);
+ GST_WARNING ("conversion to npt not supported");
return FALSE;
}
}
case GST_MESSAGE_STREAM_STATUS:
break;
case GST_MESSAGE_ASYNC_DONE:
- if (!priv->adding) {
+ if (priv->adding) {
/* when we are dynamically adding pads, the addition of the udpsrc will
* temporarily produce ASYNC_DONE messages. We have to ignore them and
* wait for the final ASYNC_DONE after everything prerolled */
+ GST_INFO ("%p: ignoring ASYNC_DONE", media);
+ } else {
GST_INFO ("%p: got ASYNC_DONE", media);
collect_media_stats (media);
if (priv->status == GST_RTSP_MEDIA_STATUS_PREPARING)
gst_rtsp_media_set_status (media, GST_RTSP_MEDIA_STATUS_PREPARED);
- } else {
- GST_INFO ("%p: ignoring ASYNC_DONE", media);
}
break;
case GST_MESSAGE_EOS:
/* FIXME, element is likely not a payloader, find the payloader here */
stream = gst_rtsp_media_create_stream (media, element, pad);
+ g_object_set_data (G_OBJECT (pad), "gst-rtsp-dynpad-stream", stream);
+
GST_INFO ("pad added %s:%s, stream %p", GST_DEBUG_PAD_NAME (pad), stream);
g_rec_mutex_lock (&priv->state_lock);
}
static void
-no_more_pads_cb (GstElement * element, GstRTSPMedia * media)
+pad_removed_cb (GstElement * element, GstPad * pad, GstRTSPMedia * media)
{
GstRTSPMediaPrivate *priv = media->priv;
+ GstRTSPStream *stream;
+
+ stream = g_object_get_data (G_OBJECT (pad), "gst-rtsp-dynpad-stream");
+ if (stream == NULL)
+ return;
+
+ GST_INFO ("pad removed %s:%s, stream %p", GST_DEBUG_PAD_NAME (pad), stream);
+
+ g_rec_mutex_lock (&priv->state_lock);
+ gst_rtsp_stream_leave_bin (stream, GST_BIN (priv->pipeline), priv->rtpbin);
+ g_rec_mutex_unlock (&priv->state_lock);
+
+ gst_rtsp_media_remove_stream (media, stream);
+}
+
+static void
+remove_fakesink (GstRTSPMediaPrivate * priv)
+{
GstElement *fakesink;
g_mutex_lock (&priv->lock);
- GST_INFO ("no more pads");
- if ((fakesink = priv->fakesink)) {
+ if ((fakesink = priv->fakesink))
gst_object_ref (fakesink);
- priv->fakesink = NULL;
- g_mutex_unlock (&priv->lock);
+ priv->fakesink = NULL;
+ g_mutex_unlock (&priv->lock);
+ if (fakesink) {
gst_bin_remove (GST_BIN (priv->pipeline), fakesink);
gst_element_set_state (fakesink, GST_STATE_NULL);
gst_object_unref (fakesink);
}
}
+static void
+no_more_pads_cb (GstElement * element, GstRTSPMedia * media)
+{
+ GstRTSPMediaPrivate *priv = media->priv;
+
+ GST_INFO ("no more pads");
+ remove_fakesink (priv);
+}
+
+typedef struct _DynPaySignalHandlers DynPaySignalHandlers;
+
+/* Signal handler ids connected to one dynamic payloader element; stored on
+ * the element (object data) so they can be disconnected again later. */
+struct _DynPaySignalHandlers
+{
+  gulong pad_added_handler;     /* "pad-added" handler id */
+  gulong pad_removed_handler;   /* "pad-removed" handler id */
+  gulong no_more_pads_handler;  /* "no-more-pads" handler id */
+};
+
/**
* gst_rtsp_media_prepare:
* @media: a #GstRTSPMedia
*
- * Prepare @media for streaming. This function will create the pipeline and
- * other objects to manage the streaming.
+ * Prepare @media for streaming. This function will create the objects
+ * to manage the streaming. A pipeline must have been set on @media with
+ * gst_rtsp_media_take_pipeline().
*
* It will preroll the pipeline and collect vital information about the streams
* such as the duration.
for (walk = priv->dynamic; walk; walk = g_list_next (walk)) {
GstElement *elem = walk->data;
+ DynPaySignalHandlers *handlers = g_slice_new (DynPaySignalHandlers);
GST_INFO ("adding callbacks for dynamic element %p", elem);
- g_signal_connect (elem, "pad-added", (GCallback) pad_added_cb, media);
- g_signal_connect (elem, "no-more-pads", (GCallback) no_more_pads_cb, media);
+ handlers->pad_added_handler = g_signal_connect (elem, "pad-added",
+ (GCallback) pad_added_cb, media);
+ handlers->pad_removed_handler = g_signal_connect (elem, "pad-removed",
+ (GCallback) pad_removed_cb, media);
+ handlers->no_more_pads_handler = g_signal_connect (elem, "no-more-pads",
+ (GCallback) no_more_pads_cb, media);
+
+ g_object_set_data (G_OBJECT (elem), "gst-rtsp-dynpay-handlers", handlers);
/* we add a fakesink here in order to make the state change async. We remove
* the fakesink again in the no-more-pads callback. */
{
GstRTSPMediaPrivate *priv = media->priv;
gint i;
+ GList *walk;
GST_DEBUG ("shutting down");
gst_element_set_state (priv->pipeline, GST_STATE_NULL);
+ remove_fakesink (priv);
for (i = 0; i < priv->streams->len; i++) {
GstRTSPStream *stream;
gst_rtsp_stream_leave_bin (stream, GST_BIN (priv->pipeline), priv->rtpbin);
}
- g_ptr_array_set_size (priv->streams, 0);
+
+ /* remove the pad signal handlers */
+ for (walk = priv->dynamic; walk; walk = g_list_next (walk)) {
+ GstElement *elem = walk->data;
+ DynPaySignalHandlers *handlers;
+
+ handlers =
+ g_object_steal_data (G_OBJECT (elem), "gst-rtsp-dynpay-handlers");
+ g_assert (handlers != NULL);
+
+ g_signal_handler_disconnect (G_OBJECT (elem), handlers->pad_added_handler);
+ g_signal_handler_disconnect (G_OBJECT (elem),
+ handlers->pad_removed_handler);
+ g_signal_handler_disconnect (G_OBJECT (elem),
+ handlers->no_more_pads_handler);
+
+ g_slice_free (DynPaySignalHandlers, handlers);
+ }
gst_bin_remove (GST_BIN (priv->pipeline), priv->rtpbin);
priv->rtpbin = NULL;
- gst_object_unref (priv->pipeline);
- priv->pipeline = NULL;
+ if (priv->nettime)
+ gst_object_unref (priv->nettime);
+ priv->nettime = NULL;
priv->reused = TRUE;
priv->status = GST_RTSP_MEDIA_STATUS_UNPREPARED;
}
}
+/* should be called with state-lock */
+static GstClock *
+get_clock_unlocked (GstRTSPMedia * media)
+{
+ if (media->priv->status != GST_RTSP_MEDIA_STATUS_PREPARED) {
+ GST_DEBUG_OBJECT (media, "media was not prepared");
+ return NULL;
+ }
+ return gst_pipeline_get_clock (GST_PIPELINE_CAST (media->priv->pipeline));
+}
+
+/**
+ * gst_rtsp_media_get_clock:
+ * @media: a #GstRTSPMedia
+ *
+ * Get the clock that is used by the pipeline in @media.
+ *
+ * @media must be prepared before this method returns a valid clock object.
+ *
+ * Returns: the #GstClock used by @media. unref after usage.
+ */
+GstClock *
+gst_rtsp_media_get_clock (GstRTSPMedia * media)
+{
+ GstClock *clock;
+ GstRTSPMediaPrivate *priv;
+
+ g_return_val_if_fail (GST_IS_RTSP_MEDIA (media), NULL);
+
+ priv = media->priv;
+
+ g_rec_mutex_lock (&priv->state_lock);
+ clock = get_clock_unlocked (media);
+ g_rec_mutex_unlock (&priv->state_lock);
+
+ return clock;
+}
+
+/**
+ * gst_rtsp_media_get_base_time:
+ * @media: a #GstRTSPMedia
+ *
+ * Get the base_time that is used by the pipeline in @media.
+ *
+ * @media must be prepared before this method returns a valid base_time.
+ *
+ * Returns: the base_time used by @media.
+ */
+GstClockTime
+gst_rtsp_media_get_base_time (GstRTSPMedia * media)
+{
+ GstClockTime result;
+ GstRTSPMediaPrivate *priv;
+
+ g_return_val_if_fail (GST_IS_RTSP_MEDIA (media), GST_CLOCK_TIME_NONE);
+
+ priv = media->priv;
+
+ g_rec_mutex_lock (&priv->state_lock);
+ if (media->priv->status != GST_RTSP_MEDIA_STATUS_PREPARED)
+ goto not_prepared;
+
+ result = gst_element_get_base_time (media->priv->pipeline);
+ g_rec_mutex_unlock (&priv->state_lock);
+
+ return result;
+
+ /* ERRORS */
+not_prepared:
+ {
+ g_rec_mutex_unlock (&priv->state_lock);
+ GST_DEBUG_OBJECT (media, "media was not prepared");
+ return GST_CLOCK_TIME_NONE;
+ }
+}
+
+/**
+ * gst_rtsp_media_get_time_provider:
+ * @media: a #GstRTSPMedia
+ * @address: an address or NULL
+ * @port: a port or 0
+ *
+ * Get the #GstNetTimeProvider for the clock used by @media. The time provider
+ * will listen on @address and @port for client time requests.
+ *
+ * Returns: the #GstNetTimeProvider of @media.
+ */
+GstNetTimeProvider *
+gst_rtsp_media_get_time_provider (GstRTSPMedia * media, const gchar * address,
+ guint16 port)
+{
+ GstRTSPMediaPrivate *priv;
+ GstNetTimeProvider *provider = NULL;
+
+ g_return_val_if_fail (GST_IS_RTSP_MEDIA (media), NULL);
+
+ priv = media->priv;
+
+ g_rec_mutex_lock (&priv->state_lock);
+ if (priv->time_provider) {
+ if ((provider = priv->nettime) == NULL) {
+ GstClock *clock;
+
+ if (priv->time_provider && (clock = get_clock_unlocked (media))) {
+ provider = gst_net_time_provider_new (clock, address, port);
+ gst_object_unref (clock);
+
+ priv->nettime = provider;
+ }
+ }
+ }
+ g_rec_mutex_unlock (&priv->state_lock);
+
+ if (provider)
+ gst_object_ref (provider);
+
+ return provider;
+}
+
/**
* gst_rtsp_media_set_state:
* @media: a #GstRTSPMedia
GST_INFO ("state %s media %p", gst_element_state_get_name (state),
media);
priv->target_state = state;
- gst_element_set_state (priv->pipeline, state);
+ /* when we are buffering, don't update the state yet, this will be done
+ * when buffering finishes */
+ if (priv->buffering) {
+ GST_INFO ("Buffering busy, delay state change");
+ } else {
+ gst_element_set_state (priv->pipeline, state);
+ }
}
}
g_signal_emit (media, gst_rtsp_media_signals[SIGNAL_NEW_STATE], 0, state,
return FALSE;
}
}
+
+/* called with state-lock */
+static gboolean
+default_convert_range (GstRTSPMedia * media, GstRTSPTimeRange * range,
+ GstRTSPRangeUnit unit)
+{
+ return gst_rtsp_range_convert_units (range, unit);
+}