gulong buffer_ptreq_sig;
gulong buffer_ntpstop_sig;
gint percent;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gint prev_percent;
+#endif
/* the PT demuxer of the SSRC */
GstElement *demux;
gulong demux_newpad_sig;
create_stream (GstRtpBinSession * session, guint32 ssrc)
{
GstElement *buffer, *demux = NULL;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstElement *queue2 = NULL;
+#endif
GstRtpBinStream *stream;
GstRtpBin *rtpbin;
GstState target;
if (!rtpbin->ignore_pt)
if (!(demux = gst_element_factory_make ("rtpptdemux", NULL)))
goto no_demux;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (session->bin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
+ if (!(queue2 = gst_element_factory_make ("queue2", NULL)))
+ goto no_queue2;
+#endif
stream = g_new0 (GstRtpBinStream, 1);
stream->ssrc = ssrc;
stream->bin = rtpbin;
stream->rt_delta = 0;
stream->rtp_delta = 0;
stream->percent = 100;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ stream->prev_percent = 0;
+#endif
stream->clock_base = -100 * GST_SECOND;
session->streams = g_slist_prepend (session->streams, stream);
g_object_set (buffer, "mode", rtpbin->buffer_mode, NULL);
g_object_set (buffer, "do-retransmission", rtpbin->do_retransmission, NULL);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* configure queue2 to use live buffering */
+ if (queue2) {
+ g_object_set_data (G_OBJECT (queue2), "GstRTPBin.stream", stream);
+ g_object_set (queue2, "use-buffering", TRUE, NULL);
+ g_object_set (queue2, "buffer-mode", GST_BUFFERING_LIVE, NULL);
+ }
+#endif
+
g_signal_emit (rtpbin, gst_rtp_bin_signals[SIGNAL_NEW_JITTERBUFFER], 0,
buffer, session->id, ssrc);
if (!rtpbin->ignore_pt)
gst_bin_add (GST_BIN_CAST (rtpbin), demux);
+
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2)
+ gst_bin_add (GST_BIN_CAST (rtpbin), queue2);
+#endif
+
gst_bin_add (GST_BIN_CAST (rtpbin), buffer);
/* link stuff */
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2) {
+ gst_element_link_pads_full (buffer, "src", queue2, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+ if (demux) {
+ gst_element_link_pads_full (queue2, "src", demux, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+ }
+ } else if (demux)
+ gst_element_link_pads_full (buffer, "src", demux, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+
+#else
if (demux)
gst_element_link_pads_full (buffer, "src", demux, "sink",
GST_PAD_LINK_CHECK_NOTHING);
+#endif
if (rtpbin->buffering) {
guint64 last_out;
gst_element_set_state (buffer, target);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2)
+ gst_element_set_state (queue2, target);
+#endif
+
return stream;
/* ERRORS */
g_warning ("rtpbin: could not create rtpptdemux element");
return NULL;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+no_queue2:
+  {
+    /* queue2 could not be created: release the elements made so far.
+     * demux is only created when !rtpbin->ignore_pt (it is initialised to
+     * NULL above), and gst_object_unref() on NULL raises a GLib critical,
+     * so guard the unref. */
+    gst_object_unref (buffer);
+    if (demux)
+      gst_object_unref (demux);
+    g_warning ("rtpbin: could not create queue2 element");
+    return NULL;
+  }
+#endif
}
/* called with RTP_BIN_LOCK */
gint min_percent = 100;
GSList *sessions, *streams;
GstRtpBinStream *stream;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gboolean buffering_flag = FALSE, update_buffering_status = TRUE;
+#endif
gboolean change = FALSE, active = FALSE;
GstClockTime min_out_time;
GstBufferingMode mode;
for (streams = session->streams; streams;
streams = g_slist_next (streams)) {
GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstPad *temp_pad_src = NULL;
+ GstCaps *temp_caps_src = NULL;
+ GstStructure *caps_structure;
+ const gchar *caps_str_media = NULL;
+ temp_pad_src = gst_element_get_static_pad (stream->buffer, "src");
+ temp_caps_src = gst_pad_get_current_caps(temp_pad_src);
+ GST_DEBUG_OBJECT (bin, "stream %p percent %d : temp_caps_src=%"GST_PTR_FORMAT, stream,stream->percent,temp_caps_src);
+ if (temp_caps_src)
+ {
+ caps_structure = gst_caps_get_structure (temp_caps_src, 0);
+ caps_str_media = gst_structure_get_string (caps_structure, "media");
+ if (caps_str_media != NULL)
+ {
+ if ((strcmp(caps_str_media,"video") != 0)&&(strcmp(caps_str_media,"audio") != 0))
+ {
+ GST_DEBUG_OBJECT (bin, "Non Audio/Video Stream.. ignoring the same !!");
+ gst_caps_unref( temp_caps_src );
+ gst_object_unref( temp_pad_src );
+ continue;
+ }
+ else if(stream->percent >= 100)
+ {
+ /*[shweta.agg] Most of the time buffering icon displays in rtsp playback.
+ Optimizing the buffering updation code. Whenever any stream percentage
+ reaches 100 do not post buffering messages.*/
+ if(stream->prev_percent < 100)
+ {
+ buffering_flag = TRUE;
+ }
+ else
+ {
+ update_buffering_status = FALSE;
+ }
+ }
+ }
+ gst_caps_unref( temp_caps_src );
+ }
+ gst_object_unref( temp_pad_src );
+#else
GST_DEBUG_OBJECT (bin, "stream %p percent %d", stream,
stream->percent);
-
+#endif
/* find min percent */
if (min_percent > stream->percent)
min_percent = stream->percent;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* Updating prev stream percentage */
+ stream->prev_percent = stream->percent;
+#endif
}
} else {
GST_INFO_OBJECT (bin,
GST_RTP_SESSION_UNLOCK (session);
}
GST_DEBUG_OBJECT (bin, "min percent %d", min_percent);
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (rtpbin->buffer_mode != RTP_JITTER_BUFFER_MODE_SLAVE) {
+ if (rtpbin->buffering) {
+ if (min_percent == 100) {
+ rtpbin->buffering = FALSE;
+ active = TRUE;
+ change = TRUE;
+ }
+ } else {
+ if (min_percent < 100) {
+ /* pause the streams */
+ rtpbin->buffering = TRUE;
+ active = FALSE;
+ change = TRUE;
+ }
+ }
+ }
+#else
if (rtpbin->buffering) {
if (min_percent == 100) {
rtpbin->buffering = FALSE;
change = TRUE;
}
}
+#endif
GST_RTP_BIN_UNLOCK (rtpbin);
gst_message_unref (message);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
+ {
+ if(update_buffering_status==FALSE)
+ {
+ break;
+ }
+ if(buffering_flag)
+ {
+ min_percent=100;
+ GST_DEBUG_OBJECT (bin, "forcefully change min_percent to 100!!!");
+ }
+ }
+#endif
/* make a new buffering message with the min value */
message =
gst_message_new_buffering (GST_OBJECT_CAST (bin), min_percent);
gst_message_set_buffering_stats (message, mode, avg_in, avg_out,
buffering_left);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
+ goto slave_buffering;
+#endif
if (G_UNLIKELY (change)) {
GstClock *clock;
guint64 running_time = 0;
GST_RTP_BIN_UNLOCK (rtpbin);
}
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+slave_buffering:
+#endif
GST_BIN_CLASS (parent_class)->handle_message (bin, message);
break;
}
#define DEFAULT_NTP_TIME_SOURCE NTP_TIME_SOURCE_NTP
#define DEFAULT_USER_AGENT "GStreamer/" PACKAGE_VERSION
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+#define DEFAULT_START_POSITION 0
+#endif
+
enum
{
PROP_0,
PROP_DEBUG,
PROP_RETRY,
PROP_TIMEOUT,
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ PROP_START_POSITION,
+ PROP_RESUME_POSITION,
+#endif
PROP_TCP_TIMEOUT,
PROP_LATENCY,
PROP_DROP_ON_LATENCY,
}
#endif
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+/* Post a custom "streaming_error" GST_MESSAGE_ERROR on the bus so the
+ * platform media layer can map RTSP failures to its own error codes.
+ *
+ * error_id:     GstRTSPSrcError code identifying the failure.
+ * error_string: human-readable description; not modified or freed, so
+ *               string literals are fine (hence the const qualifier).
+ */
+static void
+gst_rtspsrc_post_error_message (GstRTSPSrc * src, GstRTSPSrcError error_id,
+    const gchar * error_string)
+{
+  GstMessage *message;
+  GstStructure *structure;
+
+  GST_ERROR_OBJECT (src, "[%d] %s", error_id, error_string);
+
+  structure = gst_structure_new ("streaming_error",
+      "error_id", G_TYPE_UINT, error_id,
+      "error_string", G_TYPE_STRING, error_string, NULL);
+
+  /* the message takes ownership of the structure */
+  message = gst_message_new_custom (GST_MESSAGE_ERROR, GST_OBJECT (src),
+      structure);
+
+  if (!gst_element_post_message (GST_ELEMENT (src), message))
+    GST_ERROR_OBJECT (src, "fail to post error message.");
+}
+#endif
+
static gboolean
default_select_stream (GstRTSPSrc * src, guint id, GstCaps * caps)
{
"Retry TCP transport after UDP timeout microseconds (0 = disabled)",
0, G_MAXUINT64, DEFAULT_TIMEOUT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_object_class_install_property (gobject_class, PROP_START_POSITION,
+ g_param_spec_uint64 ("pending-start-position", "set start position",
+ "Set start position before PLAYING request.",
+ 0, G_MAXUINT64, DEFAULT_START_POSITION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_RESUME_POSITION,
+ g_param_spec_uint64 ("resume-position", "set resume position",
+ "Set resume position before PLAYING request after pause.",
+ 0, G_MAXUINT64, DEFAULT_START_POSITION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif
g_object_class_install_property (gobject_class, PROP_TCP_TIMEOUT,
g_param_spec_uint64 ("tcp-timeout", "TCP Timeout",
"Fail after timeout microseconds on TCP connections (0 = disabled)",
src->debug = DEFAULT_DEBUG;
src->retry = DEFAULT_RETRY;
src->udp_timeout = DEFAULT_TIMEOUT;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ src->start_position = DEFAULT_START_POSITION;
+ src->is_audio_codec_supported = FALSE;
+ src->is_video_codec_supported = FALSE;
+ src->audio_codec = NULL;
+ src->video_codec = NULL;
+ src->video_frame_size = NULL;
+#endif
gst_rtspsrc_set_tcp_timeout (src, DEFAULT_TCP_TIMEOUT);
src->latency = DEFAULT_LATENCY_MS;
src->drop_on_latency = DEFAULT_DROP_ON_LATENCY;
src->ntp_time_source = DEFAULT_NTP_TIME_SOURCE;
src->user_agent = g_strdup (DEFAULT_USER_AGENT);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_mutex_init (&(src)->pause_lock);
+ g_cond_init (&(src)->open_end);
+#endif
/* get a list of all extensions */
src->extensions = gst_rtsp_ext_list_get ();
rtspsrc = GST_RTSPSRC (object);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ rtspsrc->is_audio_codec_supported = FALSE;
+ rtspsrc->is_video_codec_supported = FALSE;
+ if (rtspsrc->audio_codec)
+ {
+ g_free(rtspsrc->audio_codec);
+ rtspsrc->audio_codec = NULL;
+ }
+ if (rtspsrc->video_codec)
+ {
+ g_free(rtspsrc->video_codec);
+ rtspsrc->video_codec = NULL;
+ }
+ if (rtspsrc->video_frame_size)
+ {
+ g_free(rtspsrc->video_frame_size);
+ rtspsrc->video_frame_size = NULL;
+ }
+#endif
gst_rtsp_ext_list_free (rtspsrc->extensions);
g_free (rtspsrc->conninfo.location);
gst_rtsp_url_free (rtspsrc->conninfo.url);
g_free (rtspsrc->multi_iface);
g_free (rtspsrc->user_agent);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_mutex_clear (&(rtspsrc)->pause_lock);
+ g_cond_clear (&(rtspsrc)->open_end);
+#endif
+
if (rtspsrc->sdp) {
gst_sdp_message_free (rtspsrc->sdp);
rtspsrc->sdp = NULL;
case PROP_TIMEOUT:
rtspsrc->udp_timeout = g_value_get_uint64 (value);
break;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ case PROP_START_POSITION:
+ rtspsrc->start_position = g_value_get_uint64 (value);
+ break;
+ case PROP_RESUME_POSITION:
+ rtspsrc->last_pos = g_value_get_uint64 (value);
+ GST_DEBUG_OBJECT (rtspsrc, "src->last_pos value set to %"GST_TIME_FORMAT,GST_TIME_ARGS (rtspsrc->last_pos));
+ break;
+#endif
case PROP_TCP_TIMEOUT:
gst_rtspsrc_set_tcp_timeout (rtspsrc, g_value_get_uint64 (value));
break;
case PROP_TIMEOUT:
g_value_set_uint64 (value, rtspsrc->udp_timeout);
break;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ case PROP_START_POSITION:
+ g_value_set_uint64 (value, rtspsrc->start_position);
+ break;
+ case PROP_RESUME_POSITION:
+ g_value_set_uint64 (value, rtspsrc->last_pos);
+ break;
+#endif
case PROP_TCP_TIMEOUT:
{
guint64 timeout;
GstStructure *s;
const gchar *enc;
PtMapItem item;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+  /* gst_structure_get_string() returns a const string owned by the caps
+   * structure — declare accordingly (avoids discarded-qualifier warnings)
+   * and initialise to NULL. */
+  const gchar *encoder = NULL, *mediatype = NULL;
+#endif
pt = atoi (gst_sdp_media_get_format (media, i));
GST_DEBUG_OBJECT (src, " looking at %d pt: %d", i, pt);
stream->container = TRUE;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if ((mediatype = gst_structure_get_string (s, "media")))
+ {
+ GST_DEBUG_OBJECT (src, " mediatype : %s",mediatype);
+ if(!strcmp(mediatype, "video"))
+ {
+ if ((encoder = gst_structure_get_string (s, "encoding-name")))
+ {
+ GST_DEBUG_OBJECT (src, " encoder : %s",encoder);
+ if((!strcmp(encoder,"H261")) ||
+ (!strcmp(encoder,"H263")) ||
+ (!strcmp(encoder,"H263-1998")) || (!strcmp(encoder,"H263-2000")) ||
+ (!strcmp(encoder,"H264")) ||
+ (!strcmp(encoder,"MP4V-ES")))
+ {
+ src->is_video_codec_supported = TRUE;
+ GST_DEBUG_OBJECT (src, "Supported Video Codec %s",encoder);
+ }
+ else
+ {
+ GST_DEBUG_OBJECT (src, "Unsupported Video Codec %s",encoder);
+ }
+ }
+
+ src->video_codec = g_strdup(encoder);
+ src->video_frame_size = g_strdup(gst_structure_get_string(s, "a-framesize"));
+ GST_DEBUG_OBJECT (src, "video_codec %s , video_frame_size %s ",src->video_codec,src->video_frame_size);
+ }
+ else if (!strcmp(mediatype, "audio"))
+ {
+ if ((encoder = gst_structure_get_string (s, "encoding-name")))
+ {
+ GST_DEBUG_OBJECT (src, " encoder : %s",encoder);
+ if((!strcmp(encoder,"MP4A-LATM")) ||
+ (!strcmp(encoder,"AMR")) ||(!strcmp(encoder,"AMR-WB")) ||(!strcmp(encoder,"AMR-NB")) ||
+ (!strcmp(encoder,"mpeg4-generic")) ||(!strcmp(encoder,"MPEG4-GENERIC")) ||
+ (!strcmp(encoder,"QCELP")) || ((strstr(encoder, "G726"))||(strstr(encoder, "PCMU"))))
+ {
+ src->is_audio_codec_supported = TRUE;
+ GST_DEBUG_OBJECT (src, "Supported Audio Codec %s",encoder);
+ }
+ else
+ {
+ GST_DEBUG_OBJECT (src, "Unsupported Audio Codec %s",encoder);
+ }
+ }
+
+ src->audio_codec = g_strdup(encoder);
+ GST_DEBUG_OBJECT (src, "audio_codec %s ",src->audio_codec);
+ }
+ }
+#endif
+
/* Merge in global caps */
/* Intersect will merge in missing fields to the current caps */
outcaps = gst_caps_intersect (caps, global_caps);
{
GList *walk;
+ GST_WARNING_OBJECT(src, "Setting [%s] element state to: %s \n", GST_ELEMENT_NAME(GST_ELEMENT_CAST (src)),gst_element_state_get_name(state));
if (src->manager)
gst_element_set_state (GST_ELEMENT_CAST (src->manager), state);
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_SERVER,"Could not receive message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive message. (%s)", str));
+#endif
g_free (str);
gst_rtsp_message_unset (&message);
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,"Could not handle server message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not handle server message. (%s)", str));
+#endif
g_free (str);
gst_rtsp_message_unset (&message);
return GST_FLOW_ERROR;
src->conninfo.connected = FALSE;
if (res != GST_RTSP_EINTR) {
- GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_CONNECTION_FAIL,"Could not connect to server.");
+#else
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
("Could not connect to server. (%s)", str));
+#endif
g_free (str);
ret = GST_FLOW_ERROR;
} else {
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,"Could not receive message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive message. (%s)", str));
+#endif
g_free (str);
return GST_FLOW_ERROR;
}
gst_rtsp_message_unset (&message);
if (res != GST_RTSP_EINTR) {
- GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,"Could not handle server message.");
+#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not handle server message. (%s)", str));
+#endif
g_free (str);
ret = GST_FLOW_ERROR;
} else {
{
src->cur_protocols = 0;
/* no transport possible, post an error and stop */
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_TRANSPORT,"Could not receive any UDP packets for seconds, maybe your firewall is blocking it. No other protocols to try.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive any UDP packets for %.4f seconds, maybe your "
"firewall is blocking it. No other protocols to try.",
gst_guint64_to_gdouble (src->udp_timeout / 1000000.0)));
+#endif
return GST_RTSP_ERROR;
}
open_failed:
static void
gst_rtspsrc_loop_complete_cmd (GstRTSPSrc * src, gint cmd)
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstStructure *s;
+#endif
+ GST_WARNING_OBJECT (src, "Got cmd %s", cmd_to_string (cmd));
+
switch (cmd) {
case CMD_OPEN:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_DEBUG_OBJECT (src, "rtsp_duration %"GST_TIME_FORMAT", rtsp_audio_codec %s , rtsp_video_codec %s , rtsp_video_frame_size %s",
+ GST_TIME_ARGS(src->segment.duration),src->audio_codec,src->video_codec,src->video_frame_size);
+
+ /* post message */
+ s = gst_message_new_element (GST_OBJECT_CAST (src),
+ gst_structure_new ("rtspsrc_properties",
+ "rtsp_duration",G_TYPE_UINT64,src->segment.duration,
+ "rtsp_audio_codec", G_TYPE_STRING, src->audio_codec,
+ "rtsp_video_codec", G_TYPE_STRING, src->video_codec,
+ "rtsp_video_frame_size", G_TYPE_STRING, src->video_frame_size,NULL));
+
+ gst_element_post_message (GST_ELEMENT_CAST (src), s);
+#endif
GST_ELEMENT_PROGRESS (src, COMPLETE, "open", ("Opened Stream"));
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* rtspsrc PAUSE state should be here for parsing sdp before PAUSE state changed. */
+ g_mutex_lock(&(src)->pause_lock);
+ g_cond_signal (&(src)->open_end);
+ g_mutex_unlock(&(src)->pause_lock);
+#endif
break;
case CMD_PLAY:
GST_ELEMENT_PROGRESS (src, COMPLETE, "request", ("Sent PLAY request"));
switch (cmd) {
case CMD_OPEN:
GST_ELEMENT_PROGRESS (src, ERROR, "open", ("Open failed"));
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+/*[shweta.agg] Ending conditional wait for pause when open fails.*/
+ g_mutex_lock(&(src)->pause_lock);
+ g_cond_signal (&(src)->open_end);
+ g_mutex_unlock(&(src)->pause_lock);
+ GST_WARNING_OBJECT (src, "ending conditional wait for pause as open is failed.");
+#endif
break;
case CMD_PLAY:
GST_ELEMENT_PROGRESS (src, ERROR, "request", ("PLAY failed"));
{
/* Output an error indicating that we couldn't connect because there were
* no supported authentication protocols */
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_NOT_AUTHORIZED,"No supported authentication protocol was found");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
("No supported authentication protocol was found"));
+#endif
return FALSE;
}
no_user_pass:
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
- GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_CONNECTION_FAIL,"Could not send message.");
+#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "send interrupted");
}
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,"Could not receive message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "receive interrupted");
}
switch (response->type_data.response.code) {
case GST_RTSP_STS_NOT_FOUND:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_REQUEST,"STS NOT FOUND");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, (NULL), ("%s",
response->type_data.response.reason));
+#endif
break;
case GST_RTSP_STS_UNAUTHORIZED:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_NOT_AUTHORIZED,"STS NOT AUTHORIZED");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, NOT_AUTHORIZED, (NULL), ("%s",
response->type_data.response.reason));
+#endif
break;
case GST_RTSP_STS_MOVED_PERMANENTLY:
case GST_RTSP_STS_MOVE_TEMPORARILY:
res = GST_RTSP_OK;
break;
default:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_UNEXPECTED_MSG,"Got error response from Server");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Got error response: %d (%s).", response->type_data.response.code,
response->type_data.response.reason));
+#endif
break;
}
/* if we return ERROR we should unset the response ourselves */
/* ERRORS */
no_describe:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,"Server does not support DESCRIBE.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
("Server does not support DESCRIBE."));
+#endif
return FALSE;
}
no_setup:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,"Server does not support SETUP.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
("Server does not support SETUP."));
+#endif
return FALSE;
}
}
/* ERRORS */
no_protocols:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_INVALID_PROTOCOL,"Could not connect to server, no protocols left");
+#else
/* no transport possible, post an error and stop */
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not connect to server, no protocols left"));
+#endif
return GST_RTSP_ERROR;
}
no_streams:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_CONTENT_NOT_FOUND,"SDP contains no streams");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("SDP contains no streams"));
+#endif
return GST_RTSP_ERROR;
}
create_request_failed:
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_REQUEST,"Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto cleanup_error;
}
setup_transport_failed:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_REQUEST,"Could not setup transport.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Could not setup transport."));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
{
const gchar *str = gst_rtsp_status_as_text (code);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_UNEXPECTED_MSG,"Error from Server .");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Error (%d): %s", code, GST_STR_NULL (str)));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_CONNECTION_FAIL,"Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "send interrupted");
}
}
no_transport:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_TRANSPORT,"Server did not select transport.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Server did not select transport."));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
{
/* none of the available error codes is really right .. */
if (unsupported_real) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,"No supported stream was found. You might need to install a GStreamer RTSP extension plugin for Real media streams.");
+#else
GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
(_("No supported stream was found. You might need to install a "
"GStreamer RTSP extension plugin for Real media streams.")),
(NULL));
+#endif
} else {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,"No supported stream was found. You might need to allow more transport protocols or may otherwise be missing the right GStreamer RTSP extension plugin.");
+#else
GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
(_("No supported stream was found. You might need to allow "
"more transport protocols or may otherwise be missing "
"the right GStreamer RTSP extension plugin.")), (NULL));
+#endif
}
return GST_RTSP_ERROR;
}
/* we need to start playback without clipping from the position reported by
* the server */
segment->start = seconds;
+#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
+/*
+[prasenjit.c] The range-min points to the start of the segment , not the current position.
+After getting the current position from MSL during normal pause/resume or during seek , we should not
+update the segment->position again with the rtp header npt timestamp.
+*/
segment->position = seconds;
+#endif
if (therange->max.type == GST_RTSP_TIME_NOW)
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ seconds = 0;
+#else
seconds = -1;
+#endif
else if (therange->max.type == GST_RTSP_TIME_END)
seconds = -1;
else
src->control = g_strdup (control);
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ src->is_audio_codec_supported = FALSE;
+ src->is_video_codec_supported = FALSE;
+#endif
+
/* create streams */
n_streams = gst_sdp_message_medias_len (sdp);
for (i = 0; i < n_streams; i++) {
}
src->state = GST_RTSP_STATE_INIT;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ //[prasenjit.c] Check for the support for the Media codecs
+ if ((!src->is_audio_codec_supported)&&(!src->is_video_codec_supported))
+ {
+ GST_ERROR_OBJECT (src, "UnSupported Media Type !!!! \n");
+ goto unsupported_file_type;
+ }
+ else
+ {
+ GST_DEBUG_OBJECT (src, "Supported Media Type. \n");
+ }
+#endif
/* setup streams */
if ((res = gst_rtspsrc_setup_streams (src, async)) < 0)
goto setup_failed;
gst_rtspsrc_cleanup (src);
return res;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ unsupported_file_type:
+ {
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,"No supported stream was found");
+ res = GST_RTSP_ERROR;
+ gst_rtspsrc_cleanup (src);
+ return res;
+ }
+#endif
}
static GstRTSPResult
/* ERRORS */
no_url:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_INVALID_URL,"No valid RTSP URL was provided");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, (NULL),
("No valid RTSP URL was provided"));
+#endif
goto cleanup_error;
}
connect_failed:
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_CONNECTION_FAIL,"Failed to connect.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
("Failed to connect. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "connect interrupted");
}
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_REQUEST,"Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto cleanup_error;
}
}
wrong_content_type:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_OPTION_NOT_SUPPORTED,"Server does not support SDP. ");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Server does not support SDP, got %s.", respcont));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
no_describe:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,"Server can not provide an SDP.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Server can not provide an SDP."));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_REQUEST,"Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto close;
}
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_CONNECTION_FAIL,"Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "TEARDOWN interrupted");
}
gen_range_header (GstRTSPSrc * src, GstSegment * segment)
{
gchar val_str[G_ASCII_DTOSTR_BUF_SIZE] = { 0, };
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (src->start_position !=0 && segment->position == 0) {
+ segment->position = src->start_position;
+ src->start_position = 0;
+ }
+#endif
if (src->range && src->range->min.type == GST_RTSP_TIME_NOW) {
g_strlcpy (val_str, "now", sizeof (val_str));
} else {
((gdouble) segment->position) / GST_SECOND);
}
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_DEBUG_OBJECT (src, "Range Header Added : npt=%s-",val_str);
+#endif
return g_strdup_printf ("npt=%s-", val_str);
}
goto create_request_failed;
if (src->need_range) {
+#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
hval = gen_range_header (src, segment);
gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
+#endif
/* store the newsegment event so it can be sent from the streaming thread. */
src->need_segment = TRUE;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ else
+ {
+/*
+[prasenjit.c] Updating position with the MSL current position as gst_rtspsrc_get_position() does not return correct position.
+*/
+ GST_DEBUG_OBJECT (src, " During normal pause-resume , segment->position=%" GST_TIME_FORMAT",src->start_position=%"GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->position),GST_TIME_ARGS (src->start_position));
+ segment->position = src->last_pos;
+ }
+
+/*
+[prasenjit.c] Sending the npt range request for each play request for updating the segment position properly.
+*/
+ hval = gen_range_header (src, segment);
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
+#endif
if (segment->rate != 1.0) {
gchar hval[G_ASCII_DTOSTR_BUF_SIZE];
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_REQUEST,"Could not create request. ");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto done;
}
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_CONNECTION_FAIL,"Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "PLAY interrupted");
}
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_BAD_REQUEST,"Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto done;
}
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message ( src,
+ GST_RTSPSRC_ERROR_CONNECTION_FAIL,"Could not send message. ");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "PAUSE interrupted");
}
{
GstRTSPSrc *rtspsrc;
GstStateChangeReturn ret;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ guint64 end_time;
+#endif
rtspsrc = GST_RTSPSRC (element);
+ GST_WARNING_OBJECT(rtspsrc, "State change transition: %d \n", transition);
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
ret = GST_STATE_CHANGE_SUCCESS;
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* don't change to PAUSE state before complete stream opend.
+ see gst_rtspsrc_loop_complete_cmd() */
+ g_mutex_lock(&(rtspsrc)->pause_lock);
+ end_time = g_get_monotonic_time () + 10 * G_TIME_SPAN_SECOND;
+ if (!g_cond_wait_until (&(rtspsrc)->open_end, &(rtspsrc)->pause_lock, end_time)) {
+ GST_WARNING_OBJECT(rtspsrc, "time out: stream opend is not completed yet..");
+ }
+ g_mutex_unlock(&(rtspsrc)->pause_lock);
+#endif
ret = GST_STATE_CHANGE_NO_PREROLL;
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING: