FREE_STRING (msg->video_formats);
}
+ if (msg->direct_audio_codecs) {
+ guint i = 0;
+ if (msg->direct_audio_codecs->list) {
+ for (; i < msg->direct_audio_codecs->count; i++) {
+ FREE_STRING(msg->direct_audio_codecs->list[i].audio_format);
+ msg->direct_audio_codecs->list[i].modes = 0;
+ msg->direct_audio_codecs->list[i].latency = 0;
+ }
+ FREE_STRING(msg->direct_audio_codecs->list);
+ }
+ FREE_STRING(msg->direct_audio_codecs);
+ }
+
+ if (msg->direct_video_formats) {
+ FREE_STRING(msg->direct_video_formats->list);
+ FREE_STRING(msg->direct_video_formats);
+ }
+
if (msg->video_3d_formats) {
FREE_STRING (msg->video_3d_formats->list);
FREE_STRING (msg->video_3d_formats);
if (msg->idr_request) {
FREE_STRING (msg->idr_request);
}
+
+ if (msg->direct_mode) {
+ FREE_STRING(msg->direct_mode);
+ }
+
if (msg->tcp_ports) {
FREE_STRING(msg->tcp_ports);
}
msg->video_formats->list = NULL;
}
}
+ } else if (!g_strcmp0 (attr, GST_STRING_WFD2_AUDIO_CODECS)) {
+ msg->direct_audio_codecs = g_new0 (GstWFD2AudioCodeclist, 1);
+ if (strlen (v)) {
+ guint i = 0;
+ msg->direct_audio_codecs->count = strlen (v) / 16;
+ msg->direct_audio_codecs->list =
+ g_new0 (GstWFDAudioCodec, msg->direct_audio_codecs->count);
+ for (; i < msg->direct_audio_codecs->count; i++) {
+ WFD_SKIP_SPACE (v);
+ WFD_READ_STRING (msg->direct_audio_codecs->list[i].audio_format);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_audio_codecs->list[i].modes);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_audio_codecs->list[i].latency);
+ WFD_SKIP_COMMA (v);
+ }
+ }
+ } else if (!g_strcmp0 (attr, GST_STRING_WFD2_VIDEO_FORMATS)) {
+ msg->direct_video_formats = g_new0 (GstWFD2VideoCodeclist, 1);
+ if (strlen (v)) {
+ msg->direct_video_formats->count = 1;
+ msg->direct_video_formats->list = g_new0 (GstWFDVideoCodec, 1);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->native);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->
+ list->preferred_display_mode_supported);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.profile);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.level);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.
+ misc_params.CEA_Support);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.
+ misc_params.VESA_Support);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.
+ misc_params.HH_Support);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.
+ misc_params.latency);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.
+ misc_params.min_slice_size);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.
+ misc_params.slice_enc_params);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.
+ misc_params.frame_rate_control_support);
+ WFD_SKIP_SPACE (v);
+ if (msg->direct_video_formats->list->preferred_display_mode_supported == 1) {
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.max_hres);
+ WFD_SKIP_SPACE (v);
+ WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.max_vres);
+ WFD_SKIP_SPACE (v);
+ }
+ }
} else if (!g_strcmp0 (attr, GST_STRING_WFD_3D_VIDEO_FORMATS)) {
msg->video_3d_formats = g_new0 (GstWFD3DFormats, 1);
if (strlen (v)) {
} else if (!g_strcmp0 (attr, GST_STRING_WFD_IDR_REQUEST)) {
msg->idr_request = g_new0 (GstWFDIdrRequest, 1);
msg->idr_request->idr_request = TRUE;
+ } else if (!g_strcmp0 (attr, GST_STRING_WFD2_DIRECT_STREAMING_MODE)) {
+ msg->direct_mode = g_new0 (GstWFD2DirectStreamingMode, 1);
+ if (strlen (v)) {
+ WFD_SKIP_SPACE (v);
+ if (!g_strcmp0 (v, "active"))
+ msg->direct_mode->direct_mode = TRUE;
+ else
+ msg->direct_mode->direct_mode = FALSE;
+ }
} else if (!g_strcmp0 (attr, GST_STRING_WFD2_TCP_PORTS)) {
msg->tcp_ports = g_new0 (GstWFDTCPPorts, 1);
if (strlen (v)) {
g_string_append_printf (lines, "\r\n");
}
+ /* list of direct audio codecs */
+ if (msg->direct_audio_codecs) {
+ g_string_append_printf (lines, GST_STRING_WFD2_AUDIO_CODECS);
+ if (msg->direct_audio_codecs->list) {
+ g_string_append_printf (lines, ":");
+ for (i = 0; i < msg->direct_audio_codecs->count; i++) {
+ g_string_append_printf (lines, " %s",
+ msg->direct_audio_codecs->list[i].audio_format);
+ g_string_append_printf (lines, " %08x",
+ msg->direct_audio_codecs->list[i].modes);
+ g_string_append_printf (lines, " %02x",
+ msg->direct_audio_codecs->list[i].latency);
+ if ((i + 1) < msg->direct_audio_codecs->count)
+ g_string_append_printf (lines, ",");
+ }
+ }
+ g_string_append_printf (lines, "\r\n");
+ }
+
+ /* list of direct video codecs */
+ if (msg->direct_video_formats) {
+ g_string_append_printf (lines, GST_STRING_WFD2_VIDEO_FORMATS);
+ if (msg->direct_video_formats->list) {
+ g_string_append_printf (lines, ":");
+ g_string_append_printf (lines, " %02x", msg->direct_video_formats->list->native);
+ g_string_append_printf (lines, " %02x",
+ msg->direct_video_formats->list->preferred_display_mode_supported);
+ g_string_append_printf (lines, " %02x",
+ msg->direct_video_formats->list->H264_codec.profile);
+ g_string_append_printf (lines, " %02x",
+ msg->direct_video_formats->list->H264_codec.level);
+ g_string_append_printf (lines, " %08x",
+ msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support);
+ g_string_append_printf (lines, " %08x",
+ msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support);
+ g_string_append_printf (lines, " %08x",
+ msg->direct_video_formats->list->H264_codec.misc_params.HH_Support);
+ g_string_append_printf (lines, " %02x",
+ msg->direct_video_formats->list->H264_codec.misc_params.latency);
+ g_string_append_printf (lines, " %04x",
+ msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size);
+ g_string_append_printf (lines, " %04x",
+ msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params);
+ g_string_append_printf (lines, " %02x",
+ msg->direct_video_formats->list->H264_codec.
+ misc_params.frame_rate_control_support);
+
+ if (msg->direct_video_formats->list->H264_codec.max_hres)
+ g_string_append_printf (lines, " %04x",
+ msg->direct_video_formats->list->H264_codec.max_hres);
+ else
+ g_string_append_printf (lines, " none");
+
+ if (msg->direct_video_formats->list->H264_codec.max_vres)
+ g_string_append_printf (lines, " %04x",
+ msg->direct_video_formats->list->H264_codec.max_vres);
+ else
+ g_string_append_printf (lines, " none");
+ }
+ g_string_append_printf (lines, "\r\n");
+ }
/* list of video 3D codecs */
if (msg->video_3d_formats) {
g_string_append_printf (lines, GST_STRING_WFD_3D_VIDEO_FORMATS);
g_string_append_printf (lines, "\r\n");
}
+ if (msg->direct_mode && msg->direct_mode->direct_mode) {
+ g_string_append_printf (lines, GST_STRING_WFD2_DIRECT_STREAMING_MODE);
+ g_string_append_printf (lines, ":");
+ g_string_append_printf (lines, " active");
+ g_string_append_printf (lines, "\r\n");
+ }
+
if (msg->tcp_ports) {
g_string_append_printf (lines, GST_STRING_WFD2_TCP_PORTS);
if (msg->tcp_ports->profile) {
g_string_append_printf (lines, " %d", msg->buf_len->buf_len);
g_string_append_printf (lines, "\r\n");
}
+
return g_string_free (lines, FALSE);
}
g_string_append_printf (lines, GST_STRING_WFD_VIDEO_FORMATS);
g_string_append_printf (lines, "\r\n");
}
+ /* list of direct audio codecs */
+ if (msg->direct_audio_codecs) {
+ g_string_append_printf (lines, GST_STRING_WFD2_AUDIO_CODECS);
+ g_string_append_printf (lines, "\r\n");
+ }
+ /* list of direct video codecs */
+ if (msg->direct_video_formats) {
+ g_string_append_printf (lines, GST_STRING_WFD2_VIDEO_FORMATS);
+ g_string_append_printf (lines, "\r\n");
+ }
/* list of video 3D codecs */
if (msg->video_3d_formats) {
g_string_append_printf (lines, GST_STRING_WFD_3D_VIDEO_FORMATS);
}
}
+ if (msg->direct_audio_codecs) {
+ guint i = 0;
+ g_print ("Audio supported formats for direct streaming : \n");
+ for (; i < msg->direct_audio_codecs->count; i++) {
+ g_print ("Codec: %s\n", msg->direct_audio_codecs->list[i].audio_format);
+ if (!strcmp (msg->direct_audio_codecs->list[i].audio_format, "LPCM")) {
+ if (msg->direct_audio_codecs->list[i].modes & GST_WFD_FREQ_44100)
+ g_print (" Freq: %d\n", 44100);
+ if (msg->direct_audio_codecs->list[i].modes & GST_WFD_FREQ_48000)
+ g_print (" Freq: %d\n", 48000);
+ g_print (" Channels: %d\n", 2);
+ }
+ if (!strcmp (msg->direct_audio_codecs->list[i].audio_format, "AAC")) {
+ g_print (" Freq: %d\n", 48000);
+ if (msg->direct_audio_codecs->list[i].modes & GST_WFD_CHANNEL_2)
+ g_print (" Channels: %d\n", 2);
+ if (msg->direct_audio_codecs->list[i].modes & GST_WFD_CHANNEL_4)
+ g_print (" Channels: %d\n", 4);
+ if (msg->direct_audio_codecs->list[i].modes & GST_WFD_CHANNEL_6)
+ g_print (" Channels: %d\n", 6);
+ if (msg->direct_audio_codecs->list[i].modes & GST_WFD_CHANNEL_8)
+ g_print (" Channels: %d\n", 8);
+ }
+ if (!strcmp (msg->direct_audio_codecs->list[i].audio_format, "AC3")) {
+ g_print (" Freq: %d\n", 48000);
+ if (msg->direct_audio_codecs->list[i].modes & GST_WFD_CHANNEL_2)
+ g_print (" Channels: %d\n", 2);
+ if (msg->direct_audio_codecs->list[i].modes & GST_WFD_CHANNEL_4)
+ g_print (" Channels: %d\n", 4);
+ if (msg->direct_audio_codecs->list[i].modes & GST_WFD_CHANNEL_6)
+ g_print (" Channels: %d\n", 6);
+ }
+ g_print (" Bitwidth: %d\n", 16);
+ g_print (" Latency: %d\n", msg->direct_audio_codecs->list[i].latency);
+ }
+ }
+
+
+ if (msg->direct_video_formats) {
+ g_print ("Video supported formats for direct streaming : \n");
+ if (msg->direct_video_formats->list) {
+ guint nativeindex = 0;
+ g_print ("Codec: H264\n");
+ if ((msg->direct_video_formats->list->native & 0x7) ==
+ GST_WFD_VIDEO_CEA_RESOLUTION) {
+ g_print (" Native type: CEA\n");
+ } else if ((msg->direct_video_formats->list->native & 0x7) ==
+ GST_WFD_VIDEO_VESA_RESOLUTION) {
+ g_print (" Native type: VESA\n");
+ } else if ((msg->direct_video_formats->list->native & 0x7) ==
+ GST_WFD_VIDEO_HH_RESOLUTION) {
+ g_print (" Native type: HH\n");
+ }
+ nativeindex = msg->direct_video_formats->list->native >> 3;
+ g_print (" Resolution: %d\n", (1 << nativeindex));
+
+ if (msg->direct_video_formats->list->
+ H264_codec.profile & GST_WFD_H264_BASE_PROFILE) {
+ g_print (" Profile: BASE\n");
+ } else if (msg->direct_video_formats->list->
+ H264_codec.profile & GST_WFD_H264_HIGH_PROFILE) {
+ g_print (" Profile: HIGH\n");
+ }
+ if (msg->direct_video_formats->list->H264_codec.level & GST_WFD_H264_LEVEL_3_1) {
+ g_print (" Level: 3.1\n");
+ } else if (msg->direct_video_formats->list->
+ H264_codec.level & GST_WFD_H264_LEVEL_3_2) {
+ g_print (" Level: 3.2\n");
+ } else if (msg->direct_video_formats->list->
+ H264_codec.level & GST_WFD_H264_LEVEL_4) {
+ g_print (" Level: 4\n");
+ } else if (msg->direct_video_formats->list->
+ H264_codec.level & GST_WFD_H264_LEVEL_4_1) {
+ g_print (" Level: 4.1\n");
+ } else if (msg->direct_video_formats->list->
+ H264_codec.level & GST_WFD_H264_LEVEL_4_2) {
+ g_print (" Level: 4.2\n");
+ }
+ g_print (" Latency: %d\n",
+ msg->direct_video_formats->list->H264_codec.misc_params.latency);
+ g_print (" min_slice_size: %x\n",
+ msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size);
+ g_print (" slice_enc_params: %x\n",
+ msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params);
+ g_print (" frame_rate_control_support: %x\n",
+ msg->direct_video_formats->list->H264_codec.
+ misc_params.frame_rate_control_support);
+ if (msg->direct_video_formats->list->H264_codec.max_hres) {
+ g_print (" Max Height: %04d\n",
+ msg->direct_video_formats->list->H264_codec.max_hres);
+ }
+ if (msg->direct_video_formats->list->H264_codec.max_vres) {
+ g_print (" Max Width: %04d\n",
+ msg->direct_video_formats->list->H264_codec.max_vres);
+ }
+ }
+ }
+
if (msg->video_3d_formats) {
g_print ("wfd_3d_formats");
g_print ("\r\n");
g_print (GST_STRING_WFD_IDR_REQUEST);
g_print ("\r\n");
}
+
+ if (msg->direct_mode) {
+ g_print (GST_STRING_WFD2_DIRECT_STREAMING_MODE);
+ g_print ("\r\n");
+ }
+
if (msg->tcp_ports) {
g_print (" TCP Ports : \n");
if (msg->tcp_ports->profile) {
}
+/**
+ * gst_wfd_message_set_supported_direct_audio_format:
+ * Builds the wfd2_audio_codecs list on @msg from the @a_codec bitmask.
+ * For LPCM the codec's "modes" field carries @a_freq; for AAC/AC3 it
+ * carries @a_channels. @a_bitwidth is accepted but not stored here
+ * (bit width is reported as a fixed 16 by the getters).
+ * Returns GST_WFD_OK, or GST_WFD_EINVAL when @msg is NULL.
+ */
GstWFDResult
+gst_wfd_message_set_supported_direct_audio_format (GstWFDMessage * msg,
+ GstWFDAudioFormats a_codec,
+ guint a_freq, guint a_channels, guint a_bitwidth, guint32 a_latency)
+{
+ guint temp = a_codec;
+ guint i = 0;
+ guint pcm = 0, aac = 0, ac3 = 0;
+
+ g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL);
+
+ /* Lazily allocate the codec-list holder on first use. */
+ if (!msg->direct_audio_codecs)
+ msg->direct_audio_codecs = g_new0 (GstWFD2AudioCodeclist, 1);
+
+ if (a_codec != GST_WFD_AUDIO_UNKNOWN) {
+ /* NOTE(review): this counts up to the highest set bit of a_codec, not
+ * the number of set bits, so a non-contiguous mask (e.g. LPCM|AC3)
+ * allocates extra zeroed entries with a NULL audio_format. It also
+ * leaks/keeps growing count if called twice — confirm callers only
+ * invoke this once per message. */
+ while (temp) {
+ msg->direct_audio_codecs->count++;
+ temp >>= 1;
+ }
+ msg->direct_audio_codecs->list =
+ g_new0 (GstWFDAudioCodec, msg->direct_audio_codecs->count);
+ /* Fill one slot per requested codec; the pcm/aac/ac3 flags ensure each
+ * codec is emitted at most once. */
+ for (; i < msg->direct_audio_codecs->count; i++) {
+ if ((a_codec & GST_WFD_AUDIO_LPCM) && (!pcm)) {
+ msg->direct_audio_codecs->list[i].audio_format = g_strdup ("LPCM");
+ msg->direct_audio_codecs->list[i].modes = a_freq;
+ msg->direct_audio_codecs->list[i].latency = a_latency;
+ pcm = 1;
+ } else if ((a_codec & GST_WFD_AUDIO_AAC) && (!aac)) {
+ msg->direct_audio_codecs->list[i].audio_format = g_strdup ("AAC");
+ msg->direct_audio_codecs->list[i].modes = a_channels;
+ msg->direct_audio_codecs->list[i].latency = a_latency;
+ aac = 1;
+ } else if ((a_codec & GST_WFD_AUDIO_AC3) && (!ac3)) {
+ msg->direct_audio_codecs->list[i].audio_format = g_strdup ("AC3");
+ msg->direct_audio_codecs->list[i].modes = a_channels;
+ msg->direct_audio_codecs->list[i].latency = a_latency;
+ ac3 = 1;
+ }
+ }
+ }
+ return GST_WFD_OK;
+}
+
+/**
+ * gst_wfd_message_set_preferred_direct_audio_format:
+ * Records exactly one preferred audio codec for direct streaming on @msg.
+ * For LPCM the codec's "modes" field carries @a_freq; for AAC/AC3 it
+ * carries @a_channels. @a_bitwidth is accepted but not stored (the
+ * getters report a fixed bit width of 16).
+ * Returns GST_WFD_OK, or GST_WFD_EINVAL when @msg is NULL.
+ */
+GstWFDResult
+gst_wfd_message_set_preferred_direct_audio_format (GstWFDMessage * msg,
+    GstWFDAudioFormats a_codec,
+    GstWFDAudioFreq a_freq,
+    GstWFDAudioChannels a_channels, guint a_bitwidth, guint32 a_latency)
+{
+  guint i;
+
+  g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL);
+
+  if (!msg->direct_audio_codecs)
+    msg->direct_audio_codecs = g_new0 (GstWFD2AudioCodeclist, 1);
+
+  /* Drop any previously stored codec list so repeated calls do not leak. */
+  if (msg->direct_audio_codecs->list) {
+    for (i = 0; i < msg->direct_audio_codecs->count; i++)
+      g_free (msg->direct_audio_codecs->list[i].audio_format);
+    g_free (msg->direct_audio_codecs->list);
+  }
+
+  msg->direct_audio_codecs->list = g_new0 (GstWFDAudioCodec, 1);
+  msg->direct_audio_codecs->count = 1;
+  if (a_codec == GST_WFD_AUDIO_LPCM) {
+    msg->direct_audio_codecs->list->audio_format = g_strdup ("LPCM");
+    msg->direct_audio_codecs->list->modes = a_freq;
+    msg->direct_audio_codecs->list->latency = a_latency;
+  } else if (a_codec == GST_WFD_AUDIO_AAC) {
+    msg->direct_audio_codecs->list->audio_format = g_strdup ("AAC");
+    msg->direct_audio_codecs->list->modes = a_channels;
+    msg->direct_audio_codecs->list->latency = a_latency;
+  } else if (a_codec == GST_WFD_AUDIO_AC3) {
+    msg->direct_audio_codecs->list->audio_format = g_strdup ("AC3");
+    msg->direct_audio_codecs->list->modes = a_channels;
+    msg->direct_audio_codecs->list->latency = a_latency;
+  }
+  return GST_WFD_OK;
+}
+
+/**
+ * gst_wfd_message_get_supported_direct_audio_format:
+ * Reads the wfd2_audio_codecs list out of @msg. The codec/freq/channel
+ * outputs are OR-accumulated across all list entries, so callers must
+ * zero-initialize them before calling. Bit width is always reported as
+ * 16; latency is taken from whichever matching entry was seen last.
+ * Returns GST_WFD_OK, or GST_WFD_EINVAL when @msg or its codec list
+ * holder is NULL.
+ */
+GstWFDResult
+gst_wfd_message_get_supported_direct_audio_format (GstWFDMessage * msg,
+ guint * a_codec,
+ guint * a_freq, guint * a_channels, guint * a_bitwidth, guint32 * a_latency)
+{
+ guint i = 0;
+ g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL);
+ g_return_val_if_fail (msg->direct_audio_codecs != NULL, GST_WFD_EINVAL);
+
+ /* g_strcmp0() tolerates NULL audio_format entries, which simply match
+ * none of the known codec names and are skipped. */
+ for (; i < msg->direct_audio_codecs->count; i++) {
+ if (!g_strcmp0 (msg->direct_audio_codecs->list[i].audio_format, "LPCM")) {
+ *a_codec |= GST_WFD_AUDIO_LPCM;
+ *a_freq |= msg->direct_audio_codecs->list[i].modes;
+ *a_channels |= GST_WFD_CHANNEL_2;
+ *a_bitwidth = 16;
+ *a_latency = msg->direct_audio_codecs->list[i].latency;
+ } else if (!g_strcmp0 (msg->direct_audio_codecs->list[i].audio_format, "AAC")) {
+ *a_codec |= GST_WFD_AUDIO_AAC;
+ *a_freq |= GST_WFD_FREQ_48000;
+ *a_channels |= msg->direct_audio_codecs->list[i].modes;
+ *a_bitwidth = 16;
+ *a_latency = msg->direct_audio_codecs->list[i].latency;
+ } else if (!g_strcmp0 (msg->direct_audio_codecs->list[i].audio_format, "AC3")) {
+ *a_codec |= GST_WFD_AUDIO_AC3;
+ *a_freq |= GST_WFD_FREQ_48000;
+ *a_channels |= msg->direct_audio_codecs->list[i].modes;
+ *a_bitwidth = 16;
+ *a_latency = msg->direct_audio_codecs->list[i].latency;
+ }
+ }
+ return GST_WFD_OK;
+}
+
+/**
+ * gst_wfd_message_get_preferred_direct_audio_format:
+ * Reads the single preferred direct-streaming audio codec from @msg.
+ * Bit width is always reported as 16; "modes" maps to @a_freq for LPCM
+ * and to @a_channels for AAC/AC3.
+ * Returns GST_WFD_OK, or GST_WFD_EINVAL when @msg or its codec list is
+ * not set.
+ */
+GstWFDResult
+gst_wfd_message_get_preferred_direct_audio_format (GstWFDMessage * msg,
+    GstWFDAudioFormats * a_codec,
+    GstWFDAudioFreq * a_freq,
+    GstWFDAudioChannels * a_channels, guint * a_bitwidth, guint32 * a_latency)
+{
+  g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL);
+  /* Guard against querying before any codec has been parsed or set;
+   * mirrors gst_wfd_message_get_supported_direct_audio_format(). */
+  g_return_val_if_fail (msg->direct_audio_codecs != NULL, GST_WFD_EINVAL);
+  g_return_val_if_fail (msg->direct_audio_codecs->list != NULL, GST_WFD_EINVAL);
+
+  if (!g_strcmp0 (msg->direct_audio_codecs->list->audio_format, "LPCM")) {
+    *a_codec = GST_WFD_AUDIO_LPCM;
+    *a_freq = msg->direct_audio_codecs->list->modes;
+    *a_channels = GST_WFD_CHANNEL_2;
+    *a_bitwidth = 16;
+    *a_latency = msg->direct_audio_codecs->list->latency;
+  } else if (!g_strcmp0 (msg->direct_audio_codecs->list->audio_format, "AAC")) {
+    *a_codec = GST_WFD_AUDIO_AAC;
+    *a_freq = GST_WFD_FREQ_48000;
+    *a_channels = msg->direct_audio_codecs->list->modes;
+    *a_bitwidth = 16;
+    *a_latency = msg->direct_audio_codecs->list->latency;
+  } else if (!g_strcmp0 (msg->direct_audio_codecs->list->audio_format, "AC3")) {
+    *a_codec = GST_WFD_AUDIO_AC3;
+    *a_freq = GST_WFD_FREQ_48000;
+    *a_channels = msg->direct_audio_codecs->list->modes;
+    *a_bitwidth = 16;
+    *a_latency = msg->direct_audio_codecs->list->latency;
+  }
+  return GST_WFD_OK;
+}
+
+/**
+ * gst_wfd_message_set_supported_direct_video_format:
+ * Builds the wfd2_video_formats entry on @msg. The "native" field packs
+ * the bit index of the highest set bit of @v_native_resolution into
+ * bits 3.. and the resolution table type (CEA=0/VESA=1/HH=2) into bits
+ * 0-2. Note this API stores @v_max_height into max_hres and
+ * @v_max_width into max_vres.
+ * Returns GST_WFD_OK, or GST_WFD_EINVAL when @msg is NULL.
+ */
+GstWFDResult
+gst_wfd_message_set_supported_direct_video_format (GstWFDMessage * msg,
+ GstWFDVideoCodecs v_codec,
+ GstWFDVideoNativeResolution v_native,
+ guint64 v_native_resolution,
+ guint64 v_cea_resolution,
+ guint64 v_vesa_resolution,
+ guint64 v_hh_resolution,
+ guint v_profile,
+ guint v_level,
+ guint32 v_latency,
+ guint32 v_max_height,
+ guint32 v_max_width,
+ guint32 min_slice_size, guint32 slice_enc_params, guint frame_rate_control)
+{
+ guint nativeindex = 0;
+ guint64 temp = v_native_resolution;
+
+ g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL);
+
+ if (!msg->direct_video_formats)
+ msg->direct_video_formats = g_new0 (GstWFD2VideoCodeclist, 1);
+
+ if (v_codec != GST_WFD_VIDEO_UNKNOWN) {
+ /* NOTE(review): overwrites any previous list without freeing it —
+ * confirm this is only called once per message. */
+ msg->direct_video_formats->list = g_new0 (GstWFDVideoCodec, 1);
+ /* Find the index of the highest set bit of the resolution bitmask. */
+ while (temp) {
+ nativeindex++;
+ temp >>= 1;
+ }
+
+ if (nativeindex) msg->direct_video_formats->list->native = nativeindex - 1;
+ /* Shift the index up; low 3 bits encode the resolution table type. */
+ msg->direct_video_formats->list->native <<= 3;
+
+ if (v_native == GST_WFD_VIDEO_VESA_RESOLUTION)
+ msg->direct_video_formats->list->native |= 1;
+ else if (v_native == GST_WFD_VIDEO_HH_RESOLUTION)
+ msg->direct_video_formats->list->native |= 2;
+
+ /* The "supported" variant advertises preferred-display-mode support;
+ * the "preferred" setter clears this flag instead. */
+ msg->direct_video_formats->list->preferred_display_mode_supported = 1;
+ msg->direct_video_formats->list->H264_codec.profile = v_profile;
+ msg->direct_video_formats->list->H264_codec.level = v_level;
+ msg->direct_video_formats->list->H264_codec.max_hres = v_max_height;
+ msg->direct_video_formats->list->H264_codec.max_vres = v_max_width;
+ msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support =
+ v_cea_resolution;
+ msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support =
+ v_vesa_resolution;
+ msg->direct_video_formats->list->H264_codec.misc_params.HH_Support =
+ v_hh_resolution;
+ msg->direct_video_formats->list->H264_codec.misc_params.latency = v_latency;
+ msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size =
+ min_slice_size;
+ msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params =
+ slice_enc_params;
+ msg->direct_video_formats->list->H264_codec.
+ misc_params.frame_rate_control_support = frame_rate_control;
+ }
+ return GST_WFD_OK;
+}
+
+/**
+ * gst_wfd_message_set_preferred_direct_video_format:
+ * Records the preferred wfd2_video_formats entry on @msg. The "native"
+ * field packs the bit index of the highest set bit of
+ * @v_native_resolution into bits 3.. and the resolution table type
+ * (CEA=0/VESA=1/HH=2) into bits 0-2. @v_max_height is stored in
+ * max_hres and @v_max_width in max_vres, matching the supported-format
+ * setter's convention.
+ * Returns GST_WFD_OK, or GST_WFD_EINVAL when @msg is NULL.
+ */
+GstWFDResult
+gst_wfd_message_set_preferred_direct_video_format (GstWFDMessage * msg,
+    GstWFDVideoCodecs v_codec,
+    GstWFDVideoNativeResolution v_native,
+    guint64 v_native_resolution,
+    guint64 v_cea_resolution,
+    guint64 v_vesa_resolution,
+    guint64 v_hh_resolution,
+    GstWFDVideoH264Profile v_profile,
+    GstWFDVideoH264Level v_level,
+    guint32 v_latency,
+    guint32 v_max_height,
+    guint32 v_max_width,
+    guint32 min_slice_size, guint32 slice_enc_params, guint frame_rate_control)
+{
+  guint nativeindex = 0;
+  guint64 temp = v_native_resolution;
+
+  g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL);
+
+  if (!msg->direct_video_formats)
+    msg->direct_video_formats = g_new0 (GstWFD2VideoCodeclist, 1);
+  /* Free any previously stored codec so repeated calls do not leak
+   * (g_free(NULL) is a no-op on first use). */
+  g_free (msg->direct_video_formats->list);
+  msg->direct_video_formats->list = g_new0 (GstWFDVideoCodec, 1);
+
+  /* Index of the highest set bit of the native resolution bitmask. */
+  while (temp) {
+    nativeindex++;
+    temp >>= 1;
+  }
+
+  if (nativeindex)
+    msg->direct_video_formats->list->native = nativeindex - 1;
+  /* Low 3 bits encode the resolution table type, the rest the index. */
+  msg->direct_video_formats->list->native <<= 3;
+
+  if (v_native == GST_WFD_VIDEO_VESA_RESOLUTION)
+    msg->direct_video_formats->list->native |= 1;
+  else if (v_native == GST_WFD_VIDEO_HH_RESOLUTION)
+    msg->direct_video_formats->list->native |= 2;
+
+  /* Preferred-format variant: display-mode support flag is cleared. */
+  msg->direct_video_formats->list->preferred_display_mode_supported = 0;
+  msg->direct_video_formats->list->H264_codec.profile = v_profile;
+  msg->direct_video_formats->list->H264_codec.level = v_level;
+  msg->direct_video_formats->list->H264_codec.max_hres = v_max_height;
+  msg->direct_video_formats->list->H264_codec.max_vres = v_max_width;
+  msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support =
+      v_cea_resolution;
+  msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support =
+      v_vesa_resolution;
+  msg->direct_video_formats->list->H264_codec.misc_params.HH_Support =
+      v_hh_resolution;
+  msg->direct_video_formats->list->H264_codec.misc_params.latency = v_latency;
+  msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size =
+      min_slice_size;
+  msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params =
+      slice_enc_params;
+  msg->direct_video_formats->list->H264_codec.misc_params.
+      frame_rate_control_support = frame_rate_control;
+  return GST_WFD_OK;
+}
+
+/**
+ * gst_wfd_message_get_supported_direct_video_format:
+ * Reads the wfd2_video_formats entry back from @msg. @v_codec is always
+ * reported as H.264; the "native" field is unpacked into the table type
+ * (low 3 bits) and a single-bit resolution mask. Note max_hres is
+ * returned via @v_max_height and max_vres via @v_max_width, matching
+ * the setter's convention.
+ * Returns GST_WFD_OK, or GST_WFD_EINVAL when @msg or its video format
+ * entry is not set.
+ */
+GstWFDResult
+gst_wfd_message_get_supported_direct_video_format (GstWFDMessage * msg,
+    GstWFDVideoCodecs * v_codec,
+    GstWFDVideoNativeResolution * v_native,
+    guint64 * v_native_resolution,
+    guint64 * v_cea_resolution,
+    guint64 * v_vesa_resolution,
+    guint64 * v_hh_resolution,
+    guint * v_profile,
+    guint * v_level,
+    guint32 * v_latency,
+    guint32 * v_max_height,
+    guint32 * v_max_width,
+    guint32 * min_slice_size,
+    guint32 * slice_enc_params, guint * frame_rate_control)
+{
+  guint nativeindex = 0;
+
+  g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL);
+  /* Bail out instead of dereferencing NULL when no wfd2_video_formats
+   * attribute has been parsed or set yet. */
+  g_return_val_if_fail (msg->direct_video_formats != NULL, GST_WFD_EINVAL);
+  g_return_val_if_fail (msg->direct_video_formats->list != NULL,
+      GST_WFD_EINVAL);
+
+  *v_codec = GST_WFD_VIDEO_H264;
+  /* Low 3 bits select the resolution table (CEA/VESA/HH); the remaining
+   * bits are the bit index inside that table. */
+  *v_native = msg->direct_video_formats->list->native & 0x7;
+  nativeindex = msg->direct_video_formats->list->native >> 3;
+  *v_native_resolution = ((guint64) 1) << nativeindex;
+  *v_profile = msg->direct_video_formats->list->H264_codec.profile;
+  *v_level = msg->direct_video_formats->list->H264_codec.level;
+  *v_max_height = msg->direct_video_formats->list->H264_codec.max_hres;
+  *v_max_width = msg->direct_video_formats->list->H264_codec.max_vres;
+  *v_cea_resolution =
+      msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support;
+  *v_vesa_resolution =
+      msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support;
+  *v_hh_resolution =
+      msg->direct_video_formats->list->H264_codec.misc_params.HH_Support;
+  *v_latency = msg->direct_video_formats->list->H264_codec.misc_params.latency;
+  *min_slice_size =
+      msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size;
+  *slice_enc_params =
+      msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params;
+  *frame_rate_control =
+      msg->direct_video_formats->list->H264_codec.
+      misc_params.frame_rate_control_support;
+  return GST_WFD_OK;
+}
+
+/**
+ * gst_wfd_message_get_preferred_direct_video_format:
+ * Reads the preferred wfd2_video_formats entry back from @msg. @v_codec
+ * is always reported as H.264; the "native" field is unpacked into the
+ * table type (low 3 bits) and a single-bit resolution mask. max_hres is
+ * returned via @v_max_height and max_vres via @v_max_width, matching
+ * the setter's convention.
+ * Returns GST_WFD_OK, or GST_WFD_EINVAL when @msg or its video format
+ * entry is not set.
+ */
+GstWFDResult
+gst_wfd_message_get_preferred_direct_video_format (GstWFDMessage * msg,
+    GstWFDVideoCodecs * v_codec,
+    GstWFDVideoNativeResolution * v_native,
+    guint64 * v_native_resolution,
+    guint64 * v_cea_resolution,
+    guint64 * v_vesa_resolution,
+    guint64 * v_hh_resolution,
+    GstWFDVideoH264Profile * v_profile,
+    GstWFDVideoH264Level * v_level,
+    guint32 * v_latency,
+    guint32 * v_max_height,
+    guint32 * v_max_width,
+    guint32 * min_slice_size,
+    guint32 * slice_enc_params, guint * frame_rate_control)
+{
+  guint nativeindex = 0;
+
+  g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL);
+  /* Bail out instead of dereferencing NULL when no wfd2_video_formats
+   * attribute has been parsed or set yet; mirrors the supported getter. */
+  g_return_val_if_fail (msg->direct_video_formats != NULL, GST_WFD_EINVAL);
+  g_return_val_if_fail (msg->direct_video_formats->list != NULL,
+      GST_WFD_EINVAL);
+
+  *v_codec = GST_WFD_VIDEO_H264;
+  /* Low 3 bits select the resolution table (CEA/VESA/HH); the remaining
+   * bits are the bit index inside that table. */
+  *v_native = msg->direct_video_formats->list->native & 0x7;
+  nativeindex = msg->direct_video_formats->list->native >> 3;
+  *v_native_resolution = ((guint64) 1) << nativeindex;
+  *v_profile = msg->direct_video_formats->list->H264_codec.profile;
+  *v_level = msg->direct_video_formats->list->H264_codec.level;
+  *v_max_height = msg->direct_video_formats->list->H264_codec.max_hres;
+  *v_max_width = msg->direct_video_formats->list->H264_codec.max_vres;
+  *v_cea_resolution =
+      msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support;
+  *v_vesa_resolution =
+      msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support;
+  *v_hh_resolution =
+      msg->direct_video_formats->list->H264_codec.misc_params.HH_Support;
+  *v_latency = msg->direct_video_formats->list->H264_codec.misc_params.latency;
+  *min_slice_size =
+      msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size;
+  *slice_enc_params =
+      msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params;
+  *frame_rate_control =
+      msg->direct_video_formats->list->H264_codec.
+      misc_params.frame_rate_control_support;
+  return GST_WFD_OK;
+}
+
+GstWFDResult
gst_wfd_message_set_display_edid (GstWFDMessage * msg,
gboolean edid_supported, guint32 edid_blockcount, gchar * edid_playload)
{
return GST_WFD_OK;
}
+
+/**
+ * gst_wfd_message_set_direct_streaming_mode:
+ * @msg: a #GstWFDMessage
+ * @enable: whether direct streaming mode is active
+ *
+ * Records the wfd2_direct_streaming_mode state on @msg, allocating the
+ * holder structure on first use.
+ *
+ * Returns: GST_WFD_OK on success, GST_WFD_EINVAL when @msg is NULL.
+ */
+GstWFDResult
+gst_wfd_message_set_direct_streaming_mode(GstWFDMessage *msg, gboolean enable)
+{
+  GstWFD2DirectStreamingMode *mode;
+
+  g_return_val_if_fail(msg != NULL, GST_WFD_EINVAL);
+
+  mode = msg->direct_mode;
+  if (mode == NULL) {
+    mode = g_new0(GstWFD2DirectStreamingMode, 1);
+    msg->direct_mode = mode;
+  }
+
+  mode->direct_mode = enable;
+
+  return GST_WFD_OK;
+}
+
GstWFDResult
gst_wfd_messge_set_preferred_tcp_ports (GstWFDMessage *msg,
GstWFDRTSPTransMode trans,
#define GST_STRING_WFD_CONNECTOR_TYPE "wfd_connector_type"
#define GST_STRING_WFD_IDR_REQUEST "wfd_idr_request"
+#define GST_STRING_WFD2_AUDIO_CODECS "wfd2_audio_codecs"
+#define GST_STRING_WFD2_VIDEO_FORMATS "wfd2_video_formats"
+#define GST_STRING_WFD2_DIRECT_STREAMING_MODE "wfd2_direct_streaming_mode"
+
#define GST_STRING_WFD2_TCP_PORTS "wfd2_tcp_ports"
#define GST_STRING_WFD2_BUFFER_LEN "wfd2_buffer_len"
#define GST_STRING_WFD2_AUDIO_STATUS "wfd2_audio_playback_status"
#define GST_STRING_WFD2_VIDEO_STATUS "wfd2_video_playback_status"
+
/**
* GstWFDResult:
* @GST_WFD_OK: A successful return value
GstWFDAudioCodec *list;
} GstWFDAudioCodeclist;
+/* Audio codec list for the wfd2_audio_codecs (direct streaming) attribute. */
+typedef struct {
+ guint count;
+ GstWFDAudioCodec *list;
+} GstWFD2AudioCodeclist;
typedef struct {
guint CEA_Support;
} GstWFDVideoCodeclist;
+/* Video codec list for the wfd2_video_formats (direct streaming) attribute. */
typedef struct {
+ guint count;
+ GstWFDVideoCodec *list;
+} GstWFD2VideoCodeclist;
+
+typedef struct {
guint video_3d_capability;
guint latency;
guint min_slice_size;
} GstWFDIdrRequest;
+/* Holder for the wfd2_direct_streaming_mode attribute; direct_mode is
+ * TRUE when the mode is "active". */
typedef struct {
+ gboolean direct_mode;
+} GstWFD2DirectStreamingMode;
+
+typedef struct {
gchar *profile;
guint32 rtp_port0;
guint32 rtp_port1;
guint vid_bufsize;
guint64 vid_pts;
} GstWFDVideoReport;
+
/**
* GstWFDMessage:
* @version: the protocol version
typedef struct {
GstWFDAudioCodeclist *audio_codecs;
GstWFDVideoCodeclist *video_formats;
+ GstWFD2AudioCodeclist *direct_audio_codecs;
+ GstWFD2VideoCodeclist *direct_video_formats;
GstWFD3DFormats *video_3d_formats;
GstWFDContentProtection *content_protection;
GstWFDDisplayEdid *display_edid;
GstWFDStandby *standby;
GstWFDConnectorType *connector_type;
GstWFDIdrRequest *idr_request;
+ GstWFD2DirectStreamingMode *direct_mode;
GstWFDTCPPorts *tcp_ports;
GstWFDBufferLen *buf_len;
GstWFDAudioReport *audio_status;
guint32 *slice_enc_params,
guint *frame_rate_control);
+GstWFDResult gst_wfd_message_set_supported_direct_audio_format(GstWFDMessage *msg,
+ GstWFDAudioFormats a_codec,
+ guint a_freq, guint a_channels,
+ guint a_bitwidth, guint32 a_latency);
+
+GstWFDResult gst_wfd_message_set_preferred_direct_audio_format(GstWFDMessage *msg,
+ GstWFDAudioFormats a_codec,
+ GstWFDAudioFreq a_freq,
+ GstWFDAudioChannels a_channels,
+ guint a_bitwidth, guint32 a_latency);
+
+GstWFDResult gst_wfd_message_get_supported_direct_audio_format (GstWFDMessage *msg,
+ guint *a_codec,
+ guint *a_freq,
+ guint *a_channels,
+ guint *a_bitwidth,
+ guint32 *a_latency);
+
+GstWFDResult gst_wfd_message_get_preferred_direct_audio_format (GstWFDMessage *msg,
+ GstWFDAudioFormats *a_codec,
+ GstWFDAudioFreq *a_freq,
+ GstWFDAudioChannels *a_channels,
+ guint *a_bitwidth, guint32 *a_latency);
+
+GstWFDResult gst_wfd_message_set_supported_direct_video_format (GstWFDMessage *msg,
+ GstWFDVideoCodecs v_codec,
+ GstWFDVideoNativeResolution v_native,
+ guint64 v_native_resolution,
+ guint64 v_cea_resolution,
+ guint64 v_vesa_resolution,
+ guint64 v_hh_resolution,
+ guint v_profile,
+ guint v_level,
+ guint32 v_latency,
+ guint32 v_max_height,
+ guint32 v_max_width,
+ guint32 min_slice_size,
+ guint32 slice_enc_params,
+ guint frame_rate_control);
+
+GstWFDResult gst_wfd_message_set_preferred_direct_video_format(GstWFDMessage *msg,
+ GstWFDVideoCodecs v_codec,
+ GstWFDVideoNativeResolution v_native,
+ guint64 v_native_resolution,
+ guint64 v_cea_resolution,
+ guint64 v_vesa_resolution,
+ guint64 v_hh_resolution,
+ GstWFDVideoH264Profile v_profile,
+ GstWFDVideoH264Level v_level,
+ guint32 v_latency,
+ guint32 v_max_height,
+ guint32 v_max_width,
+ guint32 min_slice_size,
+ guint32 slice_enc_params,
+ guint frame_rate_control);
+
+GstWFDResult gst_wfd_message_get_supported_direct_video_format(GstWFDMessage *msg,
+ GstWFDVideoCodecs *v_codec,
+ GstWFDVideoNativeResolution *v_native,
+ guint64 *v_native_resolution,
+ guint64 *v_cea_resolution,
+ guint64 *v_vesa_resolution,
+ guint64 *v_hh_resolution,
+ guint *v_profile,
+ guint *v_level,
+ guint32 *v_latency,
+ guint32 *v_max_height,
+ guint32 *v_max_width,
+ guint32 *min_slice_size,
+ guint32 *slice_enc_params,
+ guint *frame_rate_control);
+
+GstWFDResult gst_wfd_message_get_preferred_direct_video_format(GstWFDMessage *msg,
+ GstWFDVideoCodecs *v_codec,
+ GstWFDVideoNativeResolution *v_native,
+ guint64 *v_native_resolution,
+ guint64 *v_cea_resolution,
+ guint64 *v_vesa_resolution,
+ guint64 *v_hh_resolution,
+ GstWFDVideoH264Profile *v_profile,
+ GstWFDVideoH264Level *v_level,
+ guint32 *v_latency,
+ guint32 *v_max_height,
+ guint32 *v_max_width,
+ guint32 *min_slice_size,
+ guint32 *slice_enc_params,
+ guint *frame_rate_control);
+
GstWFDResult gst_wfd_message_set_display_edid (GstWFDMessage *msg,
gboolean edid_supported,
guint32 edid_blockcount,
GstWFDResult gst_wfd_message_get_av_format_change_timing(GstWFDMessage *msg,
guint64 *PTS,
guint64 *DTS);
+
+GstWFDResult gst_wfd_message_set_direct_streaming_mode(GstWFDMessage *msg,
+ gboolean enable);
+
GstWFDResult gst_wfd_messge_set_preferred_tcp_ports (GstWFDMessage *msg,
GstWFDRTSPTransMode trans,
GstWFDRTSPProfile profile,
guint32 crtp_port0;
guint32 crtp_port1;
+ gboolean direct_streaming_supported;
+ gint direct_streaming_state;
+ guint8 direct_detected_video_codec;
+ guint8 direct_detected_audio_codec;
+
gboolean protection_enabled;
GstWFDHDCPProtection hdcp_version;
guint32 hdcp_tcpport;
static GstRTSPResult handle_M4_message (GstRTSPWFDClient * client);
static GstRTSPResult handle_M16_message (GstRTSPWFDClient * client);
+static GstRTSPResult handle_M4_direct_streaming_message (GstRTSPWFDClient * client);
+
G_DEFINE_TYPE (GstRTSPWFDClient, gst_rtsp_wfd_client, GST_TYPE_RTSP_CLIENT);
static void
priv->stats_timer_id = -1;
priv->rtcp_stats_enabled = FALSE;
memset (&priv->stats, 0x00, sizeof (GstRTSPClientRTPStats));
+
+ priv->direct_streaming_supported = FALSE;
+ priv->direct_streaming_state = 0;
+
priv->sink_user_agent = NULL;
priv->ts_mode = WFD_TS_UDP;
error:
return GST_RTSP_ERROR;
}
+
static void
handle_wfd_response (GstRTSPClient * client, GstRTSPContext * ctx)
{
}
}
+ if (msg->direct_video_formats) {
+ priv->direct_streaming_supported = TRUE;
+ }
+
/* Get the Video formats supported by WFDSink */
if (msg->video_formats && msg->video_formats->count > 0) {
wfd_res =
M3_REQ_MSG,
M3_RES_MSG,
M4_REQ_MSG,
+ M4_DS_REQ_MSG,
M4_RES_MSG,
M5_REQ_MSG,
TEARDOWN_TRIGGER,
goto error;
}
+ /* set the supported audio formats by the WFD server for direct streaming */
+ wfd_res =
+ gst_wfd_message_set_supported_direct_audio_format (msg, GST_WFD_AUDIO_UNKNOWN,
+ GST_WFD_FREQ_UNKNOWN, GST_WFD_CHANNEL_UNKNOWN, 0, 0);
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (client,
+ "Failed to set supported audio formats for direct streaming on wfd message...");
+ goto error;
+ }
+
+ /* set the supported Video formats by the WFD server for direct streaming */
+ wfd_res =
+ gst_wfd_message_set_supported_direct_video_format (msg, GST_WFD_VIDEO_UNKNOWN,
+ GST_WFD_VIDEO_CEA_RESOLUTION, GST_WFD_CEA_UNKNOWN, GST_WFD_CEA_UNKNOWN,
+ GST_WFD_VESA_UNKNOWN, GST_WFD_HH_UNKNOWN, GST_WFD_H264_UNKNOWN_PROFILE,
+ GST_WFD_H264_LEVEL_UNKNOWN, 0, 0, 0, 0, 0, 0);
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (client,
+ "Failed to set supported video formats for direct streaming on wfd message...");
+ goto error;
+ }
wfd_res = gst_wfd_message_set_display_edid (msg, 0, 0, NULL);
if (wfd_res != GST_WFD_OK) {
GST_ERROR_OBJECT (client,
/* Parameters for the preffered video formats */
GstWFDVideoCodecs tvideocodec = GST_WFD_VIDEO_UNKNOWN;
- GstWFDVideoCEAResolution tcCEAResolution = GST_WFD_CEA_UNKNOWN;
- GstWFDVideoVESAResolution tcVESAResolution = GST_WFD_VESA_UNKNOWN;
- GstWFDVideoHHResolution tcHHResolution = GST_WFD_HH_UNKNOWN;
+ guint64 tcCEAResolution = GST_WFD_CEA_UNKNOWN;
+ guint64 tcVESAResolution = GST_WFD_VESA_UNKNOWN;
+ guint64 tcHHResolution = GST_WFD_HH_UNKNOWN;
GstWFDVideoH264Profile tcProfile;
GstWFDVideoH264Level tcLevel;
guint64 resolution_supported = 0;
goto error;
}
+ if (priv->direct_streaming_supported) {
+ wfd_res =
+ gst_wfd_message_set_preferred_direct_audio_format (msg, taudiocodec, taudiofreq,
+ taudiochannels, priv->cBitwidth, priv->caLatency);
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (priv, "Failed to set preffered audio formats for direct streaming...");
+ goto error;
+ }
+ }
+
/* Set the preffered video formats */
tvideocodec = wfd_get_preferred_video_codec (priv->video_codec, priv->cvCodec);
GST_INFO_OBJECT (priv, "Set the video formats. source codec %d, sink codec %d, Negotiated code %d",
priv->cCEAResolution, priv->video_native_resolution, &priv->cMaxWidth,
&priv->cMaxHeight, &priv->cFramerate, &priv->cInterleaved);
GST_DEBUG
- ("wfd negotiated resolution: %08x, width: %d, height: %d, framerate: %d, interleaved: %d",
+ ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d",
tcCEAResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate,
priv->cInterleaved);
} else if (priv->video_native_resolution == GST_WFD_VIDEO_VESA_RESOLUTION) {
&priv->cMaxWidth, &priv->cMaxHeight, &priv->cFramerate,
&priv->cInterleaved);
GST_DEBUG
- ("wfd negotiated resolution: %08x, width: %d, height: %d, framerate: %d, interleaved: %d",
+ ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d",
tcVESAResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate,
priv->cInterleaved);
} else if (priv->video_native_resolution == GST_WFD_VIDEO_HH_RESOLUTION) {
priv->cHHResolution, priv->video_native_resolution, &priv->cMaxWidth,
&priv->cMaxHeight, &priv->cFramerate, &priv->cInterleaved);
GST_DEBUG
- ("wfd negotiated resolution: %08x, width: %d, height: %d, framerate: %d, interleaved: %d",
+ ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d",
tcHHResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate,
priv->cInterleaved);
}
goto error;
}
+ if (priv->direct_streaming_supported) {
+ wfd_res =
+ gst_wfd_message_set_preferred_direct_video_format (msg, priv->cvCodec,
+ priv->video_native_resolution, GST_WFD_CEA_UNKNOWN, tcCEAResolution,
+ tcVESAResolution, tcHHResolution, tcProfile, tcLevel, priv->cvLatency,
+ priv->cMaxWidth, priv->cMaxHeight, priv->cmin_slice_size,
+ priv->cslice_enc_params, priv->cframe_rate_control);
+
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (client, "Failed to set preffered video formats for direct streaming...");
+ goto error;
+ }
+ }
+
/* set the preffered RTP ports for the WFD server */
wfd_res =
gst_wfd_messge_set_preferred_rtp_ports (msg, GST_WFD_RTSP_TRANS_RTP,
} else {
*len = strlen (*data);
}
+ } else if (msg_type == M4_DS_REQ_MSG) {
+ GstRTSPUrl *url = NULL;
+
+ GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client);
+ GstRTSPConnection *connection =
+ gst_rtsp_client_get_connection (parent_client);
+
+ /* Parameters for the preffered audio formats */
+ GstWFDAudioFreq taudiofreq = GST_WFD_FREQ_UNKNOWN;
+ GstWFDAudioChannels taudiochannels = GST_WFD_CHANNEL_UNKNOWN;
+ /* Parameters for the preffered video formats */
+ guint64 tcCEAResolution = GST_WFD_CEA_UNKNOWN;
+ guint64 tcVESAResolution = GST_WFD_VESA_UNKNOWN;
+ guint64 tcHHResolution = GST_WFD_HH_UNKNOWN;
+ GstWFDVideoH264Profile tcProfile;
+ GstWFDVideoH264Level tcLevel;
+ guint64 resolution_supported = 0;
+
+ url = gst_rtsp_connection_get_url (connection);
+ if (url == NULL) {
+ GST_ERROR_OBJECT (client, "Failed to get connection URL");
+ return;
+ }
+
+ /* create M4 for direct streaming request to be sent */
+ wfd_res = gst_wfd_message_new (&msg);
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (client, "Failed to create wfd message...");
+ goto error;
+ }
+
+ wfd_res = gst_wfd_message_init (msg);
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (client, "Failed to init wfd message...");
+ goto error;
+ }
+
+ buf = g_string_new ("");
+ if (buf == NULL)
+ goto error;
+
+ g_string_append_printf (buf, "rtsp://");
+
+ if (priv->host_address) {
+ g_string_append (buf, priv->host_address);
+ } else {
+ GST_ERROR_OBJECT (client, "Failed to get host address");
+ if (buf) g_string_free (buf, TRUE);
+ goto error;
+ }
+
+ g_string_append_printf (buf, "/wfd1.0/streamid=0");
+ wfd_res =
+ gst_wfd_message_set_presentation_url (msg, g_string_free (buf, FALSE),
+ NULL);
+
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (client, "Failed to set presentation url");
+ goto error;
+ }
+
+ if (priv->cFreq & GST_WFD_FREQ_48000)
+ taudiofreq = GST_WFD_FREQ_48000;
+ else if (priv->cFreq & GST_WFD_FREQ_44100)
+ taudiofreq = GST_WFD_FREQ_44100;
+ priv->cFreq = taudiofreq;
+
+ /* TODO-WFD: Currently only 2 channels is present */
+ if (priv->cChanels & GST_WFD_CHANNEL_8)
+ taudiochannels = GST_WFD_CHANNEL_2;
+ else if (priv->cChanels & GST_WFD_CHANNEL_6)
+ taudiochannels = GST_WFD_CHANNEL_2;
+ else if (priv->cChanels & GST_WFD_CHANNEL_4)
+ taudiochannels = GST_WFD_CHANNEL_2;
+ else if (priv->cChanels & GST_WFD_CHANNEL_2)
+ taudiochannels = GST_WFD_CHANNEL_2;
+ priv->cChanels = taudiochannels;
+
+ wfd_res =
+ gst_wfd_message_set_preferred_direct_audio_format (msg,
+ priv->direct_detected_audio_codec, taudiofreq,
+ taudiochannels, priv->cBitwidth, priv->caLatency);
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (priv, "Failed to set preffered audio formats for direct streaming...");
+ goto error;
+ }
+
+ /* Set the preffered video formats */
+ priv->cProfile = tcProfile = GST_WFD_H264_BASE_PROFILE;
+ priv->cLevel = tcLevel = GST_WFD_H264_LEVEL_3_1;
+
+ resolution_supported = priv->video_resolution_supported;
+
+ /* TODO-WFD: Need to verify this logic
+ if(priv->edid_supported) {
+ if (priv->edid_hres < 1920) resolution_supported = resolution_supported & 0x8C7F;
+ if (priv->edid_hres < 1280) resolution_supported = resolution_supported & 0x1F;
+ if (priv->edid_hres < 720) resolution_supported = resolution_supported & 0x01;
+ }
+ */
+
+ if (priv->video_native_resolution == GST_WFD_VIDEO_CEA_RESOLUTION) {
+ tcCEAResolution =
+ wfd_get_preferred_resolution (resolution_supported,
+ priv->cCEAResolution, priv->video_native_resolution, &priv->cMaxWidth,
+ &priv->cMaxHeight, &priv->cFramerate, &priv->cInterleaved);
+ GST_DEBUG
+ ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d",
+ tcCEAResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate,
+ priv->cInterleaved);
+ } else if (priv->video_native_resolution == GST_WFD_VIDEO_VESA_RESOLUTION) {
+ tcVESAResolution =
+ wfd_get_preferred_resolution (resolution_supported,
+ priv->cVESAResolution, priv->video_native_resolution,
+ &priv->cMaxWidth, &priv->cMaxHeight, &priv->cFramerate,
+ &priv->cInterleaved);
+ GST_DEBUG
+ ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d",
+ tcVESAResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate,
+ priv->cInterleaved);
+ } else if (priv->video_native_resolution == GST_WFD_VIDEO_HH_RESOLUTION) {
+ tcHHResolution =
+ wfd_get_preferred_resolution (resolution_supported,
+ priv->cHHResolution, priv->video_native_resolution, &priv->cMaxWidth,
+ &priv->cMaxHeight, &priv->cFramerate, &priv->cInterleaved);
+ GST_DEBUG
+ ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d",
+ tcHHResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate,
+ priv->cInterleaved);
+ }
+
+ wfd_res =
+ gst_wfd_message_set_preferred_direct_video_format (msg,
+ priv->direct_detected_video_codec,
+ priv->video_native_resolution, GST_WFD_CEA_UNKNOWN, tcCEAResolution,
+ tcVESAResolution, tcHHResolution, tcProfile, tcLevel, priv->cvLatency,
+ priv->cMaxWidth, priv->cMaxHeight, priv->cmin_slice_size,
+ priv->cslice_enc_params, priv->cframe_rate_control);
+
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (client, "Failed to set preffered video formats for direct streaming...");
+ goto error;
+ }
+
+ wfd_res =
+ gst_wfd_message_set_direct_streaming_mode (msg, TRUE);
+
+ if (wfd_res != GST_WFD_OK) {
+ GST_ERROR_OBJECT (client, "Failed to set preffered video formats for direct streaming...");
+ goto error;
+ }
+
+ *data = gst_wfd_message_as_text (msg);
+ if (*data == NULL) {
+ GST_ERROR_OBJECT (client, "Failed to get wfd message as text...");
+ goto error;
+ } else {
+ *len = strlen (*data);
+ }
} else if (msg_type == TS_REQ_MSG) {
/* create transport switch request to be sent */
wfd_res = gst_wfd_message_new (&msg);
priv->crtp_port1 = port;
}
+/* "direct-stream-end" signal handler: the direct (URI) stream has finished,
+ * so fall back to normal mirroring by resending M4 with the regular formats. */
+static void
+direct_stream_end_cb (GstRTSPMediaFactoryWFD *factory, void *user_data)
+{
+  GstRTSPWFDClient *client = GST_RTSP_WFD_CLIENT_CAST (user_data);
+  GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client);
+  GstRTSPResult res = GST_RTSP_OK;
+
+  /* mark direct streaming inactive before renegotiating with the sink */
+  priv->direct_streaming_state = 0;
+  res = handle_M4_message (client);
+
+  if (res != GST_RTSP_OK) {
+    GST_ERROR_OBJECT (client, "Failed to send message for direct streaming");
+  }
+}
+
+/**
+* gst_rtsp_wfd_client_set_direct_streaming:
+* @client: client object
+* @direct_streaming: nonzero to enable direct streaming, 0 to disable it
+* @urisrc: URI of the media to stream directly (only used when enabling)
+*
+* Toggles direct (URI) streaming on the WFD media factory and, when
+* enabling, probes the media's codecs and renegotiates with the sink
+* via an M4 request.
+*
+* Returns: a #GstRTSPResult.
+*/
+GstRTSPResult
+gst_rtsp_wfd_client_set_direct_streaming(GstRTSPWFDClient * client,
+    gint direct_streaming, gchar *urisrc)
+{
+  GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client);
+  GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client);
+  GstRTSPResult res = GST_RTSP_OK;
+
+  GstRTSPMediaFactory *factory = NULL;
+  GstRTSPMountPoints *mount_points = NULL;
+  gchar *path = NULL;
+  gint matched = 0;
+
+  if (priv->direct_streaming_supported == FALSE) {
+    GST_ERROR_OBJECT (client, "Direct streaming not supported by client");
+    return GST_RTSP_ERROR;
+  }
+
+  if (priv->direct_streaming_state == direct_streaming) {
+    GST_DEBUG_OBJECT (client, "Direct streaming state not changed");
+    return res;
+  }
+
+  if (!(mount_points = gst_rtsp_client_get_mount_points (parent_client))) {
+    res = GST_RTSP_ERROR;
+    GST_ERROR_OBJECT (client, "Failed to set direct streaing: no mount points...");
+    goto no_mount_points;
+  }
+
+  path = g_strdup(WFD_MOUNT_POINT);
+  if (!path) {
+    res = GST_RTSP_ERROR;
+    GST_ERROR_OBJECT (client, "Failed to set direct streaing: no path...");
+    goto no_path;
+  }
+
+  if (!(factory = gst_rtsp_mount_points_match (mount_points,
+          path, &matched))) {
+    GST_ERROR_OBJECT (client, "Failed to set direct streaing: no factory...");
+    res = GST_RTSP_ERROR;
+    goto no_factory;
+  }
+
+  if (direct_streaming) {
+    /* probe the URI so we know which codecs the media actually carries */
+    res = gst_rtsp_media_factory_wfd_uri_type_find (factory,
+        urisrc, &priv->direct_detected_video_codec,
+        &priv->direct_detected_audio_codec);
+
+    if (res != GST_RTSP_OK) {
+      GST_ERROR_OBJECT (client, "Failed to create direct streaming pipeline");
+      goto no_pipe;
+    }
+
+    /* codec checks only make sense when enabling: on the disable path the
+     * detected codecs are stale or unset and must not block teardown */
+    if (!(priv->direct_detected_video_codec & GST_WFD_VIDEO_H264)) {
+      GST_ERROR_OBJECT (client, "Detected video codec not supported");
+      res = GST_RTSP_ERROR;
+      goto no_pipe;
+    }
+
+    if (!(priv->direct_detected_audio_codec & GST_WFD_AUDIO_AAC ||
+        priv->direct_detected_audio_codec & GST_WFD_AUDIO_LPCM ||
+        priv->direct_detected_audio_codec & GST_WFD_AUDIO_AC3)) {
+      GST_ERROR_OBJECT (client, "Detected audio codec not supported");
+      res = GST_RTSP_ERROR;
+      goto no_pipe;
+    }
+  }
+
+  /* avoid stacking duplicate handlers when toggling repeatedly */
+  g_signal_handlers_disconnect_by_func (GST_RTSP_MEDIA_FACTORY_WFD_CAST (factory),
+      G_CALLBACK (direct_stream_end_cb), client);
+  g_signal_connect_object (GST_RTSP_MEDIA_FACTORY_WFD_CAST (factory), "direct-stream-end",
+      G_CALLBACK (direct_stream_end_cb), client, 0);
+
+  res = gst_rtsp_media_factory_wfd_set_direct_streaming (factory,
+      direct_streaming, urisrc);
+
+  if (res != GST_RTSP_OK) {
+    GST_ERROR_OBJECT (client, "Failed to create direct streaming pipeline");
+    goto no_pipe;
+  }
+
+  if (direct_streaming) {
+    res = handle_M4_direct_streaming_message (client);
+
+    if (res != GST_RTSP_OK) {
+      GST_ERROR_OBJECT (client, "Failed to send message for direct streaming");
+      goto no_pipe;
+    }
+  }
+
+  priv->direct_streaming_state = direct_streaming;
+
+  /* fall-through cleanup: these labels also run on the success path */
+no_pipe:
+  g_object_unref(factory);
+no_factory:
+  g_free(path);
+no_path:
+  g_object_unref(mount_points);
+no_mount_points:
+  return res;
+}
+
+/**
+* prepare_direct_streaming_request:
+* @client: client object
+* @request: request message to be prepared
+*
+* Builds the M4 SET_PARAMETER request that switches the sink to the
+* direct-streaming A/V formats.
+*
+* Returns: a #GstRTSPResult.
+*/
+static GstRTSPResult
+prepare_direct_streaming_request (GstRTSPWFDClient * client, GstRTSPMessage * request)
+{
+  GstRTSPResult res = GST_RTSP_OK;
+  gchar *msg = NULL;
+  guint msglen = 0;
+  gchar *len_str = NULL;
+
+  GST_DEBUG_OBJECT (client, "Preparing request for direct streaming");
+
+  /* initialize the request */
+  res = gst_rtsp_message_init_request (request, GST_RTSP_SET_PARAMETER,
+      (gchar *) "rtsp://localhost/wfd1.0");
+  if (res < 0) {
+    GST_ERROR ("init request failed");
+    return res;
+  }
+
+  /* add content type */
+  res =
+      gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE,
+      "text/parameters");
+  if (res != GST_RTSP_OK) {
+    GST_ERROR_OBJECT (client, "Failed to add header to rtsp request...");
+    goto error;
+  }
+
+  _set_wfd_message_body (client, M4_DS_REQ_MSG, &msg, &msglen);
+  if (msg == NULL) {
+    GST_ERROR_OBJECT (client, "Failed to get wfd message body...");
+    goto error;
+  }
+  GST_DEBUG ("M4 for direct streaming server side message body: %s", msg);
+
+  /* add content-length header; the header value is copied by the message,
+   * so our temporary string must be freed (the GString was leaked before) */
+  len_str = g_strdup_printf ("%u", msglen);
+  res =
+      gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_LENGTH,
+      len_str);
+  g_free (len_str);
+  if (res != GST_RTSP_OK) {
+    GST_ERROR_OBJECT (client, "Failed to add header to rtsp message...");
+    goto error;
+  }
+
+  res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen);
+  if (res != GST_RTSP_OK) {
+    GST_ERROR_OBJECT (client, "Failed to add header to rtsp message...");
+    goto error;
+  }
+
+  g_free (msg);
+
+  return res;
+
+error:
+  g_free (msg);   /* msg is non-NULL when a later step failed */
+  return GST_RTSP_ERROR;
+}
+
+/* Builds and sends the direct-streaming M4 SET_PARAMETER request. */
+static GstRTSPResult
+handle_M4_direct_streaming_message (GstRTSPWFDClient * client)
+{
+  GstRTSPMessage request = { 0 };
+  GstRTSPResult res = prepare_direct_streaming_request (client, &request);
+
+  if (GST_RTSP_OK != res) {
+    GST_ERROR_OBJECT (client, "Failed to prepare M4 request....\n");
+    return res;
+  }
+
+  GST_DEBUG_OBJECT (client, "Sending SET_PARAMETER request message for direct streaming (M4)...");
+
+  gst_send_request (client, NULL, &request);
+
+  return res;
+}
/**
* prepare_transport_switch_request:
GstRTSPWFDClient * client, guint8 video_codec);
GstRTSPResult gst_rtsp_wfd_client_set_audio_codec (
GstRTSPWFDClient * client, guint8 audio_codec);
+GstRTSPResult gst_rtsp_wfd_client_set_direct_streaming(
+ GstRTSPWFDClient * client, gint direct_streaming, gchar *urisrc);
GstRTSPResult gst_prepare_request (GstRTSPWFDClient * client,
GstRTSPMessage * request, GstRTSPMethod method, gchar * url);
void gst_send_request (GstRTSPWFDClient * client,
*/
#include <stdio.h>
+#include <string.h>
+
#include "rtsp-media-factory-wfd.h"
#include "gstwfdmessage.h"
#include "rtsp-media-ext.h"
#define GST_RTSP_MEDIA_FACTORY_WFD_LOCK(f) (g_mutex_lock(GST_RTSP_MEDIA_FACTORY_WFD_GET_LOCK(f)))
#define GST_RTSP_MEDIA_FACTORY_WFD_UNLOCK(f) (g_mutex_unlock(GST_RTSP_MEDIA_FACTORY_WFD_GET_LOCK(f)))
+/* Result of running typefind over a candidate direct-streaming URI:
+ * which codecs were found and which element factories can handle it. */
+typedef struct _GstRTPSMediaWFDTypeFindResult GstRTPSMediaWFDTypeFindResult;
+
+struct _GstRTPSMediaWFDTypeFindResult{
+  gint h264_found;               /* nonzero when H.264 video was detected */
+  gint aac_found;                /* nonzero when AAC audio was detected */
+  gint ac3_found;                /* nonzero when AC3 audio was detected */
+  GstElementFactory *demux_fact; /* factory able to demux the container */
+  GstElementFactory *src_fact;   /* factory able to read from the URI */
+};
+
+typedef struct _GstRTSPMediaWFDDirectPipelineData GstRTSPMediaWFDDirectPipelineData;
+
+/* Elements making up the direct-streaming (URI playback) pipeline. */
+struct _GstRTSPMediaWFDDirectPipelineData {
+  GstBin *pipeline;   /* bin holding the whole direct pipeline */
+  GstElement *ap;     /* audio parser (aacparse) */
+  GstElement *vp;     /* video parser (h264parse) */
+  GstElement *aq;     /* audio queue */
+  GstElement *vq;     /* video queue */
+  GstElement *tsmux;  /* MPEG-TS muxer */
+  GstElement *mux_fs; /* sink initially attached after tsmux; removed once
+                       * the pipeline is spliced into the payloader —
+                       * presumably a fakesink, confirm at creation site */
+  gchar *uri;         /* media URI being streamed directly */
+};
+
+
+
+
struct _GstRTSPMediaFactoryWFDPrivate
{
GMutex lock;
guint video_framerate;
guint video_enc_skip_inbuf_value;
GstElement *video_queue;
+ GstBin *video_srcbin;
GstElement *venc;
guint decide_udp_bitrate[21];
guint8 audio_freq;
guint8 audio_bitrate;
GstElement *audio_queue;
+ GstBin *audio_srcbin;
+
+ GMutex direct_lock;
+ GCond direct_cond;
+ GType decodebin_type;
+ GstBin *discover_pipeline;
+ GstRTPSMediaWFDTypeFindResult res;
+ GstRTSPMediaWFDDirectPipelineData *direct_pipe;
+ GstBin *stream_bin;
+ GstElement *mux;
+ GstElement *mux_queue;
+ GstElement *pay;
+ GstElement *stub_fs;
+ GMainLoop *discover_loop;
guint64 video_resolution_supported;
{
SIGNAL_MEDIA_CONSTRUCTED,
SIGNAL_MEDIA_CONFIGURE,
+ SIGNAL_DIRECT_STREAMING_END,
SIGNAL_LAST
};
GST_DEBUG_CATEGORY_STATIC (rtsp_media_wfd_debug);
#define GST_CAT_DEFAULT rtsp_media_wfd_debug
+static guint gst_rtsp_media_factory_wfd_signals[SIGNAL_LAST] = { 0 };
+
static void gst_rtsp_media_factory_wfd_get_property (GObject * object,
guint propid, GValue * value, GParamSpec * pspec);
static void gst_rtsp_media_factory_wfd_set_property (GObject * object,
gobject_class->set_property = gst_rtsp_media_factory_wfd_set_property;
gobject_class->finalize = gst_rtsp_media_factory_wfd_finalize;
+ gst_rtsp_media_factory_wfd_signals[SIGNAL_DIRECT_STREAMING_END] =
+ g_signal_new ("direct-stream-end", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPMediaFactoryWFDClass,
+ direct_stream_end), NULL, NULL, g_cclosure_marshal_generic,
+ G_TYPE_NONE, 0, G_TYPE_NONE);
+
factory_class->construct = rtsp_media_factory_wfd_construct;
factory_class->create_element = rtsp_media_factory_wfd_create_element;
priv->video_height = 480;
priv->video_framerate = 30;
priv->video_enc_skip_inbuf_value = 5;
+ priv->video_srcbin = NULL;
priv->min_udp_bitrate = 938861;
priv->max_udp_bitrate = 1572864;
priv->decided_udp_bitrate = FALSE;
priv->audio_do_timestamp = FALSE;
priv->audio_channels = GST_WFD_CHANNEL_2;
priv->audio_freq = GST_WFD_FREQ_48000;
+ priv->audio_srcbin = NULL;
+
+ g_mutex_init (&priv->direct_lock);
+ g_cond_init (&priv->direct_cond);
+
+ priv->discover_pipeline = NULL;
+ priv->direct_pipe = NULL;
+ memset (&priv->res, 0x00, sizeof (GstRTPSMediaWFDTypeFindResult));
+ priv->stream_bin = NULL;
+ priv->mux = NULL;
+ priv->mux_queue = NULL;
+ priv->pay = NULL;
g_mutex_init (&priv->lock);
}
g_free (priv->launch);
g_mutex_clear (&priv->lock);
+ g_mutex_clear (&priv->direct_lock);
+ g_cond_clear (&priv->direct_cond);
+
if (priv->audio_device)
g_free (priv->audio_device);
if (priv->audio_encoder_aac)
return TRUE;
}
+ priv->audio_srcbin = (GstBin *)gst_bin_new ("audio");
+
/* create audio src element */
audiosrc = gst_element_factory_make ("pulsesrc", "audiosrc");
if (!audiosrc) {
goto create_error;
}
- gst_bin_add_many (srcbin, audiosrc, acaps, aenc, aqueue, NULL);
+ gst_bin_add_many (priv->audio_srcbin, audiosrc, acaps, aenc, aqueue, NULL);
+ gst_bin_add (srcbin, GST_ELEMENT (priv->audio_srcbin));
if (!gst_element_link_many (audiosrc, acaps, aenc, aqueue, NULL)) {
GST_ERROR_OBJECT (factory, "Failed to link audio src elements...");
goto create_error;
}
- gst_bin_add_many (srcbin, audiosrc, acaps2, audio_convert, acaps, aqueue,
- NULL);
+ gst_bin_add_many (priv->audio_srcbin, audiosrc, acaps2, audio_convert, acaps, aqueue, NULL);
+ gst_bin_add (srcbin, GST_ELEMENT (priv->audio_srcbin));
if (!gst_element_link_many (audiosrc, acaps2, audio_convert, acaps, aqueue,
NULL)) {
priv = factory->priv;
GST_INFO_OBJECT (factory, "picked videotestsrc as video source");
+ priv->video_srcbin = (GstBin *)gst_bin_new ("video");
videosrc = gst_element_factory_make ("videotestsrc", "videosrc");
if (NULL == videosrc) {
goto create_error;
}
- gst_bin_add_many (srcbin, videosrc, vcaps, videoconvert, venc_caps, venc,
- vparse, vqueue, NULL);
+ gst_bin_add_many (priv->video_srcbin, videosrc, vcaps, videoconvert, venc_caps, venc, vparse, vqueue, NULL);
+ gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin));
if (!gst_element_link_many (videosrc, vcaps, videoconvert, venc_caps, venc,
vparse, vqueue, NULL)) {
GST_ERROR_OBJECT (factory, "Failed to link video src elements...");
return TRUE;
}
+ priv->video_srcbin = (GstBin *)gst_bin_new ("video");
+
videosrc = gst_element_factory_make ("waylandsrc", "videosrc");
if (NULL == videosrc) {
GST_ERROR_OBJECT (factory, "failed to create ximagesrc element");
goto create_error;
}
- gst_bin_add_many (srcbin, videosrc, vcaps, venc, vparse, vqueue, NULL);
+ gst_bin_add_many (priv->video_srcbin, videosrc, vcaps, venc, vparse, vqueue, NULL);
+ gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin));
if (!gst_element_link_many (videosrc, vcaps, venc, vparse, vqueue, NULL)) {
GST_ERROR_OBJECT (factory, "Failed to link video src elements...");
goto create_error;
GstRTSPMediaFactoryWFDPrivate *priv = NULL;
priv = factory->priv;
+ priv->video_srcbin = (GstBin *)gst_bin_new ("video");
videosrc = gst_element_factory_make ("camerasrc", "videosrc");
if (NULL == videosrc) {
goto create_error;
}
- gst_bin_add_many (srcbin, videosrc, vcaps, venc, vparse, vqueue, NULL);
+ gst_bin_add_many (priv->video_srcbin, videosrc, vcaps, venc, vparse, vqueue, NULL);
+ gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin));
if (!gst_element_link_many (videosrc, vcaps, venc, vparse, vqueue, NULL)) {
GST_ERROR_OBJECT (factory, "Failed to link video src elements...");
priv = factory->priv;
GST_INFO_OBJECT (factory, "picked ximagesrc as video source");
+ priv->video_srcbin = (GstBin *)gst_bin_new ("video");
videosrc = gst_element_factory_make ("ximagesrc", "videosrc");
if (NULL == videosrc) {
goto create_error;
}
- gst_bin_add_many (srcbin, videosrc, videoscale, videoconvert, vcaps, venc,
+ gst_bin_add_many (priv->video_srcbin, videosrc, videoscale, videoconvert, vcaps, venc,
venc_caps, vparse, vqueue, NULL);
+ gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin));
if (!gst_element_link_many (videosrc, videoscale, videoconvert, vcaps, venc,
venc_caps, vparse, vqueue, NULL)) {
GST_ERROR_OBJECT (factory, "Failed to link video src elements...");
priv = factory->priv;
GST_INFO_OBJECT (factory, "picked xvimagesrc as video source");
+ priv->video_srcbin = (GstBin *)gst_bin_new ("video");
videosrc = gst_element_factory_make ("xvimagesrc", "videosrc");
if (NULL == videosrc) {
goto create_error;
}
- gst_bin_add_many (srcbin, videosrc, vcaps, venc, vparse, vqueue, NULL);
+ gst_bin_add_many (priv->video_srcbin, videosrc, vcaps, venc, vparse, vqueue, NULL);
+ gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin));
if (!gst_element_link_many (videosrc, vcaps, venc, vparse, vqueue, NULL)) {
GST_ERROR_OBJECT (factory, "Failed to link video src elements...");
goto create_error;
GstPad *srcpad = NULL;
GstPad *mux_vsinkpad = NULL;
GstPad *mux_asinkpad = NULL;
+ GstPad *ghost_pad = NULL;
priv = factory->priv;
GST_ERROR_OBJECT (factory, "Failed to get srcpad from video queue...");
goto create_error;
}
+ ghost_pad = gst_ghost_pad_new ("video_src", srcpad);
+ gst_element_add_pad (GST_ELEMENT (priv->video_srcbin), ghost_pad);
- if (gst_pad_link (srcpad, mux_vsinkpad) != GST_PAD_LINK_OK) {
+ if (gst_pad_link (ghost_pad, mux_vsinkpad) != GST_PAD_LINK_OK) {
GST_ERROR_OBJECT (factory,
"Failed to link video queue src pad & muxer video sink pad...");
goto create_error;
gst_object_unref (mux_vsinkpad);
gst_object_unref (srcpad);
srcpad = NULL;
+ ghost_pad = NULL;
}
GST_INFO_OBJECT (factory, "Check audio codec... %d", priv->audio_codec);
GST_ERROR_OBJECT (factory, "Failed to get srcpad from audio queue...");
goto create_error;
}
+ ghost_pad = gst_ghost_pad_new ("audio_src", srcpad);
+ gst_element_add_pad (GST_ELEMENT (priv->audio_srcbin), ghost_pad);
/* link audio queue's srcpad & muxer sink pad */
- if (gst_pad_link (srcpad, mux_asinkpad) != GST_PAD_LINK_OK) {
+ if (gst_pad_link (ghost_pad, mux_asinkpad) != GST_PAD_LINK_OK) {
GST_ERROR_OBJECT (factory,
"Failed to link audio queue src pad & muxer audio sink pad...");
goto create_error;
GST_DEBUG_OBJECT (factory, "successfully created source bin...");
+ priv->stream_bin = srcbin;
+ priv->mux = gst_object_ref (mux);
+ priv->mux_queue = gst_object_ref (mux_queue);
+ priv->pay = gst_object_ref (payload);
+
return GST_ELEMENT_CAST (srcbin);
create_error:
return NULL;
}
}
+
+/* Relink bookkeeping shared between the pad-probe callbacks and
+ * _rtsp_media_factory_wfd_relink_pipeline().  File-local on purpose:
+ * they were previously exported globals polluting the link namespace. */
+static gint type_detected = FALSE;  /* set once typefind classified the URI */
+static gint linked = FALSE;         /* guarded by priv->direct_lock */
+static gint in_pad_probe;           /* CAS flag: only one probe does the relink */
+
+/* Idle probe on the payloader sink pad: swap the direct pipeline out and
+ * restore the original mirroring chain (srcbins -> mux -> queue -> pay).
+ * Runs while dataflow on the pad is blocked, so relinking is safe. */
+static GstPadProbeReturn
+_rtsp_media_factory_wfd_restore_pipe_probe_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+  GstPad *old_src = NULL;
+  GstPad *sink = NULL;
+  GstPad *old_sink = NULL;
+  GstPad *new_src = NULL;
+  GstRTSPMediaFactoryWFD *factory = NULL;
+  GstRTSPMediaFactoryWFDPrivate *priv = NULL;
+  GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL;
+
+  /* only the first probe invocation may perform the relink */
+  if (!g_atomic_int_compare_and_exchange (&in_pad_probe, FALSE, TRUE))
+    return GST_PAD_PROBE_OK;
+
+  factory = (GstRTSPMediaFactoryWFD *) user_data;
+  priv = factory->priv;
+  pipe_data = priv->direct_pipe;
+
+  /* bring the paused mirroring elements back in sync with the pipeline */
+  gst_element_sync_state_with_parent (GST_ELEMENT(priv->audio_srcbin));
+  gst_element_sync_state_with_parent (GST_ELEMENT(priv->video_srcbin));
+  gst_element_sync_state_with_parent (GST_ELEMENT(priv->mux));
+  gst_element_sync_state_with_parent (GST_ELEMENT(priv->mux_queue));
+
+  /* detach whatever currently feeds the payloader (the direct pipeline) */
+  sink = gst_element_get_static_pad (priv->pay, "sink");
+  old_src = gst_pad_get_peer (sink);
+  gst_pad_unlink (old_src, sink);
+
+  /* detach the mux queue from the stub fakesink and drop the stub */
+  new_src = gst_element_get_static_pad (priv->mux_queue, "src");
+  old_sink = gst_pad_get_peer (new_src);
+  gst_pad_unlink (new_src, old_sink);
+  gst_element_set_state (priv->stub_fs, GST_STATE_NULL);
+  gst_bin_remove ((GstBin *)priv->stream_bin, priv->stub_fs);
+
+  /* reconnect the mirroring chain to the payloader */
+  gst_pad_link (new_src, sink);
+  gst_object_unref (new_src);
+  gst_object_unref (old_sink);
+
+  /* NOTE(review): 'sink' and 'old_src' pad refs appear to be leaked here —
+   * confirm and unref if so */
+  gst_element_set_state (GST_ELEMENT(pipe_data->pipeline), GST_STATE_PAUSED);
+
+  /* signal that new pipeline linked */
+  g_mutex_lock (&priv->direct_lock);
+  g_cond_signal (&priv->direct_cond);
+  linked = TRUE;
+  g_mutex_unlock (&priv->direct_lock);
+
+  return GST_PAD_PROBE_REMOVE;
+}
+
+/* Idle handler: tear down the finished direct pipeline and emit
+ * "direct-stream-end" so mirroring can be renegotiated. */
+static gboolean
+_rtsp_media_factory_wfd_destroy_direct_pipe(void *user_data)
+{
+  GstRTSPMediaFactoryWFD *factory = NULL;
+  GstRTSPMediaFactoryWFDPrivate *priv = NULL;
+  GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL;
+
+  factory = (GstRTSPMediaFactoryWFD *) user_data;
+  priv = factory->priv;
+  pipe_data = priv->direct_pipe;
+
+  if (pipe_data != NULL) {
+    GST_DEBUG_OBJECT (factory, "Deleting pipeline");
+    gst_element_set_state (GST_ELEMENT(pipe_data->pipeline), GST_STATE_NULL);
+    gst_bin_remove ((GstBin *)priv->stream_bin, GST_ELEMENT(pipe_data->pipeline));
+    g_free (pipe_data->uri);    /* was leaked before */
+    g_free (pipe_data);
+    priv->direct_pipe = NULL;   /* avoid dangling pointer on the next toggle */
+  }
+  g_signal_emit (factory,
+      gst_rtsp_media_factory_wfd_signals[SIGNAL_DIRECT_STREAMING_END], 0, NULL);
+  return FALSE;   /* one-shot idle source */
+}
+
+/* "pad-added" handler for the direct pipeline's demuxer: route audio pads
+ * into the audio parser and H.264 video pads into the video parser. */
+static void
+_rtsp_media_factory_wfd_demux_pad_added_cb (GstElement *element,
+    GstPad *pad,
+    gpointer data)
+{
+  GstPad *sinkpad = NULL;
+  GstCaps *caps = gst_pad_get_current_caps (pad);
+  gchar *pad_name = gst_pad_get_name (pad);
+  gchar *pad_caps = NULL;
+  gchar *caps_lower = NULL;
+  GstRTSPMediaFactoryWFD *factory = NULL;
+  GstRTSPMediaFactoryWFDPrivate *priv = NULL;
+  GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL;
+
+  factory = (GstRTSPMediaFactoryWFD *) data;
+  priv = factory->priv;
+  pipe_data = priv->direct_pipe;
+
+  /* a freshly added pad may not have negotiated caps yet */
+  if (caps == NULL) {
+    GST_DEBUG_OBJECT (factory, "no caps on demux pad %s yet", pad_name);
+    g_free (pad_name);
+    return;
+  }
+
+  pad_caps = gst_caps_to_string (caps);
+  /* lowercase once and reuse: the original leaked one copy per strstr call */
+  caps_lower = g_ascii_strdown (pad_caps, -1);
+
+  if (g_strrstr (caps_lower, "audio")) {
+    sinkpad = gst_element_get_static_pad (pipe_data->ap, "sink");
+    if (gst_pad_is_linked (sinkpad)) {
+      gst_object_unref (sinkpad);
+      GST_DEBUG_OBJECT (factory, "pad linked");
+      goto done;
+    }
+    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
+      GST_DEBUG_OBJECT (factory, "can't link demux %s pad", pad_name);
+
+    gst_object_unref (sinkpad);
+    sinkpad = NULL;
+  }
+  if (g_strrstr (caps_lower, "video")) {
+    if (g_strrstr (caps_lower, "h264")) {
+      sinkpad = gst_element_get_static_pad (pipe_data->vp, "sink");
+      if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
+        GST_DEBUG_OBJECT (factory, "can't link demux %s pad", pad_name);
+
+      gst_object_unref (sinkpad);
+      sinkpad = NULL;
+    }
+  }
+
+done:
+  gst_caps_unref (caps);   /* get_current_caps returned a ref; was leaked */
+  g_free (caps_lower);
+  g_free (pad_caps);
+  g_free (pad_name);
+}
+
+/* Idle probe on the payloader sink pad: splice the direct pipeline's TS
+ * muxer into the payloader and park the mirroring chain on a fakesink
+ * while direct streaming runs. */
+static GstPadProbeReturn
+_rtsp_media_factory_wfd_pay_pad_probe_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+  GstPad *old_src = NULL;
+  GstPad *sink = NULL;
+  GstPad *old_sink = NULL;
+  GstPad *new_src = NULL;
+  GstPad *fas_sink = NULL;
+  GstPad *gp = NULL;
+  GstRTSPMediaFactoryWFD *factory = NULL;
+  GstRTSPMediaFactoryWFDPrivate *priv = NULL;
+  GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL;
+
+  /* only the first probe invocation may perform the splice */
+  if (!g_atomic_int_compare_and_exchange (&in_pad_probe, FALSE, TRUE))
+    return GST_PAD_PROBE_OK;
+
+  factory = (GstRTSPMediaFactoryWFD *) user_data;
+  priv = factory->priv;
+  pipe_data = priv->direct_pipe;
+
+  /* unhook the mirroring chain from the payloader */
+  sink = gst_element_get_static_pad (priv->pay, "sink");
+  old_src = gst_pad_get_peer (sink);
+  gst_pad_unlink (old_src, sink);
+
+  /* free the tsmux src pad by dropping its initial sink element */
+  new_src = gst_element_get_static_pad (pipe_data->tsmux, "src");
+  old_sink = gst_pad_get_peer (new_src);
+  gst_pad_unlink (new_src, old_sink);
+  gst_element_set_state (pipe_data->mux_fs, GST_STATE_NULL);
+  gst_bin_remove ((GstBin *)pipe_data->pipeline, pipe_data->mux_fs);
+
+  /* expose the tsmux output on the direct bin and feed it to the payloader */
+  gp = gst_ghost_pad_new ("audio_file", new_src);
+  gst_pad_set_active(gp,TRUE);
+  gst_element_add_pad (GST_ELEMENT (pipe_data->pipeline), gp);
+  gst_pad_link (gp, sink);
+  gst_object_unref (new_src);
+  gst_object_unref (old_sink);
+
+  /* park the mirroring output on a fakesink so its elements keep a sink,
+   * then pause them for the duration of direct streaming */
+  priv->stub_fs = gst_element_factory_make ("fakesink", NULL);
+  gst_bin_add (priv->stream_bin, priv->stub_fs);
+  gst_element_sync_state_with_parent (priv->stub_fs);
+  fas_sink = gst_element_get_static_pad (priv->stub_fs, "sink");
+  gst_pad_link (old_src, fas_sink);
+  gst_object_unref (old_src);
+  gst_object_unref (fas_sink);
+  /* NOTE(review): the 'sink' pad ref appears to be leaked here — confirm */
+  gst_element_set_state (GST_ELEMENT(priv->audio_srcbin), GST_STATE_PAUSED);
+  gst_element_set_state (GST_ELEMENT(priv->video_srcbin), GST_STATE_PAUSED);
+  gst_element_set_state (GST_ELEMENT(priv->mux), GST_STATE_PAUSED);
+  gst_element_set_state (GST_ELEMENT(priv->mux_queue), GST_STATE_PAUSED);
+
+  /* signal that new pipeline linked */
+  g_mutex_lock (&priv->direct_lock);
+  linked = TRUE;
+  g_cond_signal (&priv->direct_cond);
+  g_mutex_unlock (&priv->direct_lock);
+
+  return GST_PAD_PROBE_REMOVE;
+}
+
+/* Installs an idle probe on the payloader sink pad to restore the mirroring
+ * chain, then waits (up to 5 s) for the probe to report completion.
+ * Returns TRUE when the pipeline was relinked. */
+static gboolean
+_rtsp_media_factory_wfd_relink_pipeline(GstRTSPMediaFactoryWFD * factory)
+{
+  GstRTSPMediaFactoryWFDPrivate *priv = NULL;
+  GstPad *probe_pad = NULL;
+  gint64 end_time = 0;
+
+  priv = factory->priv;
+
+  probe_pad = gst_element_get_static_pad (priv->pay, "sink");
+  if (probe_pad == NULL)
+    return FALSE;
+
+  in_pad_probe = FALSE;
+  linked = FALSE;
+  gst_pad_add_probe (probe_pad, GST_PAD_PROBE_TYPE_IDLE,
+      _rtsp_media_factory_wfd_restore_pipe_probe_cb, factory, NULL);
+  gst_object_unref (probe_pad);   /* ref from get_static_pad was leaked before */
+
+  g_mutex_lock (&priv->direct_lock);
+  end_time = g_get_monotonic_time () + 5 * G_TIME_SPAN_SECOND;
+  /* predicate loop: survives spurious wakeups and a probe that fired
+   * (and signalled) before we started waiting */
+  while (!linked) {
+    if (!g_cond_wait_until (&priv->direct_cond, &priv->direct_lock, end_time)) {
+      GST_ERROR_OBJECT (factory, "Failed to relink pipeline");
+      break;
+    }
+  }
+  g_mutex_unlock (&priv->direct_lock);
+  return linked;
+}
+
+
+/* Downstream event probe on the payloader sink pad during direct streaming.
+ * On EOS from the file source: replace the event with a custom "fillEOS"
+ * marker, restore the original pipeline, and schedule teardown of the
+ * direct pipeline on the main loop. */
+static GstPadProbeReturn
+_rtsp_media_factory_wfd_src_pad_probe_cb (GstPad * pad, GstPadProbeInfo * info,
+    gpointer user_data)
+{
+  GstRTSPMediaFactoryWFD *factory = NULL;
+  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
+
+  factory = (GstRTSPMediaFactoryWFD *) user_data;
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+    GST_INFO_OBJECT (factory, "Got event: %s in direct streaming",
+        GST_EVENT_TYPE_NAME (event));
+    /* fixed: removed the dead store 'info->data = NULL;' that was
+     * immediately overwritten below.
+     * NOTE(review): the EOS event previously in info->data is replaced
+     * without an unref -- looks like a leak of one event ref, but event
+     * ownership in probes is subtle; confirm before adding an unref. */
+    info->data = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
+        gst_structure_new_empty ("fillEOS"));
+
+    if (!_rtsp_media_factory_wfd_relink_pipeline (factory)) {
+      GST_ERROR_OBJECT (factory, "Failed to relink pipeline");
+      return GST_PAD_PROBE_REMOVE;
+    }
+
+    /* destroy from the main context, not from the streaming thread */
+    g_idle_add ((GSourceFunc) _rtsp_media_factory_wfd_destroy_direct_pipe,
+        factory);
+    return GST_PAD_PROBE_REMOVE;
+  }
+
+  return GST_PAD_PROBE_OK;
+}
+
+/* Build the direct-streaming pipeline:
+ *   src ! demux ! { aacparse ! queue , h264parse ! queue } ! mpegtsmux ! fakesink
+ * using the source/demux factories discovered earlier, add it to the stream
+ * bin, and install the idle/EOS probes on the payloader sink pad.
+ * Returns FALSE on any construction or linking failure.
+ * NOTE(review): on early failure, elements already created (and a pipeline
+ * already added to stream_bin) are not torn down here -- teardown is left to
+ * _rtsp_media_factory_wfd_destroy_direct_pipe(); confirm callers do that. */
+static gboolean
+_rtsp_media_factory_wfd_create_direct_pipeline (GstRTSPMediaFactoryWFD * factory)
+{
+  GstElement *src = NULL;
+  GstElement *demux = NULL;
+  gchar *path = NULL;
+  gchar *src_type = NULL;
+  GstPad *srcpad = NULL;
+  GstPad *mux_vsinkpad = NULL;
+  GstPad *mux_asinkpad = NULL;
+  GstRTSPMediaFactoryWFDPrivate *priv = NULL;
+  GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL;
+
+  priv = factory->priv;
+  pipe_data = priv->direct_pipe;
+
+  pipe_data->pipeline = (GstBin *) gst_bin_new ("direct");
+
+  src = gst_element_factory_create (priv->res.src_fact, NULL);
+  demux = gst_element_factory_create (priv->res.demux_fact, NULL);
+  pipe_data->ap = gst_element_factory_make ("aacparse", NULL);
+  pipe_data->vp = gst_element_factory_make ("h264parse", NULL);
+  pipe_data->aq = gst_element_factory_make ("queue", NULL);
+  pipe_data->vq = gst_element_factory_make ("queue", NULL);
+  pipe_data->tsmux = gst_element_factory_make ("mpegtsmux", NULL);
+  pipe_data->mux_fs = gst_element_factory_make ("fakesink", NULL);
+
+  if (src == NULL || demux == NULL || pipe_data->tsmux == NULL ||
+      pipe_data->ap == NULL || pipe_data->vp == NULL ||
+      pipe_data->aq == NULL || pipe_data->vq == NULL ||
+      pipe_data->mux_fs == NULL) {
+    GST_ERROR_OBJECT (factory, "Not all element created");
+    return FALSE;
+  }
+
+  /* fixed: g_ascii_strdown() returns a newly allocated string which the
+   * original leaked; keep it in a local so it can be freed */
+  src_type = g_ascii_strdown (g_type_name (G_OBJECT_TYPE (src)), -1);
+  if (g_strrstr (src_type, "file")) {
+    path = g_filename_from_uri (pipe_data->uri, NULL, NULL);
+    if (path == NULL) {
+      GST_ERROR_OBJECT (factory, "No file path");
+      g_free (src_type);
+      return FALSE;
+    }
+    g_object_set (src, "location", path, NULL);
+    g_free (path);
+  } else {
+    g_object_set (src, "uri", pipe_data->uri, NULL);
+  }
+  g_free (src_type);
+
+  gst_bin_add_many (pipe_data->pipeline, src, demux, pipe_data->ap,
+      pipe_data->vp, pipe_data->aq, pipe_data->vq,
+      pipe_data->tsmux, pipe_data->mux_fs, NULL);
+
+  if (!gst_element_link (src, demux)) {
+    GST_ERROR_OBJECT (factory, "Can't link src with demux");
+    return FALSE;
+  }
+
+  if (!gst_element_link (pipe_data->ap, pipe_data->aq)) {
+    GST_ERROR_OBJECT (factory, "Can't link audio parse and queue");
+    return FALSE;
+  }
+
+  if (!gst_element_link (pipe_data->vp, pipe_data->vq)) {
+    GST_ERROR_OBJECT (factory, "Can't link video parse and queue");
+    return FALSE;
+  }
+
+  if (!gst_element_link (pipe_data->tsmux, pipe_data->mux_fs)) {
+    GST_DEBUG_OBJECT (factory, "Can't link muxer and fakesink");
+    return FALSE;
+  }
+
+  g_signal_connect_object (demux, "pad-added",
+      G_CALLBACK (_rtsp_media_factory_wfd_demux_pad_added_cb), factory, 0);
+
+  gst_bin_add (priv->stream_bin, GST_ELEMENT (pipe_data->pipeline));
+
+  /* request video sink pad from muxer, which has elementary pid 0x1011 */
+  mux_vsinkpad = gst_element_get_request_pad (pipe_data->tsmux, "sink_4113");
+  if (!mux_vsinkpad) {
+    GST_ERROR_OBJECT (factory, "Failed to get sink pad from muxer...");
+    return FALSE;
+  }
+
+  /* request srcpad from video queue */
+  srcpad = gst_element_get_static_pad (pipe_data->vq, "src");
+  if (!srcpad) {
+    GST_ERROR_OBJECT (factory, "Failed to get srcpad from video queue...");
+    /* fixed: the original only logged and fell through into
+     * gst_pad_link (NULL, ...) -- a guaranteed crash */
+    gst_object_unref (mux_vsinkpad);
+    return FALSE;
+  }
+
+  if (gst_pad_link (srcpad, mux_vsinkpad) != GST_PAD_LINK_OK) {
+    GST_ERROR_OBJECT (factory,
+        "Failed to link video queue src pad & muxer video sink pad...");
+    /* fixed: release both pad refs on the failure path */
+    gst_object_unref (mux_vsinkpad);
+    gst_object_unref (srcpad);
+    return FALSE;
+  }
+
+  gst_object_unref (mux_vsinkpad);
+  gst_object_unref (srcpad);
+  srcpad = NULL;
+
+  /* request audio sink pad from muxer, which has elementary pid 0x1100 */
+  mux_asinkpad = gst_element_get_request_pad (pipe_data->tsmux, "sink_4352");
+  if (!mux_asinkpad) {
+    GST_ERROR_OBJECT (factory, "Failed to get sinkpad from muxer...");
+    return FALSE;
+  }
+
+  /* request srcpad from audio queue */
+  srcpad = gst_element_get_static_pad (pipe_data->aq, "src");
+  if (!srcpad) {
+    GST_ERROR_OBJECT (factory, "Failed to get srcpad from audio queue...");
+    gst_object_unref (mux_asinkpad);
+    return FALSE;
+  }
+
+  /* link audio queue's srcpad & muxer sink pad */
+  if (gst_pad_link (srcpad, mux_asinkpad) != GST_PAD_LINK_OK) {
+    GST_ERROR_OBJECT (factory,
+        "Failed to link audio queue src pad & muxer audio sink pad...");
+    gst_object_unref (mux_asinkpad);
+    gst_object_unref (srcpad);
+    return FALSE;
+  }
+  gst_object_unref (mux_asinkpad);
+  gst_object_unref (srcpad);
+  srcpad = NULL;
+
+  gst_element_sync_state_with_parent (GST_ELEMENT (pipe_data->pipeline));
+
+  /* watch the payloader sink pad: swap pipelines when idle, intercept EOS */
+  srcpad = gst_element_get_static_pad (priv->pay, "sink");
+
+  in_pad_probe = FALSE;
+  gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_IDLE,
+      _rtsp_media_factory_wfd_pay_pad_probe_cb, factory, NULL);
+  gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
+      _rtsp_media_factory_wfd_src_pad_probe_cb, factory, NULL);
+  /* fixed: drop the static-pad ref taken just above (was leaked) */
+  gst_object_unref (srcpad);
+
+  return TRUE;
+}
+
+/* "element-added" handler on the inner decodebin: record which decoders
+ * (h264/aac/ac3) appear and remember the demuxer's factory for reuse when
+ * building the direct pipeline. */
+static void
+_rtsp_media_factory_wfd_decodebin_element_added_cb (GstElement * decodebin,
+    GstElement * child, void *user_data)
+{
+  gchar *elem_name = g_ascii_strdown (g_type_name (G_OBJECT_TYPE (child)), -1);
+  GstRTSPMediaFactoryWFD *factory = NULL;
+  GstRTSPMediaFactoryWFDPrivate *priv = NULL;
+
+  factory = (GstRTSPMediaFactoryWFD *) user_data;
+  priv = factory->priv;
+
+  if (g_strrstr (elem_name, "h264"))
+    priv->res.h264_found++;
+  if (g_strrstr (elem_name, "aac"))
+    priv->res.aac_found++;
+  if (g_strrstr (elem_name, "ac3"))
+    priv->res.ac3_found++;
+  if (g_strrstr (elem_name, "demux"))
+    priv->res.demux_fact = gst_element_get_factory (child);
+
+  /* fixed: g_ascii_strdown() allocates; the original leaked this on
+   * every element-added emission */
+  g_free (elem_name);
+}
+
+/* "element-added" handler on uridecodebin: remember the source element's
+ * factory, and hook the inner decodebin so its children can be inspected. */
+static void
+_rtsp_media_factory_wfd_uridecodebin_element_added_cb (GstElement *uridecodebin,
+    GstElement * child, void *user_data)
+{
+  GstRTSPMediaFactoryWFD *factory = NULL;
+  GstRTSPMediaFactoryWFDPrivate *priv = NULL;
+  gchar *type_name = NULL;
+
+  factory = (GstRTSPMediaFactoryWFD *) user_data;
+  priv = factory->priv;
+
+  /* fixed: keep the g_ascii_strdown() result so it can be freed
+   * (the original leaked it on every call) */
+  type_name = g_ascii_strdown (g_type_name (G_OBJECT_TYPE (child)), -1);
+  if (g_strrstr (type_name, "src"))
+    priv->res.src_fact = gst_element_get_factory (child);
+  g_free (type_name);
+
+  if (G_OBJECT_TYPE (child) == priv->decodebin_type)
+    g_signal_connect_object (child, "element-added",
+        G_CALLBACK (_rtsp_media_factory_wfd_decodebin_element_added_cb),
+        factory, 0);
+}
+
+/* "pad-added" handler used during media-type discovery: terminate every
+ * exposed stream with queue ! fakesink so the discovery pipeline can
+ * preroll.  Best-effort: on failure the pad is simply left unlinked. */
+static void
+_rtsp_media_factory_wfd_discover_pad_added_cb (GstElement * uridecodebin,
+    GstPad * pad, GstBin * pipeline)
+{
+  GstPad *sinkpad = NULL;
+  gboolean added = FALSE;
+
+  GstElement *queue = gst_element_factory_make ("queue", NULL);
+  GstElement *sink = gst_element_factory_make ("fakesink", NULL);
+
+  if (G_UNLIKELY (queue == NULL || sink == NULL))
+    goto error;
+
+  g_object_set (sink, "silent", TRUE, NULL);
+  g_object_set (queue, "max-size-buffers", 1, "silent", TRUE, NULL);
+
+  /* fixed: removed the unused gst_pad_query_caps() call whose result was
+   * leaked when the sinkpad lookup below failed */
+  sinkpad = gst_element_get_static_pad (queue, "sink");
+  if (sinkpad == NULL)
+    goto error;
+
+  gst_bin_add_many (pipeline, queue, sink, NULL);
+  /* ownership of queue/sink transferred to the bin from here on */
+  added = TRUE;
+
+  if (!gst_element_link_pads_full (queue, "src", sink, "sink",
+          GST_PAD_LINK_CHECK_NOTHING))
+    goto error;
+  if (!gst_element_sync_state_with_parent (sink))
+    goto error;
+  if (!gst_element_sync_state_with_parent (queue))
+    goto error;
+
+  if (gst_pad_link_full (pad, sinkpad,
+          GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)
+    goto error;
+  gst_object_unref (sinkpad);
+
+  return;
+
+error:
+  if (sinkpad)
+    gst_object_unref (sinkpad);
+  /* fixed: only unref the elements while we still own them -- once they are
+   * in the bin, unreffing here was a double-unref */
+  if (!added) {
+    if (queue)
+      gst_object_unref (queue);
+    if (sink)
+      gst_object_unref (sink);
+  }
+  return;
+}
+
+/* "no-more-pads" handler: all streams have been exposed, so discovery is
+ * complete -- record success and stop the discovery main loop. */
+static void
+_rtsp_media_factory_wfd_uridecode_no_pad_cb (GstElement * uridecodebin,
+    void *user_data)
+{
+  GstRTSPMediaFactoryWFD *factory = (GstRTSPMediaFactoryWFD *) user_data;
+
+  type_detected = TRUE;
+  g_main_loop_quit (factory->priv->discover_loop);
+}
+
+/* Bus handler for the discovery pipeline.  Only GST_MESSAGE_ERROR matters:
+ * log it, mark detection as failed, and stop the discovery main loop. */
+static void
+_rtsp_media_factory_wfd_discover_pipe_bus_call (GstBus * bus,
+    GstMessage * msg, gpointer data)
+{
+  GstRTSPMediaFactoryWFD *factory = (GstRTSPMediaFactoryWFD *) data;
+  GstRTSPMediaFactoryWFDPrivate *priv = factory->priv;
+  GError *error = NULL;
+  gchar *debug = NULL;
+
+  if (GST_MESSAGE_TYPE (msg) != GST_MESSAGE_ERROR)
+    return;
+
+  gst_message_parse_error (msg, &error, &debug);
+  g_free (debug);
+
+  GST_ERROR_OBJECT (factory, "Error: %s", error->message);
+  g_error_free (error);
+
+  type_detected = FALSE;
+  g_main_loop_quit (priv->discover_loop);
+}
+
+/* Run a short uridecodebin-based discovery pipeline over @uri in its own
+ * GMainContext to find out which codecs the media contains (results land in
+ * priv->res via the element-added callbacks).  Sets the file-scope
+ * 'type_detected' flag; returns FALSE only when the pipeline could not be
+ * constructed at all. */
+static gboolean
+_rtsp_media_factory_wfd_find_media_type (GstRTSPMediaFactoryWFD * factory,
+    gchar * uri)
+{
+  GstRTSPMediaFactoryWFDPrivate *priv = NULL;
+  GstElement *uridecode = NULL;
+  GstElement *tmp = NULL;
+  GstBus *bus = NULL;
+  GMainContext *context = NULL;
+  GSource *source = NULL;
+
+  priv = factory->priv;
+
+  context = g_main_context_new ();
+  priv->discover_loop = g_main_loop_new (context, FALSE);
+
+  /* remember the GType of decodebin so the element-added callback can
+   * recognize the inner decodebin instance */
+  tmp = gst_element_factory_make ("decodebin", NULL);
+  priv->decodebin_type = G_OBJECT_TYPE (tmp);
+  gst_object_unref (tmp);
+
+  priv->discover_pipeline = (GstBin *) gst_pipeline_new ("Discover");
+  uridecode = gst_element_factory_make ("uridecodebin", "uri");
+  /* fixed: check for NULL *before* configuring/adding the element -- the
+   * original called g_object_set()/gst_bin_add() first -- and release the
+   * loop/context/pipeline on this failure path instead of leaking them */
+  if (priv->discover_pipeline == NULL || uridecode == NULL) {
+    GST_INFO_OBJECT (factory, "Failed to create type find pipeline");
+    type_detected = FALSE;
+    if (uridecode)
+      gst_object_unref (uridecode);
+    if (priv->discover_pipeline) {
+      gst_object_unref (GST_OBJECT (priv->discover_pipeline));
+      priv->discover_pipeline = NULL;
+    }
+    g_main_loop_unref (priv->discover_loop);
+    priv->discover_loop = NULL;
+    g_main_context_unref (context);
+    return FALSE;
+  }
+  g_object_set (G_OBJECT (uridecode), "uri", uri, NULL);
+  gst_bin_add (priv->discover_pipeline, uridecode);
+
+  /* we add a message handler */
+  bus = gst_pipeline_get_bus (GST_PIPELINE (priv->discover_pipeline));
+  source = gst_bus_create_watch (bus);
+  gst_bus_add_signal_watch (bus);
+
+  g_source_set_callback (source, (GSourceFunc) gst_bus_async_signal_func,
+      NULL, NULL);
+  g_source_attach (source, context);
+  g_signal_connect_object (bus, "message",
+      G_CALLBACK (_rtsp_media_factory_wfd_discover_pipe_bus_call), factory, 0);
+
+  g_signal_connect_object (uridecode, "pad-added",
+      G_CALLBACK (_rtsp_media_factory_wfd_discover_pad_added_cb),
+      priv->discover_pipeline, 0);
+  g_signal_connect_object (uridecode, "element-added",
+      G_CALLBACK (_rtsp_media_factory_wfd_uridecodebin_element_added_cb),
+      factory, 0);
+  g_signal_connect_object (uridecode, "no-more-pads",
+      G_CALLBACK (_rtsp_media_factory_wfd_uridecode_no_pad_cb), factory, 0);
+  gst_element_set_state (GST_ELEMENT (priv->discover_pipeline),
+      GST_STATE_PLAYING);
+
+  /* blocks until an error or no-more-pads quits the loop */
+  g_main_loop_run (priv->discover_loop);
+
+  gst_element_set_state (GST_ELEMENT (priv->discover_pipeline),
+      GST_STATE_NULL);
+  g_source_destroy (source);
+  g_source_unref (source);
+  g_main_loop_unref (priv->discover_loop);
+  priv->discover_loop = NULL;
+  g_main_context_unref (context);
+  /* fixed: pair gst_bus_add_signal_watch() with a remove before unref */
+  gst_bus_remove_signal_watch (bus);
+  gst_object_unref (bus);
+  gst_object_unref (GST_OBJECT (priv->discover_pipeline));
+  priv->discover_pipeline = NULL;
+
+  return TRUE;
+}
+
+/* Probe @filesrc with the discovery pipeline and report the detected video
+ * and audio codecs through the out parameters.  Returns GST_RTSP_OK on
+ * successful detection, GST_RTSP_ERROR otherwise. */
+gint
+gst_rtsp_media_factory_wfd_uri_type_find (GstRTSPMediaFactory * factory,
+    gchar * filesrc, guint8 * detected_video_codec,
+    guint8 * detected_audio_codec)
+{
+  GstRTSPMediaFactoryWFD *self = GST_RTSP_MEDIA_FACTORY_WFD_CAST (factory);
+  GstRTSPMediaFactoryWFDPrivate *priv = self->priv;
+
+  type_detected = FALSE;
+  _rtsp_media_factory_wfd_find_media_type (self, filesrc);
+
+  if (!type_detected) {
+    GST_ERROR_OBJECT (self, "Media type cannot be detected");
+    return GST_RTSP_ERROR;
+  }
+
+  GST_INFO_OBJECT (self, "Media type detected");
+
+  if (priv->res.h264_found)
+    *detected_video_codec = GST_WFD_VIDEO_H264;
+
+  /* AC3 deliberately checked last so it wins when both decoders appeared */
+  if (priv->res.aac_found)
+    *detected_audio_codec = GST_WFD_AUDIO_AAC;
+  if (priv->res.ac3_found)
+    *detected_audio_codec = GST_WFD_AUDIO_AC3;
+
+  return GST_RTSP_OK;
+}
+
+/* Enable (@direct_streaming != 0, streaming @filesrc) or disable
+ * (@direct_streaming == 0) direct streaming.  Enabling builds the direct
+ * pipeline and waits up to 5 s for the relink probe to signal 'linked'.
+ * Returns GST_RTSP_OK or GST_RTSP_ERROR. */
+gint
+gst_rtsp_media_factory_wfd_set_direct_streaming (GstRTSPMediaFactory * factory,
+    gint direct_streaming, gchar * filesrc)
+{
+  GstRTSPMediaFactoryWFD *_factory = GST_RTSP_MEDIA_FACTORY_WFD_CAST (factory);
+  GstRTSPMediaFactoryWFDPrivate *priv = _factory->priv;
+
+  linked = FALSE;
+
+  if (direct_streaming == 0) {
+    if (!_rtsp_media_factory_wfd_relink_pipeline (_factory)) {
+      GST_ERROR_OBJECT (factory, "Failed to relink pipeline");
+      return GST_RTSP_ERROR;
+    }
+
+    _rtsp_media_factory_wfd_destroy_direct_pipe ((void *) _factory);
+
+    GST_INFO_OBJECT (_factory, "Direct streaming bin removed");
+
+    return GST_RTSP_OK;
+  }
+
+  priv->direct_pipe = g_new0 (GstRTSPMediaWFDDirectPipelineData, 1);
+  priv->direct_pipe->uri = g_strdup (filesrc);
+
+  if (!_rtsp_media_factory_wfd_create_direct_pipeline (_factory)) {
+    GST_ERROR_OBJECT (_factory, "Direct pipeline creation failed");
+    /* fixed: the original leaked the pipe data (and left a dangling
+     * pointer) when creation failed */
+    g_free (priv->direct_pipe->uri);
+    g_free (priv->direct_pipe);
+    priv->direct_pipe = NULL;
+    return GST_RTSP_ERROR;
+  }
+
+  g_mutex_lock (&priv->direct_lock);
+  while (linked != TRUE) {
+    gint64 end_time = g_get_monotonic_time () + 5 * G_TIME_SPAN_SECOND;
+    if (!g_cond_wait_until (&priv->direct_cond, &priv->direct_lock, end_time)) {
+      g_mutex_unlock (&priv->direct_lock);
+      GST_ERROR_OBJECT (_factory, "Direct pipeline linking failed");
+      return GST_RTSP_ERROR;
+    }
+  }
+  g_mutex_unlock (&priv->direct_lock);
+
+  GST_INFO_OBJECT (_factory, "Direct streaming bin created");
+
+  return GST_RTSP_OK;
+}
GstRTSPMedia * media);
void (*media_configure) (GstRTSPMediaFactoryWFD * factory,
GstRTSPMedia * media);
+ void (*direct_stream_end) (GstRTSPMediaFactoryWFD * factory);
/*< private > */
gpointer _gst_reserved[GST_PADDING_LARGE];
void gst_rtsp_media_factory_wfd_set_config_bitrate (GstRTSPMediaFactoryWFD *factory,
guint *config_bitrate);
+gint gst_rtsp_media_factory_wfd_uri_type_find(GstRTSPMediaFactory *factory,
+ gchar *filesrc, guint8 *detected_video_codec, guint8 *detected_audio_codec);
+gint gst_rtsp_media_factory_wfd_set_direct_streaming(GstRTSPMediaFactory *factory,
+ gint direct_streaming, gchar *filesrc);
+
G_END_DECLS
#endif /* __GST_RTSP_MEDIA_FACTORY_WFD_H__ */
#include "rtsp-server-wfd.h"
#include "rtsp-client-wfd.h"
-#include "rtsp-client-ext.h"
#define GST_RTSP_WFD_SERVER_GET_PRIVATE(obj) \
(G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_RTSP_WFD_SERVER, GstRTSPWFDServerPrivate))
GST_INFO_OBJECT (server, "New Client is being created");
/* a new client connected, create a session to handle the client. */
- //client = gst_rtsp_wfd_client_new();
- client = (GstRTSPWFDClient *) gst_rtsp_ext_client_new ();
+ client = gst_rtsp_wfd_client_new ();
thread_pool = gst_rtsp_server_get_thread_pool (server);
session_pool = gst_rtsp_server_get_session_pool (server);
}
GstRTSPResult
+gst_rtsp_wfd_server_set_direct_streaming (GstRTSPWFDServer * server,
+    gint direct_streaming, gchar * urisrc)
+{
+  /* Forward the direct-streaming request to every connected client.
+   * Returns the result of the last client call (GST_RTSP_OK when there
+   * are no clients). */
+  GstRTSPResult res = GST_RTSP_OK;
+  GList *clients, *walk, *next;
+
+  g_return_val_if_fail (GST_IS_RTSP_SERVER (server), GST_RTSP_ERROR);
+
+  clients = gst_rtsp_server_client_filter (GST_RTSP_SERVER (server), NULL,
+      NULL);
+  if (clients == NULL) {
+    GST_ERROR_OBJECT (server, "There is no client in this server");
+  }
+
+  for (walk = clients; walk; walk = next) {
+    GstRTSPClient *client = walk->data;
+
+    next = g_list_next (walk);
+
+    res =
+        gst_rtsp_wfd_client_set_direct_streaming (GST_RTSP_WFD_CLIENT (client),
+        direct_streaming, urisrc);
+    if (res != GST_RTSP_OK) {
+      GST_ERROR_OBJECT (server, "Failed to set direct streaming to %d",
+          direct_streaming);
+    }
+    g_object_unref (client);
+  }
+  /* fixed: client_filter returns an owned list -- the elements were
+   * unreffed above but the list container itself was leaked */
+  g_list_free (clients);
+
+  return res;
+}
+
+GstRTSPResult
gst_rtsp_wfd_server_switch_to_udp (GstRTSPWFDServer *server)
{
GstRTSPResult res = GST_RTSP_OK;
GstRTSPResult gst_rtsp_wfd_server_set_video_native_reso (GstRTSPWFDServer *server, guint64 native_reso);
GstRTSPResult gst_rtsp_wfd_server_set_video_codec (GstRTSPWFDServer *server, guint8 video_codec);
GstRTSPResult gst_rtsp_wfd_server_set_audio_codec (GstRTSPWFDServer *server, guint8 audio_codec);
+GstRTSPResult gst_rtsp_wfd_server_set_direct_streaming (GstRTSPWFDServer *server, gint direct_streaming, gchar *urisrc);
GstRTSPResult gst_rtsp_wfd_server_switch_to_udp (GstRTSPWFDServer *server);
GstRTSPResult gst_rtsp_wfd_server_switch_to_tcp (GstRTSPWFDServer *server);
Name: gst-rtsp-server
Summary: Multimedia Framework Library
Version: 1.6.1
-Release: 16
+Release: 17
Url: http://gstreamer.freedesktop.org/
Group: System/Libraries
License: LGPL-2.0+