From: Gilbok Lee Date: Fri, 8 Dec 2023 02:27:27 +0000 (+0900) Subject: Merge branch 'upstream/1.22.7' into tizen_gst_1.22.7 X-Git-Tag: accepted/tizen/unified/20240105.105444^2^2 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=refs%2Fchanges%2F11%2F302511%2F8;p=platform%2Fupstream%2Fgstreamer.git Merge branch 'upstream/1.22.7' into tizen_gst_1.22.7 Change-Id: Ifd237cb75154b3000f76c695a5234052de646535 --- 9fecc618c16a4ed99834d0202046b3e741613878 diff --cc packaging/gstreamer.spec index a1180e9841,0000000000..5cb78b7335 mode 100644,000000..100644 --- a/packaging/gstreamer.spec +++ b/packaging/gstreamer.spec @@@ -1,1334 -1,0 +1,1334 @@@ +%bcond_with x +%bcond_with wayland + +%define gst_branch 1.0 +%define _lib_gstreamer_dir %{_libdir}/gstreamer-%{gst_branch} +%define _lib_girepository %{_libdir}/girepository-%{gst_branch} +%define _enable_v4l2h264enc 0 +%define _enable_introspection 0 + +####################################################################### +## how to build each plugin : +## add define option at gbs build cmd like [ --define "plugin bad" ] +## if not, all the source code in subprojects will be built +####################################################################### + +%if 0%{?plugin:1} + %if "%{plugin}" == "core" + %define _name gstreamer + %define _core_opt "" + %endif + %if "%{plugin}" == "base" || "%{plugin}" == "good" || "%{plugin}" == "bad" || "%{plugin}" == "ugly" + %define _name gst-plugins-%{plugin} + %define _base_opt "" + %define _good_opt "" + %define _bad_opt "" + %define _ugly_opt "" + %endif + %if "%{plugin}" == "omx" || "%{plugin}" == "libav" + %define _name gst-%{plugin} + %define _omx_opt "" + %define _libav_opt "" + %endif + %if "%{plugin}" == "rs" + %define _name gst-rtsp-server + %define _rs_opt "" + %endif + %if "%{plugin}" == "es" + %define _name gst-editing-services + %define _es_opt "" + %endif + + %define _source_path subprojects/%{_name} + +%else + + %define plugin all + %define _name gstreamer + %define 
_source_path . + + # plugin option prefix + %define _core_opt gstreamer: + %define _base_opt gst-plugins-base: + %define _good_opt gst-plugins-good: + %define _bad_opt gst-plugins-bad: + %define _ugly_opt gst-plugins-ugly: + %define _omx_opt gst-omx: + %define _libav_opt gst-libav: + %define _rs_opt gst-rtsp-server: + %define _es_opt gst-editing-services: + +%endif + +Name: %{_name} - Version: 1.22.0 - Release: 38 ++Version: 1.22.7 ++Release: 0 +Summary: Streaming-Media Framework Runtime +License: LGPL-2.0+ +Group: Multimedia/Framework +Url: http://gstreamer.freedesktop.org/ +Source0: gstreamer-%{version}.tar.gz +Source1001: gstreamer.manifest +BuildRequires: meson >= 0.62.0 +BuildRequires: gettext-tools +BuildRequires: pkgconfig(glib-2.0) >= 2.32.0 + + +%if "%{plugin}" == "all" || "%{plugin}" == "core" + +BuildRequires: bison +BuildRequires: fdupes +BuildRequires: flex +BuildRequires: libtool +BuildRequires: pkgconfig(dlog) +%if "%{_enable_introspection}" == "1" +BuildRequires: pkgconfig(gobject-introspection-1.0) >= 1.31.1 +%endif +BuildRequires: pkgconfig(libxml-2.0) + +%description -n gstreamer +GStreamer is a streaming-media framework, based on graphs of filters +which operate on media data. Applications using this library can do +anything from real-time sound processing to playing videos, and just +about anything else media-related. Its plug-in-based architecture +means that new data types or processing capabilities can be added by +installing new plug-ins. + +%package -n gstreamer-utils +Summary: Streaming-Media Framework Runtime +Group: Multimedia/Framework +Provides: gstreamer:%{_bindir}/gst-launch-%{gst_branch} = %{version} +# Symbol for unversioned wrappers: +Provides: gstreamer-utils_versioned = %{version} + +%description -n gstreamer-utils +GStreamer is a streaming-media framework, based on graphs of filters +which operate on media data. 
Applications using this library can do +anything from real-time sound processing to playing videos, and just +about anything else media-related. Its plug-in-based architecture +means that new data types or processing capabilities can be added by +installing new plug-ins. + +%package -n gstreamer-devel +Summary: Include Files and Libraries mandatory for Development +Group: Development/Libraries +# gstreamer-utils is required for the gstreamer-provides rpm magic. +Requires: gstreamer-utils = %{version} +Requires: gstreamer = %{version} + +%description -n gstreamer-devel +This package contains all necessary include files and libraries needed +to develop applications that require these. + +%else + +BuildRequires: pkgconfig(gstreamer-1.0) + +%if "%{plugin}" != "base" +BuildRequires: pkgconfig(gstreamer-plugins-base-1.0) +%endif + +%endif + +####################################################### +## Package info of Base Plugin +####################################################### + +%if "%{plugin}" == "all" +%package -n gst-plugins-base +Summary: GStreamer Streaming-Media Framework Plug-Ins +Group: Multimedia/Framework +%endif + +%if "%{plugin}" == "all" || "%{plugin}" == "base" + +BuildRequires: orc >= 0.4.16 +BuildRequires: python +BuildRequires: pkgconfig(alsa) >= 0.9.1 +BuildRequires: pkgconfig(freetype2) >= 2.0.9 +BuildRequires: pkgconfig(libdrm) +BuildRequires: pkgconfig(libtbm) +BuildRequires: pkgconfig(libxml-2.0) +BuildRequires: pkgconfig(ogg) >= 1.0 +BuildRequires: pkgconfig(opus) +BuildRequires: pkgconfig(theoradec) >= 1.1 +BuildRequires: pkgconfig(theoraenc) >= 1.1 +BuildRequires: pkgconfig(vorbis) >= 1.0 +BuildRequires: pkgconfig(vorbisenc) >= 1.0 +BuildRequires: pkgconfig(zlib) +%if "%{_enable_introspection}" == "1" +BuildRequires: pkgconfig(gobject-introspection-1.0) >= 1.31.1 +%endif +%if "%{tizen_profile_name}" != "tv" +BuildRequires: update-desktop-files +%endif +%if %{with wayland} +%if 0%{?enable_gl:1} +BuildRequires: pkgconfig(gles20) 
+BuildRequires: pkgconfig(wayland-egl) >= 9.0 +%endif # wayland +%endif # gl +%if %{with x} +BuildRequires: pkgconfig(dri2proto) +BuildRequires: pkgconfig(libdri2) +BuildRequires: pkgconfig(ice) +BuildRequires: pkgconfig(sm) +BuildRequires: pkgconfig(xext) +BuildRequires: pkgconfig(xv) +BuildRequires: pkgconfig(xfixes) +%endif +# gstreamer-utils is required for the gstreamer-provides rpm magic. +Requires: gstreamer >= 1.0.0 +Requires: opus +Supplements: gstreamer + +%description -n gst-plugins-base +GStreamer is a streaming media framework based on graphs of filters +that operate on media data. Applications using this library can do +anything media-related, from real-time sound processing to playing +videos. Its plug-in-based architecture means that new data types or +processing capabilities can be added simply by installing new plug-ins. + +%if "%{tizen_profile_name}" != "tv" +%package -n gst-plugins-base-extension-adder +Summary: libgstadder.so for gst-plugins-base +Requires: gst-plugins-base = %{version}-%{release} +Provides: gst-plugins-base-profile_mobile = %{version}-%{release} + +%description -n gst-plugins-base-extension-adder +Libgstadder.so for gst-plugin-base. Supplicant for gst-plugin-base. +This is for Tizen mobile profile. +%endif + +%package -n gst-plugins-base-devel +Summary: Include files and Libraries +Requires: gst-plugins-base = %{version} + +%description -n gst-plugins-base-devel +This package contains all necessary include files and libraries needed +to compile and link applications that use gstreamer-plugins-base. 
+ +%endif + +####################################################### +## Package info of Good Plugin +####################################################### + +%if "%{plugin}" == "all" +%package -n gst-plugins-good +Summary: GStreamer Streaming-Media Framework Plug-Ins +Group: Multimedia/Framework +%endif + +%if "%{plugin}" == "all" || "%{plugin}" == "good" + +BuildRequires: libjpeg-devel +BuildRequires: orc >= 0.4.16 +BuildRequires: python +BuildRequires: xsltproc +BuildRequires: pkgconfig(bzip2) +BuildRequires: pkgconfig(gio-2.0) +BuildRequires: pkgconfig(libpulse) >= 1.0 +BuildRequires: pkgconfig(libsoup-2.4) +BuildRequires: pkgconfig(libtbm) +BuildRequires: pkgconfig(libxml-2.0) >= 2.4.9 +BuildRequires: pkgconfig(vconf) +BuildRequires: pkgconfig(vpx) +BuildRequires: pkgconfig(zlib) +%if "%{tizen_profile_name}" != "tv" +BuildRequires: pkgconfig(libpng) >= 1.2 +%endif +%if %{with x} +BuildRequires: pkgconfig(ice) +BuildRequires: pkgconfig(sm) +BuildRequires: pkgconfig(xdamage) +BuildRequires: pkgconfig(xfixes) +# used by libgstvideo4linux2.so +BuildRequires: pkgconfig(xv) +%endif +Requires: gstreamer >= %{version} +Requires: gst-plugins-base >= %{version} +Requires: libsoup + +%description -n gst-plugins-good +GStreamer is a streaming media framework based on graphs of filters +that operate on media data. Applications using this library can do +anything media-related, from real-time sound processing to playing +videos. Its plug-in-based architecture means that new data types or +processing capabilities can be added simply by installing new plug-ins. + +%package -n gst-plugins-good-extra +Summary: Complementary plugins for gst-plugins-good +Group: Productivity/Multimedia/Other +Requires: gst-plugins-good = %{version} +Enhances: gst-plugins-good + +%description -n gst-plugins-good-extra +This package provides complementary plugins for gst-plugins-good and +plugins not included in official Tizen images, which may be used for development / experimental purposes. 
+ +%if "%{tizen_profile_name}" != "tv" +%package -n gst-plugins-good-cairo +Summary: Cairo plugin for gst-plugins-good +Group: Multimedia/Framework +BuildRequires: pkgconfig(cairo) +BuildRequires: pkgconfig(cairo-gobject) +Requires: gst-plugins-good = %{version} +Enhances: gst-plugins-good + +%description -n gst-plugins-good-cairo +This package provides cairo plugin for gst-plugins-good and +this is not included in Tizen headless image. + +%package -n gst-plugins-good-v4l2 +Summary: Video4Linux2 plugin for gst-plugins-good +Group: Multimedia/Framework +BuildRequires: pkgconfig(libv4l2) +Requires: gst-plugins-good = %{version} +Enhances: gst-plugins-good + +%description -n gst-plugins-good-v4l2 +This package provides Video4Linux2 plugin for gst-plugins-good. +%endif + +%endif + +####################################################### +## Package info of Bad Plugin +####################################################### + +%if "%{plugin}" == "all" +%package -n gst-plugins-bad +Summary: GStreamer Streaming-Media Framework Plug-Ins +Group: Multimedia/Framework +%endif + +%if "%{plugin}" == "all" || "%{plugin}" == "bad" + +BuildRequires: python +BuildRequires: xsltproc +BuildRequires: pkgconfig(gio-2.0) >= 2.25.0 +BuildRequires: pkgconfig(libcurl) >= 7.21.0 +BuildRequires: pkgconfig(libexif) >= 0.6.16 +BuildRequires: pkgconfig(libsrtp2) >= 2.1.0 +BuildRequires: pkgconfig(libusb-1.0) +BuildRequires: pkgconfig(nice) +BuildRequires: pkgconfig(openal) +BuildRequires: pkgconfig(openssl1.1) +BuildRequires: pkgconfig(opus) +BuildRequires: pkgconfig(orc-0.4) >= 0.4.11 +BuildRequires: pkgconfig(sndfile) >= 1.0.16 +BuildRequires: pkgconfig(soundtouch) > 1.4 +%if "%{tizen_profile_name}" != "tv" +BuildRequires: pkgconfig(srt) +%endif +%if %{with wayland} +BuildRequires: pkgconfig(libdrm) +BuildRequires: pkgconfig(libxml-2.0) +BuildRequires: pkgconfig(wayland-client) >= 1.0.0 +BuildRequires: pkgconfig(wayland-cursor) >= 1.0.0 +BuildRequires: pkgconfig(wayland-protocols) +%endif 
+%if %{with x} +BuildRequires: pkgconfig(x11) +%endif +Requires: gstreamer >= %{version} + +%description -n gst-plugins-bad +GStreamer is a streaming media framework based on graphs of filters +that operate on media data. Applications using this library can do +anything media-related,from real-time sound processing to playing +videos. Its plug-in-based architecture means that new data types or +processing capabilities can be added simply by installing new plug-ins. + +%package -n gst-plugins-bad-devel +Summary: GStreamer Streaming-Media Framework Plug-Ins +Requires: gst-plugins-bad = %{version}-%{release} +Requires: gst-plugins-base-devel + +%description -n gst-plugins-bad-devel +GStreamer is a streaming media framework based on graphs of filters +that operate on media data. Applications using this library can do +anything media-related,from real-time sound processing to playing +videos. Its plug-in-based architecture means that new data types or +processing capabilities can be added simply by installing new plug-ins. + +%endif + +####################################################### +## Package info of Ugly Plugin +####################################################### + +%if "%{plugin}" == "all" +%package -n gst-plugins-ugly +Summary: GStreamer plugins from the "ugly" set +Group: Multimedia/Framework +%endif + +%if "%{plugin}" == "all" || "%{plugin}" == "ugly" + +BuildRequires: which +BuildRequires: pkgconfig(opencore-amrwb) +%if "%{tizen_profile_name}" != "tv" +BuildRequires: pkgconfig(opencore-amrnb) +%endif + +%description -n gst-plugins-ugly + GStreamer is a streaming media framework, based on graphs of filters + which operate on media data. Applications using this library can do + anything from real-time sound processing to playing videos, and just + about anything else media-related. Its plugin-based architecture means + that new data types or processing capabilities can be added simply by + installing new plug-ins. + . 
+ This packages contains plugins from the "ugly" set, a set of + good-quality plug-ins that might pose distribution problems. + +%endif + +####################################################### +## Package info of Omx Plugin +####################################################### + +%if "%{tizen_profile_name}" != "tv" +%ifarch %{arm} aarch64 riscv64 + +%if "%{plugin}" == "all" +%package -n gst-omx +Summary: GStreamer plug-in that allows communication with OpenMAX IL components +Group: Multimedia/Framework +%endif + +%if "%{plugin}" == "all" || "%{plugin}" == "omx" + +BuildRequires: which +BuildRequires: pkgconfig(libtbm) +BuildRequires: pkgconfig(mm-common) +%if "%{target}" == "rpi" +BuildRequires: pkgconfig(bcm_host) +BuildRequires: pkgconfig(brcmegl) +%endif + +%description -n gst-omx +gst-openmax is a GStreamer plug-in that allows communication with OpenMAX IL components. +Multiple OpenMAX IL implementations can be used. + +%endif # plugin +%endif # arch +%endif # profile + +####################################################### +## Package info of libav Plugin +####################################################### + +%if "%{plugin}" == "all" +%package -n gst-libav +Summary: Libav plugin for GStreamer +Group: Multimedia/Framework +%endif + +%if "%{plugin}" == "all" || "%{plugin}" == "libav" + +BuildRequires: bzip2-devel +BuildRequires: gettext +BuildRequires: which +BuildRequires: yasm +BuildRequires: pkgconfig(libavcodec) +BuildRequires: pkgconfig(libavfilter) +BuildRequires: pkgconfig(libavformat) +BuildRequires: pkgconfig(libavutil) +BuildRequires: pkgconfig(orc-0.4) +BuildRequires: pkgconfig(theora) +BuildRequires: pkgconfig(vorbis) +BuildRequires: pkgconfig(zlib) + +%description -n gst-libav +This GStreamer plugin supports a large number of audio and video compression +formats through the use of the libav library. 
The plugin contains GStreamer +elements for decoding 90+ formats (AVI, MPEG, OGG, Matroska, ASF, ...), +demuxing 30+ formats and colorspace conversion. + +%endif + +####################################################### +## Package info of rtsp-server Plugin +####################################################### + +%if "%{plugin}" == "all" +%package -n gst-rtsp-server +Summary: Multimedia Framework Library +Group: System/Libraries +%endif + +%if "%{plugin}" == "all" || "%{plugin}" == "rs" + +Requires(post): /sbin/ldconfig +Requires(postun): /sbin/ldconfig + +%description -n gst-rtsp-server +This GStreamer plugin supports a large number of audio and video compression +formats through the use of the libav library. The plugin contains GStreamer +elements for decoding 90+ formats (AVI, MPEG, OGG, Matroska, ASF, ...), +demuxing 30+ formats and colorspace conversion. + +%package -n gst-rtsp-server-devel +Summary: Multimedia Framework RTSP server library (DEV) +Group: Development/Libraries +Requires: gst-rtsp-server = %{version}-%{release} + +%description -n gst-rtsp-server-devel + +%endif + +####################################################### +## Package info of editing-services Plugin +####################################################### + +%if "%{tizen_profile_name}" != "tv" + +%if "%{plugin}" == "all" +%package -n gst-editing-services +Summary: GStreamer Editing Service Plug-Ins +Group: Multimedia/Framework +%endif + +%if "%{plugin}" == "all" || "%{plugin}" == "es" + +BuildRequires: flex +BuildRequires: gtk-doc +BuildRequires: pkgconfig(libxml-2.0) +Requires: gstreamer >= 1.0.0 +Supplements: gstreamer + +%description -n gst-editing-services +This is a high-level library for facilitating the creation of audio/video +non-linear editors. 
+ +%package -n gst-editing-services-devel +Summary: Development files for gst-editing-services +Requires: gst-editing-services = %{version}-%{release} + +%description -n gst-editing-services-devel +This package contains libraries and header files for +developing applications that use %{name} + +%endif # plugin + +%endif # profile + +####################################################### +## Prep +####################################################### + +%prep +%setup -q -n gstreamer-%{version} +cp %{SOURCE1001} ./gstreamer.manifest +cp %{SOURCE1001} ./gst-plugins-base.manifest +cp %{SOURCE1001} ./gst-plugins-good.manifest +cp %{SOURCE1001} ./gst-plugins-bad.manifest +cp %{SOURCE1001} ./gst-plugins-ugly.manifest +cp %{SOURCE1001} ./gst-libav.manifest +cp %{SOURCE1001} ./gst-rtsp-server.manifest +%if "%{tizen_profile_name}" != "tv" +cp %{SOURCE1001} ./gst-omx.manifest +cp %{SOURCE1001} ./gst-editing-services.manifest +%endif + +####################################################### +## Build +####################################################### + +%build + +%if "%{plugin}" != "all" +pushd %{_source_path} +%endif + +mkdir -p build +export CFLAGS="%{optflags} \ + -fno-strict-aliasing\ + -fstack-protector-strong\ + -Wl,-z,relro\ +%if "%{tizen_profile_name}" == "tv" + -Wno-declaration-after-statement\ +%endif + -D_FORTIFY_SOURCE=2" +export CXXFLAGS+=" -Wno-error" +export LDFLAGS+=" -pthread" + +meson --auto-features=disabled --prefix=/usr --libdir=%{_libdir} --datadir=%{_datadir} --sysconfdir=%{_hal_sysconfdir} \ +%if "%{plugin}" == "all" + %if "%{tizen_profile_name}" == "tv" + -D ges=disabled \ + -D omx=disabled \ + %endif # profile + %ifarch x86_64 %{ix86} + -D omx=disabled \ + %endif # arch +%endif # plugin +%if "%{plugin}" == "all" || "%{plugin}" == "core" + -D %{_core_opt}check=enabled \ + -D %{_core_opt}coretracers=enabled \ + -D %{_core_opt}extra-checks=enabled \ + -D %{_core_opt}ptp-helper-permissions=none \ + -D %{_core_opt}tools=enabled \ + %if 
"%{tizen_profile_name}" == "tv" + -D %{_core_opt}tv-profile=true \ + %endif # profile + %if "%{_enable_introspection}" == "1" + -D %{_core_opt}introspection=enabled \ + %endif # introspection +%endif # plugin +%if "%{plugin}" == "all" || "%{plugin}" == "base" + -D %{_base_opt}alsa=enabled \ + -D %{_base_opt}app=enabled \ + -D %{_base_opt}audioconvert=enabled \ + -D %{_base_opt}audiomixer=enabled \ + -D %{_base_opt}audiorate=enabled \ + -D %{_base_opt}audioresample=enabled \ + -D %{_base_opt}audiotestsrc=enabled \ + -D %{_base_opt}compositor=enabled \ + -D %{_base_opt}gio=enabled \ + -D %{_base_opt}gio-typefinder=enabled \ + -D %{_base_opt}ogg=enabled \ + -D %{_base_opt}opus=enabled \ + -D %{_base_opt}overlaycomposition=enabled \ + -D %{_base_opt}pbtypes=enabled \ + -D %{_base_opt}playback=enabled \ + -D %{_base_opt}rawparse=enabled \ + -D %{_base_opt}subparse=enabled \ + -D %{_base_opt}tbm=true \ + -D %{_base_opt}tcp=enabled \ + -D %{_base_opt}tools=disabled \ + -D %{_base_opt}theora=enabled \ + -D %{_base_opt}typefind=enabled \ + -D %{_base_opt}videoconvertscale=enabled \ + -D %{_base_opt}videorate=enabled \ + -D %{_base_opt}videotestsrc=enabled \ + -D %{_base_opt}volume=enabled \ + -D %{_base_opt}vorbis=enabled \ + %if "%{tizen_profile_name}" == "tv" + -D %{_base_opt}tv-profile=true \ + %else + -D %{_base_opt}adder=enabled \ + -D %{_base_opt}encoding=enabled \ + %if 0%{?enable_gl:1} + -D %{_base_opt}gl=enabled \ + %endif # gl + %endif # profile + %if "%{_enable_introspection}" == "1" + -D %{_base_opt}introspection=enabled \ + %endif # introspection +%endif # base plugin +%if "%{plugin}" == "all" || "%{plugin}" == "good" + %if 0%{?ENABLE_AALIB} + -D %{_good_opt}aalib=enabled \ + %endif + -D %{_good_opt}adaptivedemux2=enabled \ + -D %{_good_opt}apetag=enabled \ + -D %{_good_opt}audiofx=enabled \ + -D %{_good_opt}audioparsers=enabled \ + -D %{_good_opt}autodetect=enabled \ + -D %{_good_opt}avi=enabled \ + -D %{_good_opt}debugutils=enabled \ + -D 
%{_good_opt}deinterlace=enabled \ + -D %{_good_opt}icydemux=enabled \ + -D %{_good_opt}id3demux=enabled \ + -D %{_good_opt}interleave=enabled \ + -D %{_good_opt}isomp4=enabled \ + -D %{_good_opt}jpeg=enabled \ + -D %{_good_opt}law=enabled \ + -D %{_good_opt}pulse=enabled \ + -D %{_good_opt}replaygain=enabled \ + -D %{_good_opt}rtp=enabled \ + -D %{_good_opt}rtpmanager=enabled \ + -D %{_good_opt}rtsp=enabled \ + -D %{_good_opt}soup=enabled \ + -D %{_good_opt}tbm=true \ + -D %{_good_opt}udp=enabled \ + -D %{_good_opt}videocrop=enabled \ + -D %{_good_opt}videofilter=enabled \ + -D %{_good_opt}vpx=enabled \ + -D %{_good_opt}wavenc=enabled \ + -D %{_good_opt}wavparse=enabled \ + %if "%{_enable_v4l2h264enc}" == "1" + -D %{_good_opt}v4l2h264enc=enabled \ + %endif + %if "%{tizen_profile_name}" == "tv" + -D %{_good_opt}tv-profile=true \ + %ifnarch %{arm} aarch64 + -D %{_good_opt}v4l2=enabled \ + %endif + %else + -D %{_good_opt}cairo=enabled \ + -D %{_good_opt}effectv=enabled \ + -D %{_good_opt}flv=enabled \ + -D %{_good_opt}imagefreeze=enabled \ + -D %{_good_opt}matroska=enabled \ + -D %{_good_opt}multifile=enabled \ + -D %{_good_opt}png=enabled \ + -D %{_good_opt}smpte=enabled \ + -D %{_good_opt}v4l2=enabled \ + -D %{_good_opt}v4l2-libv4l2=enabled \ + -D %{_good_opt}v4l2-probe=true \ + -D %{_good_opt}videobox=enabled \ + -D %{_good_opt}videomixer=enabled \ + %endif # profile +%endif # plugin +%if "%{plugin}" == "all" || "%{plugin}" == "bad" + -D %{_bad_opt}aes=enabled \ + -D %{_bad_opt}audiobuffersplit=enabled \ + -D %{_bad_opt}audiolatency=enabled \ + -D %{_bad_opt}audiomixmatrix=enabled \ + -D %{_bad_opt}debugutils=enabled \ + -D %{_bad_opt}dtls=enabled \ + -D %{_bad_opt}gst_play_tests=false\ + -D %{_bad_opt}introspection=disabled \ + -D %{_bad_opt}ipcpipeline=enabled \ + -D %{_bad_opt}midi=enabled \ + -D %{_bad_opt}mpegtsdemux=enabled \ + -D %{_bad_opt}mpegtsmux=enabled \ + -D %{_bad_opt}netsim=enabled \ + -D %{_bad_opt}openal=enabled \ + -D %{_bad_opt}opus=enabled \ + 
-D %{_bad_opt}proxy=enabled \ + -D %{_bad_opt}sctp=enabled \ + -D %{_bad_opt}sdp=enabled \ + -D %{_bad_opt}shm=enabled \ + -D %{_bad_opt}soundtouch=enabled \ + -D %{_bad_opt}srtp=enabled \ + -D %{_bad_opt}timecode=enabled \ + -D %{_bad_opt}videoframe_audiolevel=enabled \ + -D %{_bad_opt}videoparsers=enabled \ + -D %{_bad_opt}wayland=enabled \ + -D %{_bad_opt}webrtc=enabled \ + %if %{with wayland} + -D %{_bad_opt}wayland=enabled \ + %endif + %if "%{tizen_profile_name}" == "tv" + -D %{_bad_opt}tv-profile=true \ + %else + -D %{_bad_opt}autoconvert=enabled \ + -D %{_bad_opt}camerabin2=enabled \ + -D %{_bad_opt}coloreffects=enabled \ + -D %{_bad_opt}dash=enabled \ + -D %{_bad_opt}gaudieffects=enabled \ + -D %{_bad_opt}gdp=enabled \ + -D %{_bad_opt}hls=enabled \ + -D %{_bad_opt}id3tag=enabled \ + -D %{_bad_opt}jpegformat=enabled \ + -D %{_bad_opt}mpegdemux=enabled \ + -D %{_bad_opt}rist=enabled \ + -D %{_bad_opt}rtp=enabled \ + -D %{_bad_opt}srt=enabled \ + -D %{_bad_opt}smoothstreaming=enabled \ + %if 0%{?enable_gl:1} + -D %{_bad_opt}gl=enabled \ + %endif # gl + %endif # profile +%endif # plugin +%if "%{plugin}" == "all" || "%{plugin}" == "ugly" + -D %{_ugly_opt}amrwbdec=enabled \ + %if "%{tizen_profile_name}" == "tv" + -D %{_ugly_opt}tv-profile=true \ + %else + -D %{_ugly_opt}amrnb=enabled \ + -D %{_ugly_opt}asfdemux=enabled \ + %endif # profile +%endif # plugin +%if "%{plugin}" == "all" || "%{plugin}" == "libav" + %if "%{tizen_profile_name}" == "tv" + -D %{_libav_opt}tv-profile=true \ + %endif # profile +%endif # plugin +%if "%{plugin}" == "all" || "%{plugin}" == "rs" + %if "%{tizen_profile_name}" == "tv" + -D %{_rs_opt}tv-profile=true \ + %endif # profile + -D %{_rs_opt}introspection=disabled \ + -D %{_rs_opt}rtspclientsink=enabled \ +%endif +%if "%{tizen_profile_name}" != "tv" + %ifarch %{arm} aarch64 riscv64 + %if "%{plugin}" == "all" || "%{plugin}" == "omx" + -D %{_omx_opt}tools=disabled \ + %if "%{target}" == "rpi" + -D 
%{_omx_opt}header_path=/opt/vc/include/interface/vmcs_host/khronos/IL \ + -D %{_omx_opt}target=rpi \ + %else + %ifarch riscv64 + -D %{_omx_opt}target=generic \ + %else + %ifarch aarch64 + -D %{_omx_opt}target=exynos64 \ + %else + -D %{_omx_opt}target=exynos \ + -D %{_omx_opt}vp9=true \ + %endif # arch aarch64 + %endif # arch riscv64 + %endif # target + %endif # plugin + %endif # arch + %if "%{plugin}" == "all" || "%{plugin}" == "es" + -D %{_es_opt}introspection=disabled \ + -D %{_es_opt}tools=enabled \ + -D %{_es_opt}xptv=enabled \ + -D %{_es_opt}python=enabled \ + %endif # plugin +%endif # profile + build + +ninja -C build all %{?_smp_mflags} + +%if "%{plugin}" != "all" +popd +%endif + +####################################################### +## Install +####################################################### + +%install +rm -rf %{buildroot} +export DESTDIR=%{buildroot} +ninja -C %{_source_path}/build install +mkdir -p %{buildroot}%{_datadir}/gstreamer-%{gst_branch}/presets +rm -rf %{buildroot}%{_datadir}/gstreamer-%{gst_branch}/encoding-profiles +rm -rf %{buildroot}%{_datadir}/locale + +%clean +rm -rf $RPM_BUILD_ROOT + +%post -p /sbin/ldconfig +%postun -p /sbin/ldconfig + +####################################################### +## Packaging rpms +####################################################### + +###################### gstreamer ###################### + +%if "%{plugin}" == "all" || "%{plugin}" == "core" + +%files -n gstreamer +%manifest gstreamer.manifest +%defattr(-, root, root) +%license subprojects/gstreamer/COPYING +%dir %{_datadir}/gstreamer-%{gst_branch} +%dir %{_datadir}/gstreamer-%{gst_branch}/presets +%dir %{_lib_gstreamer_dir} +%{_lib_gstreamer_dir}/libgstcoreelements.so +%{_lib_gstreamer_dir}/libgstcoretracers.so +%dir %{_libexecdir}/gstreamer-%{gst_branch} +%{_libexecdir}/gstreamer-%{gst_branch}/gst-hotdoc-plugins-scanner +%{_libexecdir}/gstreamer-%{gst_branch}/gst-plugin-scanner +%{_libexecdir}/gstreamer-%{gst_branch}/gst-ptp-helper 
+%exclude %{_libexecdir}/gstreamer-%{gst_branch}/gst-plugins-doc-cache-generator +%{_libdir}/libgstbase-%{gst_branch}.so.* +%{_libdir}/libgstcontroller-%{gst_branch}.so.* +%{_libdir}/libgstnet-%{gst_branch}.so.* +%{_libdir}/libgstcheck-%{gst_branch}.so.* +%{_libdir}/libgstreamer-%{gst_branch}.so.* +%if "%{_enable_introspection}" == "1" +%{_lib_girepository}/Gst-%{gst_branch}.typelib +%{_lib_girepository}/GstBase-%{gst_branch}.typelib +%{_lib_girepository}/GstController-%{gst_branch}.typelib +%{_lib_girepository}/GstNet-%{gst_branch}.typelib +%{_lib_girepository}/GstCheck-%{gst_branch}.typelib +%endif #introspection +%{_datadir}/gdb/auto-load/usr/%{_lib}/libgstreamer-%{gst_branch}*-gdb.py +%{_datadir}/gstreamer-%{gst_branch}/gdb/glib_gobject_helper.py +%{_datadir}/gstreamer-%{gst_branch}/gdb/gst_gdb.py + +%files -n gstreamer-utils +%manifest gstreamer.manifest +%defattr(-, root, root) +%license subprojects/gstreamer/COPYING +%{_bindir}/gst-inspect-%{gst_branch} +%{_bindir}/gst-launch-%{gst_branch} +%{_bindir}/gst-stats-%{gst_branch} +%{_bindir}/gst-typefind-%{gst_branch} +%exclude %doc %{_mandir}/man?/*-%{gst_branch}.* + +%files -n gstreamer-devel +%manifest gstreamer.manifest +%defattr(-, root, root) +%{_datadir}/aclocal/gst-element-check-%{gst_branch}.m4 +%{_includedir}/gstreamer-%{gst_branch}/gst/*.h +%{_includedir}/gstreamer-%{gst_branch}/gst/base/* +%{_includedir}/gstreamer-%{gst_branch}/gst/controller/* +%{_includedir}/gstreamer-%{gst_branch}/gst/net/* +%{_includedir}/gstreamer-%{gst_branch}/gst/check/* +%{_libdir}/libgstbase-%{gst_branch}.so +%{_libdir}/libgstcontroller-%{gst_branch}.so +%{_libdir}/libgstnet-%{gst_branch}.so +%{_libdir}/libgstcheck-%{gst_branch}.so +%{_libdir}/libgstreamer-%{gst_branch}.so +%{_libdir}/pkgconfig/gstreamer-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-base-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-controller-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-net-%{gst_branch}.pc 
+%{_libdir}/pkgconfig/gstreamer-check-%{gst_branch}.pc +%if "%{_enable_introspection}" == "1" +%{_datadir}/gir-%{gst_branch}/Gst-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstBase-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstController-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstNet-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstCheck-%{gst_branch}.gir +%endif #introspection + +%changelog + +%endif + +###################### gst-plugins-base ###################### + +%if "%{plugin}" == "all" || "%{plugin}" == "base" + +%files -n gst-plugins-base +%manifest gst-plugins-base.manifest +%defattr(-, root, root) +%license subprojects/gst-plugins-base/COPYING +%{_lib_gstreamer_dir}/libgstalsa.so +%{_lib_gstreamer_dir}/libgstapp.so +%{_lib_gstreamer_dir}/libgstaudioconvert.so +%{_lib_gstreamer_dir}/libgstaudiomixer.so +%{_lib_gstreamer_dir}/libgstaudiorate.so +%{_lib_gstreamer_dir}/libgstaudioresample.so +%{_lib_gstreamer_dir}/libgstaudiotestsrc.so +%{_lib_gstreamer_dir}/libgstcompositor.so +%{_lib_gstreamer_dir}/libgstgio.so +%{_lib_gstreamer_dir}/libgstogg.so +%{_lib_gstreamer_dir}/libgstopus.so +%{_lib_gstreamer_dir}/libgstoverlaycomposition.so +%{_lib_gstreamer_dir}/libgstpbtypes.so +%{_lib_gstreamer_dir}/libgstplayback.so +%{_lib_gstreamer_dir}/libgstrawparse.so +%{_lib_gstreamer_dir}/libgstsubparse.so +%{_lib_gstreamer_dir}/libgsttcp.so +%{_lib_gstreamer_dir}/libgsttheora.so +%{_lib_gstreamer_dir}/libgsttypefindfunctions.so +%{_lib_gstreamer_dir}/libgstvideoconvertscale.so +%{_lib_gstreamer_dir}/libgstvideorate.so +%{_lib_gstreamer_dir}/libgstvideotestsrc.so +%{_lib_gstreamer_dir}/libgstvolume.so +%{_lib_gstreamer_dir}/libgstvorbis.so +%if %{with x} +%{_lib_gstreamer_dir}/libgstximagesink.so +%{_lib_gstreamer_dir}/libgstxvimagesink.so +%endif +%{_libdir}/libgstallocators-%{gst_branch}.so.* +%{_libdir}/libgstapp-%{gst_branch}.so.* +%{_libdir}/libgstaudio-%{gst_branch}.so.* +%{_libdir}/libgstfft-%{gst_branch}.so.* 
+%{_libdir}/libgstpbutils-%{gst_branch}.so.* +%{_libdir}/libgstriff-%{gst_branch}.so.* +%{_libdir}/libgstrtp-%{gst_branch}.so.* +%{_libdir}/libgstrtsp-%{gst_branch}.so.* +%{_libdir}/libgstsdp-%{gst_branch}.so.* +%{_libdir}/libgsttag-%{gst_branch}.so.* +%{_libdir}/libgstvideo-%{gst_branch}.so.* +%if "%{tizen_profile_name}" != "tv" +%{_lib_gstreamer_dir}/libgstencoding.so +%endif #profile +%if "%{_enable_introspection}" == "1" +%{_lib_girepository}/GstAllocators-%{gst_branch}.typelib +%{_lib_girepository}/GstApp-%{gst_branch}.typelib +%{_lib_girepository}/GstAudio-%{gst_branch}.typelib +%{_lib_girepository}/GstPbutils-%{gst_branch}.typelib +%{_lib_girepository}/GstRtp-%{gst_branch}.typelib +%{_lib_girepository}/GstRtsp-%{gst_branch}.typelib +%{_lib_girepository}/GstSdp-%{gst_branch}.typelib +%{_lib_girepository}/GstTag-%{gst_branch}.typelib +%{_lib_girepository}/GstVideo-%{gst_branch}.typelib +%endif #introspection +%dir %{_datadir}/gst-plugins-base/ +%dir %{_datadir}/gst-plugins-base/%{gst_branch}/ +%{_datadir}/gst-plugins-base/%{gst_branch}/license-translations.dict + +%if "%{tizen_profile_name}" != "tv" +%files -n gst-plugins-base-extension-adder +%{_lib_gstreamer_dir}/libgstadder.so +%license subprojects/gst-plugins-base/COPYING +%endif # profile + +%files -n gst-plugins-base-devel +%manifest gst-plugins-base.manifest +%defattr(-, root, root) +%{_includedir}/gstreamer-%{gst_branch}/gst/allocators/* +%{_includedir}/gstreamer-%{gst_branch}/gst/app/* +%{_includedir}/gstreamer-%{gst_branch}/gst/audio/* +%{_includedir}/gstreamer-%{gst_branch}/gst/fft/* +%{_includedir}/gstreamer-%{gst_branch}/gst/pbutils/* +%{_includedir}/gstreamer-%{gst_branch}/gst/riff/* +%{_includedir}/gstreamer-%{gst_branch}/gst/rtp/* +%{_includedir}/gstreamer-%{gst_branch}/gst/rtsp/* +%{_includedir}/gstreamer-%{gst_branch}/gst/sdp/* +%{_includedir}/gstreamer-%{gst_branch}/gst/tag/* +%{_includedir}/gstreamer-%{gst_branch}/gst/video/* +%if "%{plugin}" == "all" && "%{tizen_profile_name}" != "tv" 
+%exclude %{_includedir}/gstreamer-%{gst_branch}/gst/audio/audio-bad-prelude.h +%exclude %{_includedir}/gstreamer-%{gst_branch}/gst/audio/gstnonstreamaudiodecoder.h +%exclude %{_includedir}/gstreamer-%{gst_branch}/gst/audio/gstplanaraudioadapter.h +%endif +%{_libdir}/libgstallocators-%{gst_branch}.so +%{_libdir}/libgstapp-%{gst_branch}.so +%{_libdir}/libgstaudio-%{gst_branch}.so +%{_libdir}/libgstfft-%{gst_branch}.so +%{_libdir}/libgstpbutils-%{gst_branch}.so +%{_libdir}/libgstriff-%{gst_branch}.so +%{_libdir}/libgstrtp-%{gst_branch}.so +%{_libdir}/libgstrtsp-%{gst_branch}*.so +%{_libdir}/libgstsdp-%{gst_branch}.so +%{_libdir}/libgsttag-%{gst_branch}.so +%{_libdir}/libgstvideo-%{gst_branch}.so +%{_libdir}/pkgconfig/gstreamer-allocators-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-app-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-audio-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-fft-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-pbutils-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-plugins-base-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-riff-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-rtp-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-rtsp-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-sdp-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-tag-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-video-%{gst_branch}.pc +%if "%{_enable_introspection}" == "1" +%{_datadir}/gir-%{gst_branch}/GstAllocators-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstApp-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstAudio-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstPbutils-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstRtp-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstRtsp-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstSdp-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstTag-%{gst_branch}.gir +%{_datadir}/gir-%{gst_branch}/GstVideo-%{gst_branch}.gir +%endif # introspection + +%endif # plugin + +###################### 
gst-plugins-good ###################### + +%if "%{plugin}" == "all" || "%{plugin}" == "good" + +%files -n gst-plugins-good +%manifest gst-plugins-good.manifest +%defattr(-, root, root) +%license subprojects/gst-plugins-good/COPYING +%{_lib_gstreamer_dir}/libgstadaptivedemux2.so +%{_lib_gstreamer_dir}/libgstalaw.so +%{_lib_gstreamer_dir}/libgstapetag.so +%{_lib_gstreamer_dir}/libgstaudiofx.so +%{_lib_gstreamer_dir}/libgstaudioparsers.so +%{_lib_gstreamer_dir}/libgstautodetect.so +%{_lib_gstreamer_dir}/libgstavi.so +%{_lib_gstreamer_dir}/libgstdebug.so +%{_lib_gstreamer_dir}/libgstdeinterlace.so +%{_lib_gstreamer_dir}/libgsticydemux.so +%{_lib_gstreamer_dir}/libgstid3demux.so +%{_lib_gstreamer_dir}/libgstinterleave.so +%{_lib_gstreamer_dir}/libgstisomp4.so +%{_lib_gstreamer_dir}/libgstjpeg.so +%{_lib_gstreamer_dir}/libgstmulaw.so +%{_lib_gstreamer_dir}/libgstnavigationtest.so +%{_lib_gstreamer_dir}/libgstpulseaudio.so +%{_lib_gstreamer_dir}/libgstreplaygain.so +%{_lib_gstreamer_dir}/libgstrtp.so +%{_lib_gstreamer_dir}/libgstrtpmanager.so +%{_lib_gstreamer_dir}/libgstrtsp.so +%{_lib_gstreamer_dir}/libgstsoup.so +%{_lib_gstreamer_dir}/libgstudp.so +%{_lib_gstreamer_dir}/libgstvideocrop.so +%{_lib_gstreamer_dir}/libgstvideofilter.so +%{_lib_gstreamer_dir}/libgstvpx.so +%{_lib_gstreamer_dir}/libgstwavenc.so +%{_lib_gstreamer_dir}/libgstwavparse.so +%if %{with x} +%{_lib_gstreamer_dir}/libgstximagesrc.so +%endif # x +%if "%{tizen_profile_name}" != "tv" +%{_lib_gstreamer_dir}/libgsteffectv.so +%{_lib_gstreamer_dir}/libgstflv.so +%{_lib_gstreamer_dir}/libgstimagefreeze.so +%{_lib_gstreamer_dir}/libgstmatroska.so +%{_lib_gstreamer_dir}/libgstmultifile.so +%{_lib_gstreamer_dir}/libgstsmpte.so +%{_lib_gstreamer_dir}/libgstvideobox.so +%{_lib_gstreamer_dir}/libgstvideomixer.so +%else # profile +%ifnarch %{arm} aarch64 +%{_lib_gstreamer_dir}/libgstvideo4linux2.so +%endif # arch +%endif # profile +%{_datadir}/gstreamer-%{gst_branch}/presets/GstQTMux.prs 
+%{_datadir}/gstreamer-%{gst_branch}/presets/GstVP8Enc.prs + +%files -n gst-plugins-good-extra +%manifest gst-plugins-good.manifest +%defattr(-, root, root) +%license subprojects/gst-plugins-good/COPYING +%if 0%{?ENABLE_AALIB} +%{_lib_gstreamer_dir}/libgstaasink.so +%endif +%if "%{tizen_profile_name}" != "tv" +%{_lib_gstreamer_dir}/libgstimagefreeze.so +%{_lib_gstreamer_dir}/libgstpng.so +%endif + +%if "%{tizen_profile_name}" != "tv" +%files -n gst-plugins-good-cairo +%manifest gst-plugins-good.manifest +%defattr(-, root, root) +%license subprojects/gst-plugins-good/COPYING +%{_lib_gstreamer_dir}/libgstcairo.so + +%files -n gst-plugins-good-v4l2 +%manifest gst-plugins-good.manifest +%defattr(-, root, root) +%license subprojects/gst-plugins-good/COPYING +%{_lib_gstreamer_dir}/libgstvideo4linux2.so +%endif + +%endif + +###################### gst-plugins-bad ###################### + +%if "%{plugin}" == "all" || "%{plugin}" == "bad" + +%files -n gst-plugins-bad +%manifest gst-plugins-bad.manifest +%defattr(-, root, root) +%license subprojects/gst-plugins-bad/COPYING +%if "%{tizen_profile_name}" != "tv" +%{_lib_gstreamer_dir}/libgstautoconvert.so +%{_lib_gstreamer_dir}/libgstcamerabin.so +%{_lib_gstreamer_dir}/libgstcoloreffects.so +%{_lib_gstreamer_dir}/libgstdash.so +%{_lib_gstreamer_dir}/libgstgaudieffects.so +%{_lib_gstreamer_dir}/libgstgdp.so +%{_lib_gstreamer_dir}/libgsthls.so +%{_lib_gstreamer_dir}/libgstid3tag.so +%{_lib_gstreamer_dir}/libgstjpegformat.so +%{_lib_gstreamer_dir}/libgstmpegpsdemux.so +%{_lib_gstreamer_dir}/libgstrist.so +%{_lib_gstreamer_dir}/libgstsmoothstreaming.so +%{_lib_gstreamer_dir}/libgstsrt.so +%{_libdir}/libgstadaptivedemux-%{gst_branch}.so.0* +%{_libdir}/libgstbadaudio-%{gst_branch}.so.0* +%{_libdir}/libgstbasecamerabinsrc-%{gst_branch}.so.0* +%{_libdir}/libgstcuda-%{gst_branch}.so.0* +%{_libdir}/libgstinsertbin-%{gst_branch}.so.0* +%{_libdir}/libgstisoff-%{gst_branch}.so.0* +%{_libdir}/libgstphotography-%{gst_branch}.so.0* 
+%{_libdir}/libgstplay-%{gst_branch}.so.0* +%{_libdir}/libgstplayer-%{gst_branch}.so.0* +%{_libdir}/libgsturidownloader-%{gst_branch}.so.0* +%if 0%{?enable_gl:1} +%{_lib_gstreamer_dir}/libgstopengl.so +%{_libdir}/libgstgl-%{gst_branch}.so.0* +%endif +%endif # profile +%{_lib_gstreamer_dir}/libgstaes.so +%{_lib_gstreamer_dir}/libgstaudiobuffersplit.so +%{_lib_gstreamer_dir}/libgstaudiolatency.so +%{_lib_gstreamer_dir}/libgstaudiomixmatrix.so +%{_lib_gstreamer_dir}/libgstdebugutilsbad.so +%{_lib_gstreamer_dir}/libgstdtls.so +%{_lib_gstreamer_dir}/libgstipcpipeline.so +%{_lib_gstreamer_dir}/libgstmidi.so +%{_lib_gstreamer_dir}/libgstmpegtsdemux.so +%{_lib_gstreamer_dir}/libgstmpegtsmux.so +%{_lib_gstreamer_dir}/libgstnetsim.so +%{_lib_gstreamer_dir}/libgstnice.so +%{_lib_gstreamer_dir}/libgstopenal.so +%{_lib_gstreamer_dir}/libgstopusparse.so +%{_lib_gstreamer_dir}/libgstproxy.so +%if "%{tizen_profile_name}" != "tv" +%{_lib_gstreamer_dir}/libgstrtpmanagerbad.so +%endif +%{_lib_gstreamer_dir}/libgstsctp.so +%{_lib_gstreamer_dir}/libgstsdpelem.so +%{_lib_gstreamer_dir}/libgstshm.so +%{_lib_gstreamer_dir}/libgstsoundtouch.so +%{_lib_gstreamer_dir}/libgstsrtp.so +%{_lib_gstreamer_dir}/libgsttimecode.so +%{_lib_gstreamer_dir}/libgstvideoframe_audiolevel.so +%{_lib_gstreamer_dir}/libgstvideoparsersbad.so +%if %{with wayland} +%{_lib_gstreamer_dir}/libgstwaylandsink.so +%endif +%{_lib_gstreamer_dir}/libgstwebrtc.so +%{_libdir}/libgstcodecparsers-%{gst_branch}.so.0* +%{_libdir}/libgstcodecs-%{gst_branch}.so.0* +%{_libdir}/libgstmpegts-%{gst_branch}.so.0* +%{_libdir}/libgstsctp-%{gst_branch}.so.0* +%{_libdir}/libgsttranscoder-%{gst_branch}.so.0* +%if %{with wayland} +%{_libdir}/libgstwayland-%{gst_branch}.so.0* +%endif +%{_libdir}/libgstwebrtc-%{gst_branch}.so.0* +%{_libdir}/libgstwebrtcnice-%{gst_branch}.so.0* +%exclude %{_bindir}/gst-transcoder-%{gst_branch} + +%files -n gst-plugins-bad-devel +%manifest gst-plugins-bad.manifest +%defattr(-, root, root) +%if 
"%{tizen_profile_name}" != "tv" +%{_includedir}/gstreamer-%{gst_branch}/gst/audio/audio-bad-prelude.h +%{_includedir}/gstreamer-%{gst_branch}/gst/audio/gstnonstreamaudiodecoder.h +%{_includedir}/gstreamer-%{gst_branch}/gst/audio/gstplanaraudioadapter.h +%{_includedir}/gstreamer-%{gst_branch}/gst/basecamerabinsrc/* +%{_includedir}/gstreamer-%{gst_branch}/gst/cuda/* +%{_includedir}/gstreamer-%{gst_branch}/gst/insertbin/* +%{_includedir}/gstreamer-%{gst_branch}/gst/interfaces/* +%{_includedir}/gstreamer-%{gst_branch}/gst/isoff/* +%{_includedir}/gstreamer-%{gst_branch}/gst/play/* +%{_includedir}/gstreamer-%{gst_branch}/gst/player/* +%{_includedir}/gstreamer-%{gst_branch}/gst/uridownloader/* +%endif +%{_includedir}/gstreamer-%{gst_branch}/gst/codecparsers/* +%{_includedir}/gstreamer-%{gst_branch}/gst/mpegts/* +%{_includedir}/gstreamer-%{gst_branch}/gst/sctp/* +%{_includedir}/gstreamer-%{gst_branch}/gst/transcoder/* +%if %{with wayland} +%{_includedir}/gstreamer-%{gst_branch}/gst/wayland/* +%endif +%{_includedir}/gstreamer-%{gst_branch}/gst/webrtc/* +%if "%{tizen_profile_name}" != "tv" +%{_libdir}/libgstadaptivedemux*.so +%{_libdir}/libgstbadaudio*.so +%{_libdir}/libgstbasecamerabinsrc*.so +%{_libdir}/libgstcuda*.so +%{_libdir}/libgstinsertbin*.so +%{_libdir}/libgstisoff*.so +%{_libdir}/libgstphotography*.so +%{_libdir}/libgstplay*.so +%{_libdir}/libgsturidownloader*.so +%if 0%{?enable_gl:1} +%{_lib_gstreamer_dir}/include/gst/gl/* +%endif # gl +%endif # profile + +%{_libdir}/libgstcodecparsers*.so +%{_libdir}/libgstcodecs*.so +%{_libdir}/libgstmpegts*.so +%{_libdir}/libgstsctp*.so +%{_libdir}/libgsttranscoder*.so +%if %{with wayland} +%{_libdir}/libgstwayland*.so +%endif +%{_libdir}/libgstwebrtc*.so +%{_libdir}/pkgconfig/gstreamer-codecparsers-%{gst_branch}.pc +%if "%{tizen_profile_name}" != "tv" +%{_libdir}/pkgconfig/gstreamer-bad-audio-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-cuda-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-insertbin-%{gst_branch}.pc 
+%{_libdir}/pkgconfig/gstreamer-photography-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-play-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-player-%{gst_branch}.pc +%if 0%{?enable_gl:1} +%{_libdir}/pkgconfig/gstreamer-gl-*.pc +%endif # gl +%endif # profile +%{_libdir}/pkgconfig/gstreamer-mpegts-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-plugins-bad-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-sctp-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-transcoder-%{gst_branch}.pc +%if %{with wayland} +%{_libdir}/pkgconfig/gstreamer-wayland-%{gst_branch}.pc +%endif +%{_libdir}/pkgconfig/gstreamer-webrtc-%{gst_branch}.pc +%{_libdir}/pkgconfig/gstreamer-webrtc-nice-%{gst_branch}.pc + +%endif # plugin + +###################### gst-plugins-ugly ###################### + +%if "%{plugin}" == "all" || "%{plugin}" == "ugly" + +%files -n gst-plugins-ugly +%manifest gst-plugins-ugly.manifest +%defattr(-,root,root,-) +%license subprojects/gst-plugins-ugly/COPYING +%if "%{tizen_profile_name}" != "tv" +%{_lib_gstreamer_dir}/libgstamrnb.so +%{_lib_gstreamer_dir}/libgstasf.so +%exclude %{_datadir}/gstreamer-%{gst_branch}/presets/GstAmrnbEnc.prs +%endif # profile +%{_lib_gstreamer_dir}/libgstamrwbdec.so + +%endif # plugin + +######################## gst-libav ########################### + +%if "%{plugin}" == "all" || "%{plugin}" == "libav" + +%files -n gst-libav +%manifest gst-libav.manifest +%defattr(-,root,root,-) +%license subprojects/gst-libav/COPYING +%{_lib_gstreamer_dir}/libgstlibav.so + +%endif + +###################### gst-rtsp-server ####################### + +%if "%{plugin}" == "all" || "%{plugin}" == "rs" + +%files -n gst-rtsp-server +%manifest gst-rtsp-server.manifest +%defattr(-,root,root,-) +%license subprojects/gst-rtsp-server/COPYING +%{_libdir}/libgstrtspserver*.so.* +%{_lib_gstreamer_dir}/libgstrtspclientsink.so + +%files -n gst-rtsp-server-devel +%defattr(-,root,root,-) +%{_libdir}/libgstrtspserver*.so 
+%{_includedir}/gstreamer-%{gst_branch}/gst/rtsp-server/rtsp-*.h +%{_includedir}/gstreamer-%{gst_branch}/gst/rtsp-server/gstwfd*.h +%{_libdir}/pkgconfig/gstreamer-rtsp-server-%{gst_branch}.pc + +%endif + +%if "%{tizen_profile_name}" != "tv" + +########################## gst-omx ########################### +%ifarch %{arm} aarch64 riscv64 + +%if "%{plugin}" == "all" || "%{plugin}" == "omx" + +%files -n gst-omx +%manifest gst-omx.manifest +%defattr(-,root,root,-) +%license subprojects/gst-omx/COPYING +%{_lib_gstreamer_dir}/libgstomx.so + +%endif + +%endif # arch + +###################### gst-editing-services ####################### + +%if "%{plugin}" == "all" || "%{plugin}" == "es" + +%files -n gst-editing-services +%manifest gst-editing-services.manifest +%defattr(-,root,root,-) +%license subprojects/gst-editing-services/COPYING +%{_libdir}/libges-%{gst_branch}.so.* +%{_lib_gstreamer_dir}/libgstges.so +%{_lib_gstreamer_dir}/libgstnle.so +%exclude %{_libdir}/gst-validate-launcher/python/launcher/apps/geslaunch.py +%exclude %{_datadir}/gstreamer-%{gst_branch}/validate/scenarios/ges-edit-clip-while-paused.scenario +%{_bindir}/ges-launch-%{gst_branch} + +%files -n gst-editing-services-devel +%{_libdir}/libges-%{gst_branch}.so +%{_includedir}/gstreamer-%{gst_branch}/ges/* +%{_libdir}/pkgconfig/gst-editing-services-%{gst_branch}.pc + +%endif # plugin + +%endif # profile diff --cc subprojects/gst-libav/ext/libav/gstavmux.c index 22fe7300f9,9f907e2692..bcbafd510d --- a/subprojects/gst-libav/ext/libav/gstavmux.c +++ b/subprojects/gst-libav/ext/libav/gstavmux.c @@@ -1127,11 -724,6 +1119,11 @@@ gst_ffmpegmux_collected (GstCollectPad AVPacket pkt = { 0, }; GstMapInfo map; +#ifdef TIZEN_FEATURE_LIBAV - av_init_packet (&pkt); ++ av_packet_unref(&pkt); + pkt.is_mux = 1; +#endif /* TIZEN_FEATURE_LIBAV */ + /* push out current buffer */ buf = gst_collect_pads_pop (ffmpegmux->collect, (GstCollectData *) best_pad); diff --cc subprojects/gst-plugins-bad/ext/webrtc/webrtcdatachannel.c 
index b9f26dbaeb,1305c740ac..e4691f07b6 --- a/subprojects/gst-plugins-bad/ext/webrtc/webrtcdatachannel.c +++ b/subprojects/gst-plugins-bad/ext/webrtc/webrtcdatachannel.c @@@ -782,6 -793,6 +796,9 @@@ webrtc_data_channel_start_negotiation ( g_return_if_fail (channel->sctp_transport != NULL); buffer = construct_open_packet (channel); ++#ifdef TIZEN_FEATURE_WEBRTC_MODIFICATION ++ GstWebRTCBin *webrtcbin = NULL; ++#endif GST_INFO_OBJECT (channel, "Sending channel open for SCTP stream %i " "label \"%s\" protocol %s ordered %s", channel->parent.id, @@@ -796,9 -807,6 +813,13 @@@ if (gst_app_src_push_buffer (GST_APP_SRC (channel->appsrc), buffer) == GST_FLOW_OK) { channel->opened = TRUE; +#ifdef TIZEN_FEATURE_WEBRTC_MODIFICATION - channel->webrtcbin->priv->data_channels_opened++; ++ webrtcbin = g_weak_ref_get (&channel->webrtcbin_weak); ++ if (webrtcbin) { ++ webrtcbin->priv->data_channels_opened++; ++ g_object_unref (webrtcbin); ++ } +#endif _channel_enqueue_task (channel, (ChannelTask) _emit_on_open, NULL, NULL); } else { GError *error = NULL; diff --cc subprojects/gst-plugins-base/gst/playback/gstdecodebin3.c index 82cc065862,017fa2293e..61bfc0b374 --- a/subprojects/gst-plugins-base/gst/playback/gstdecodebin3.c +++ b/subprojects/gst-plugins-base/gst/playback/gstdecodebin3.c @@@ -482,10 -461,6 +482,10 @@@ static void gst_decodebin3_set_propert const GValue * value, GParamSpec * pspec); static void gst_decodebin3_get_property (GObject * object, guint prop_id, GValue * value, GParamSpec * pspec); +#ifdef TIZEN_FEATURE_RESOURCE_MANAGER - static gboolean gst_decodebin3_request_resource (GstElement * element, ++static gboolean gst_decodebin3_request_resource (GstDecodebin3 * dbin, + GstStreamCollection * collection, GstStream * stream); +#endif static gboolean parsebin_autoplug_continue_cb (GstElement * parsebin, GstPad * pad, GstCaps * caps, GstDecodebin3 * dbin); @@@ -548,28 -520,6 +548,28 @@@ static GList *create_decoder_factory_li /* FIXME: Really make all the parser stuff 
a self-contained helper object */ #include "gstdecodebin3-parse.c" +#ifdef TIZEN_FEATURE_RESOURCE_MANAGER - static gboolean gst_decodebin3_request_resource (GstElement * element, ++static gboolean gst_decodebin3_request_resource (GstDecodebin3 * dbin, + GstStreamCollection * collection, GstStream * stream) +{ + /* do not consider the resource limit */ + return TRUE; +} + +static gboolean +_gst_boolean_accumulator (GSignalInvocationHint * ihint, + GValue * return_accu, const GValue * handler_return, gpointer dummy) +{ + gboolean myboolean; + + myboolean = g_value_get_boolean (handler_return); + if (!(ihint->run_type & G_SIGNAL_RUN_CLEANUP)) + g_value_set_boolean (return_accu, myboolean); + + return myboolean; +} +#endif + static gboolean _gst_int_accumulator (GSignalInvocationHint * ihint, GValue * return_accu, const GValue * handler_return, gpointer dummy) diff --cc subprojects/gst-plugins-base/gst/subparse/gstsubparse.h index 4d239fd5c6,8673466768..f6541dad1e --- a/subprojects/gst-plugins-base/gst/subparse/gstsubparse.h +++ b/subprojects/gst-plugins-base/gst/subparse/gstsubparse.h @@@ -85,11 -73,12 +85,12 @@@ struct _GstSubParse /* seek */ guint64 offset; - + /* Segment */ + guint32 segment_seqnum; GstSegment segment; gboolean need_segment; - + gboolean flushing; gboolean valid_utf8; gchar *detected_encoding; diff --cc subprojects/gst-plugins-good/ext/pulse/pulsesink.c index 9a20e0fc42,c0ffff388e..071f0c2290 --- a/subprojects/gst-plugins-good/ext/pulse/pulsesink.c +++ b/subprojects/gst-plugins-good/ext/pulse/pulsesink.c @@@ -792,19 -757,6 +792,19 @@@ gst_pulsering_stream_latency_cb (pa_str GST_TIMEVAL_TO_TIME (info->timestamp), info->write_index_corrupt, info->write_index, info->read_index_corrupt, info->read_index, info->sink_usec, sink_usec); +#ifdef TIZEN_FEATURE_PULSE_MODIFICATION + if (!psink || !psink->auto_render_delay) + return; + + if (sink_usec < info->sink_usec) + gst_base_sink_set_render_delay (GST_BASE_SINK(psink), + (info->sink_usec - sink_usec) * 
G_GINT64_CONSTANT (1000)); + else + gst_base_sink_set_render_delay (GST_BASE_SINK(psink), 0); + + GST_DEBUG_OBJECT (psink, - "Current render delay is %llu", gst_base_sink_get_render_delay (GST_BASE_SINK(psink))); ++ "Current render delay is %" G_GUINT64_FORMAT, gst_base_sink_get_render_delay (GST_BASE_SINK(psink))); +#endif } static void diff --cc subprojects/gst-plugins-good/gst/isomp4/qtdemux.h index e0f90bb702,830ed2fda5..7074ffb83f --- a/subprojects/gst-plugins-good/gst/isomp4/qtdemux.h +++ b/subprojects/gst-plugins-good/gst/isomp4/qtdemux.h @@@ -65,10 -65,19 +65,24 @@@ enum QtDemuxStat QTDEMUX_STATE_BUFFER_MDAT /* Buffering the mdat atom */ }; ++ +#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION +typedef struct _QtDemuxSphericalMetadata QtDemuxSphericalMetadata; +#endif + + typedef enum { + /* Regular behaviour */ + VARIANT_NONE, + + /* We're working with a MediaSource Extensions ISO BMFF Bytestream. */ + VARIANT_MSE_BYTESTREAM, + + /* We're working with a smoothstreaming fragment. + * Mss doesn't have 'moov' or any information about the streams format, + * requiring qtdemux to expose and create the streams */ + VARIANT_MSS_FRAGMENTED, + } Variant; + struct _GstQTDemux { GstElement element; @@@ -270,9 -276,6 +281,9 @@@ * fields. 
*/ gboolean received_seek; gboolean first_moof_already_parsed; +#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION + QtDemuxSphericalMetadata *spherical_metadata; - #endif ++#endif }; struct _GstQTDemuxClass { diff --cc subprojects/gst-plugins-good/gst/rtsp/gstrtspsrc.c index 67038308b2,8666d26e19..3bc611dc25 --- a/subprojects/gst-plugins-good/gst/rtsp/gstrtspsrc.c +++ b/subprojects/gst-plugins-good/gst/rtsp/gstrtspsrc.c @@@ -547,28 -537,6 +547,28 @@@ cmd_to_string (guint cmd } #endif +#ifdef TIZEN_FEATURE_RTSP_MODIFICATION +static void +gst_rtspsrc_post_error_message (GstRTSPSrc * src, GstRTSPSrcError error_id, + const gchar * error_string) +{ + gboolean ret = TRUE; - g_autoptr(GError) *gerror = NULL; ++ g_autoptr(GError) gerror = NULL; + + GST_ERROR_OBJECT (src, "[%d] %s", error_id, error_string); + + gerror = g_error_new_literal (GST_RESOURCE_ERROR, error_id, error_string); + + ret = gst_element_post_message (GST_ELEMENT (src), + gst_message_new_custom (GST_MESSAGE_ERROR, GST_OBJECT (src), + gst_structure_new ("streaming_error", + "gerror", G_TYPE_ERROR, gerror, + "debug", G_TYPE_STRING, NULL, NULL))); + if (!ret) + GST_ERROR_OBJECT (src, "fail to post error message."); +} +#endif + static gboolean default_select_stream (GstRTSPSrc * src, guint id, GstCaps * caps) { @@@ -1282,7 -1234,7 +1282,7 @@@ gst_rtspsrc_class_init (GstRTSPSrcClas * @sample: RTP sample to send back * * Deprecated: 1.22: Use action signal GstRTSPSrc::push-backchannel-sample instead. -- * IMPORTANT: Please note that this signal decrements the reference count ++ * IMPORTANT: Please note that this signal decrements the reference count * of sample internally! So it cannot be used from other * language bindings in general. 
* @@@ -2610,26 -2449,9 +2617,26 @@@ gst_rtspsrc_create_stream (GstRTSPSrc base = get_aggregate_control (src); if (g_strcmp0 (control_path, "*") == 0) - control_path = g_strdup (base); + stream->conninfo.location = g_strdup (base); else - stream->conninfo.location = gst_uri_join_strings (base, control_path); +#ifdef TIZEN_FEATURE_RTSP_MODIFICATION + { + /* If uri does not end with slash, gst_uri_join_strings() ignores the last path. + * Similar issue exists, but there was no official patch yet. + * https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/2614 + * So we added slash at the end of uri and we will monitor this issue. */ + if (!g_str_has_suffix (base, "/")) { + /* base with slash added at the end of uri */ + gchar *base2 = g_strconcat(base, "/", NULL); + stream->conninfo.location = gst_uri_join_strings (base2, control_path); + g_free(base2); + } else { +#endif + stream->conninfo.location = gst_uri_join_strings (base, control_path); +#ifdef TIZEN_FEATURE_RTSP_MODIFICATION + } + } +#endif } } GST_DEBUG_OBJECT (src, " setup: %s", diff --cc subprojects/gst-plugins-good/gst/videofilter/gstvideoflip.c index df5bc994c9,718ab45b86..c065523f59 --- a/subprojects/gst-plugins-good/gst/videofilter/gstvideoflip.c +++ b/subprojects/gst-plugins-good/gst/videofilter/gstvideoflip.c @@@ -939,7 -926,7 +939,7 @@@ gst_video_flip_planar_yuv_422_16bit (Gs * which need average chrominance values between two pixels */ format_is_le = GST_VIDEO_FORMAT_INFO_IS_LE (dest->info.finfo); -- /* Attempt to get the compiler to inline specialized variants of this function ++ /* Attempt to get the compiler to inline specialized variants of this function * to avoid too much branching due to endianness checks */ if (format_is_le) { rotate_yuv422_plane (dest, src, 0, videoflip->active_method, FALSE, TRUE); @@@ -1849,93 -1845,17 +1861,105 @@@ gst_video_flip_get_property (GObject * } } +#ifdef TIZEN_FEATURE_VIDEOFLIP_TBM_SUPPORT +static gboolean +gst_video_flip_decide_allocation 
(GstBaseTransform * trans, + GstQuery * query) +{ + GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans); + GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans); + + GST_WARNING_OBJECT (videoflip, "format[%s]", filter->out_info.finfo->name); + + if (filter->out_info.finfo->format == GST_VIDEO_FORMAT_SN12) { + guint size; + GstStructure *config; + GstCaps *caps = NULL; + GstVideoInfo vinfo; + + gst_query_parse_allocation (query, &caps, NULL); + if (!caps) { + GST_WARNING_OBJECT (videoflip, "caps failed"); + return FALSE; + } + + gst_video_info_init (&vinfo); + gst_video_info_from_caps (&vinfo, caps); + + size = vinfo.size; + + videoflip->pool = gst_tizen_buffer_pool_new (); + config = gst_buffer_pool_get_config (videoflip->pool); + + gst_buffer_pool_config_set_params (config, caps, size, + TIZEN_BUFFER_POOL_MIN_BUFFERS, TIZEN_BUFFER_POOL_MAX_BUFFERS); + gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META); + gst_buffer_pool_set_config (videoflip->pool, config); + + if (!gst_buffer_pool_set_active (videoflip->pool, TRUE)) { + gst_object_unref (videoflip->pool); + videoflip->pool = NULL; + GST_WARNING_OBJECT (videoflip, "Failed to activate pool"); + return FALSE; + } + + GST_WARNING_OBJECT (videoflip, "new buffer pool[%p]", videoflip->pool); + } + + return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans, query); +} + +static GstFlowReturn +gst_video_flip_prepare_output_buffer (GstBaseTransform * trans, + GstBuffer *input, GstBuffer **outbuf) +{ + GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans); + GstBuffer *buf = NULL; + + if (!videoflip->pool) + return GST_BASE_TRANSFORM_CLASS (parent_class)->prepare_output_buffer (trans, input, outbuf); + + if (gst_buffer_pool_acquire_buffer (videoflip->pool, &buf, 0) != GST_FLOW_OK) { + GST_ERROR_OBJECT (trans, "Buffer acquire failed"); + return GST_FLOW_ERROR; + } + + GST_DEBUG_OBJECT (videoflip, "acquired buffer[%p]", buf); + + if (input != buf) + GST_BASE_TRANSFORM_CLASS 
(parent_class)->copy_metadata (trans, input, buf); + + *outbuf = buf; + + return GST_FLOW_OK; +} + +static void - gst_video_flip_finalize (GstVideoFlip * videoflip) ++gst_video_flip_finalize (GObject * object) +{ ++ GstVideoFlip *videoflip = GST_VIDEO_FLIP (object); + if (videoflip->pool) { + GST_WARNING_OBJECT (videoflip, "free buffer pool[%p]", videoflip->pool); + gst_buffer_pool_set_active (videoflip->pool, FALSE); + gst_object_unref (videoflip->pool); + videoflip->pool = NULL; + } + + G_OBJECT_CLASS (parent_class)->finalize (G_OBJECT (videoflip)); +} +#endif + + static void + gst_video_flip_constructed (GObject * object) + { + GstVideoFlip *self = GST_VIDEO_FLIP (object); + + if (self->method == (GstVideoOrientationMethod) PROP_METHOD_DEFAULT) { + gst_video_flip_set_method (self, + (GstVideoOrientationMethod) PROP_METHOD_DEFAULT, FALSE); + } + } + static void gst_video_flip_class_init (GstVideoFlipClass * klass) { diff --cc subprojects/gst-plugins-good/sys/v4l2/gstv4l2allocator.c index b1a3f7288a,059039e530..a408308e5e --- a/subprojects/gst-plugins-good/sys/v4l2/gstv4l2allocator.c +++ b/subprojects/gst-plugins-good/sys/v4l2/gstv4l2allocator.c @@@ -693,33 -658,6 +691,40 @@@ gst_v4l2_allocator_new (GstObject * par GST_OBJECT_FLAG_SET (allocator, flags); +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT + if (v4l2object->tbm_output && + !V4L2_TYPE_IS_OUTPUT (v4l2object->type) && + v4l2object->mode == GST_V4L2_IO_DMABUF) { ++ int i = 0; + tbm_surface_h tmp_surface = NULL; + int width = GST_VIDEO_INFO_WIDTH (&v4l2object->info); + int height = GST_VIDEO_INFO_HEIGHT (&v4l2object->info); + + tmp_surface = tbm_surface_create (width, height, + __get_tbm_format (GST_VIDEO_INFO_FORMAT (&v4l2object->info))); + if (tmp_surface) { + tbm_surface_get_info (tmp_surface, &allocator->s_info); - GST_INFO_OBJECT (allocator, "[%dx%d] -> tbm surface info[%dx%d]", - width, height, allocator->s_info.width, allocator->s_info.height); + tbm_surface_destroy (tmp_surface); ++ ++ GST_INFO_OBJECT 
(allocator, "TBM surface [%dx%d], Total size[%u]", ++ allocator->s_info.width, allocator->s_info.height, allocator->s_info.size); ++ ++ for (i = 0 ; i < allocator->s_info.num_planes ; i++) { ++ GST_INFO_OBJECT (allocator, " plane[%d] stride[%d],size[%d]", ++ i, allocator->s_info.planes[i].stride, allocator->s_info.planes[i].size); ++ } + } else { + GST_ERROR_OBJECT (allocator, "[%dx%d] surface failed", width, height); + } + + allocator->bufmgr = tbm_bufmgr_init (-1); + if (!allocator->bufmgr) { + GST_ERROR_OBJECT (allocator, "tbm bufmgr failed"); + gst_object_unref (allocator); + return NULL; + } + } +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ return allocator; } @@@ -975,17 -910,10 +980,23 @@@ gst_v4l2_allocator_alloc_dmabuf (GstV4l GST_LOG_OBJECT (allocator, "exported DMABUF as fd %i plane %d", expbuf.fd, i); - group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator), - NULL, group->planes[i].length, 0, group->planes[i].data_offset, - group->planes[i].length - group->planes[i].data_offset, i, NULL, - expbuf.fd, group); +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT + if (obj->tbm_output) { + bos[i] = tbm_bo_import_fd (allocator->bufmgr, expbuf.fd); - GST_INFO_OBJECT (allocator, "obj[%p,i:%d]: fd[%d] -> bo[%p]", - obj, expbuf.index, expbuf.fd, bos[i]); ++ ++ GST_INFO_OBJECT (allocator, "obj[%p,i:%d]: fd[%d] -> bo[%p], size[%u]", ++ obj, expbuf.index, expbuf.fd, bos[i], allocator->s_info.size); ++ ++ if (group->n_mem == 1) ++ group->planes[0].length = allocator->s_info.size; ++ else ++ group->planes[i].length = allocator->s_info.planes[i].size; + } +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ + group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator), + NULL, group->planes[i].length, 0, group->planes[i].data_offset, + group->planes[i].length - group->planes[i].data_offset, i, NULL, + expbuf.fd, group); } else { /* Take back the allocator reference */ gst_object_ref (allocator); @@@ -1006,16 -934,6 +1017,20 @@@ group->mem[i] = dma_mem; } - 
+#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT - if (obj->tbm_output && !group->surface) { - group->surface = tbm_surface_internal_create_with_bos (&allocator->s_info, bos, group->n_mem); - GST_INFO_OBJECT (allocator, "new surface[%p] in memory group[%p]", group->surface, group); - } - /* release bos - they will be kept in surface. */ - for (i = 0 ; i < VIDEO_MAX_PLANES && bos[i] ; i++) - tbm_bo_unref (bos[i]); ++ if (obj->tbm_output) { ++ if (group->surface) { ++ GST_WARNING_OBJECT (allocator, "destroy previous surface[%p]", group->surface); ++ tbm_surface_destroy (group->surface); ++ } ++ ++ group->surface = tbm_surface_internal_create_with_bos (&allocator->s_info, bos, group->n_mem); ++ ++ /* release bos - they will be kept in surface. */ ++ for (i = 0 ; i < VIDEO_MAX_PLANES && bos[i] ; i++) ++ tbm_bo_unref (bos[i]); ++ } +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ gst_v4l2_allocator_reset_size (allocator, group); @@@ -1439,6 -1348,6 +1454,17 @@@ gst_v4l2_allocator_dqbuf (GstV4l2Alloca g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m)); memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m)); } ++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT ++ if (group->surface) { ++ /* This code is to remove the warning of gst_memory_resize() in gst_v4l2_buffer_pool_dqbuf(). 
*/ ++ if (group->n_mem == 1) { ++ group->planes[0].length = allocator->s_info.size; ++ } else { ++ for (i = 0 ; i < group->n_mem ; i++) ++ group->planes[i].length = allocator->s_info.planes[i].size; ++ } ++ } ++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ /* And update memory size */ if (V4L2_TYPE_IS_OUTPUT (obj->type)) { diff --cc subprojects/gst-plugins-good/sys/v4l2/gstv4l2bufferpool.c index f48ba93295,8c6eba175c..a5a97573e9 --- a/subprojects/gst-plugins-good/sys/v4l2/gstv4l2bufferpool.c +++ b/subprojects/gst-plugins-good/sys/v4l2/gstv4l2bufferpool.c @@@ -1472,13 -1237,9 +1400,12 @@@ gst_v4l2_buffer_pool_dqbuf (GstV4l2Buff GstV4l2Object *obj = pool->obj; GstClockTime timestamp; GstV4l2MemoryGroup *group; - GstVideoMeta *vmeta; - gsize size; + const GstVideoInfo *info = &obj->info; gint i; gint old_buffer_state; +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT + GstV4l2TizenBuffer *tizen_buffer = NULL; +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ if ((res = gst_v4l2_buffer_pool_poll (pool, wait)) < GST_FLOW_OK) goto poll_failed; @@@ -2034,30 -1775,9 +1971,21 @@@ gst_v4l2_buffer_pool_new (GstV4l2Objec g_object_ref_sink (pool); g_free (name); - gst_poll_fd_init (&pool->pollfd); - pool->pollfd.fd = fd; - gst_poll_add_fd (pool->poll, &pool->pollfd); - if (V4L2_TYPE_IS_OUTPUT (obj->type)) - gst_poll_fd_ctl_write (pool->poll, &pool->pollfd, TRUE); - else - gst_poll_fd_ctl_read (pool->poll, &pool->pollfd, TRUE); - pool->video_fd = fd; pool->obj = obj; - pool->can_poll_device = TRUE; +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT + pool->tallocator = gst_tizen_allocator_new (); + if (pool->tallocator == NULL) + goto allocator_failed; + + g_mutex_init (&pool->buffer_lock); + g_cond_init (&pool->buffer_cond); + + pool->tbm_output_dump = obj->tbm_output_dump; + + GST_INFO ("tbm output dump [%d]", pool->tbm_output_dump); +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ pool->vallocator = gst_v4l2_allocator_new (GST_OBJECT (pool), obj); if (pool->vallocator == NULL) goto allocator_failed; @@@ 
-2531,14 -2281,16 +2502,20 @@@ gst_v4l2_buffer_pool_flush (GstV4l2Obje pool = GST_V4L2_BUFFER_POOL (bpool); +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT + gst_v4l2_buffer_pool_streamoff (pool, FALSE); +#else gst_v4l2_buffer_pool_streamoff (pool); +#endif - if (!V4L2_TYPE_IS_OUTPUT (pool->obj->type)) - ret = gst_v4l2_buffer_pool_streamon (pool); + if (!V4L2_TYPE_IS_OUTPUT (pool->obj->type)) { + ret = gst_v4l2_buffer_pool_flush_events (v4l2object); + + /* If the format haven't change, avoid reallocation to go back to + * streaming */ + if (ret == GST_FLOW_OK) + ret = gst_v4l2_buffer_pool_streamon (pool); + } gst_object_unref (bpool); return ret; diff --cc subprojects/gst-plugins-good/sys/v4l2/gstv4l2object.c index 31ab76e98f,7c787f11fc..5589e22a25 --- a/subprojects/gst-plugins-good/sys/v4l2/gstv4l2object.c +++ b/subprojects/gst-plugins-good/sys/v4l2/gstv4l2object.c @@@ -526,13 -525,12 +526,16 @@@ gst_v4l2_object_new (GstElement * eleme v4l2object->no_initial_format = FALSE; + v4l2object->poll = gst_poll_new (TRUE); + v4l2object->can_poll_device = TRUE; + /* We now disable libv4l2 by default, but have an env to enable it. */ #ifdef HAVE_LIBV4L2 +#ifdef TIZEN_FEATURE_USE_LIBV4L2 + if (1) { +#else /* TIZEN_FEATURE_USE_LIBV4L2 */ if (g_getenv ("GST_V4L2_USE_LIBV4L2")) { +#endif /* TIZEN_FEATURE_USE_LIBV4L2 */ v4l2object->fd_open = v4l2_fd_open; v4l2object->close = v4l2_close; v4l2object->dup = v4l2_dup; @@@ -3512,6 -3503,6 +3550,7 @@@ get_v4l2_field_for_info (GstVideoInfo } } ++#ifndef TIZEN_FEATURE_V4L2_DISABLE_COLORIMETRY static gboolean gst_v4l2_video_colorimetry_matches (const GstVideoColorimetry * cinfo, GstCaps * caps) @@@ -3561,6 -3552,6 +3600,7 @@@ return FALSE; } ++#endif static const gchar * field_to_str (enum v4l2_field f) @@@ -4217,6 -4204,6 +4257,7 @@@ invalid_field wanted_field == V4L2_FIELD_NONE ? 
"progressive" : "interleaved")); return FALSE; } ++#ifndef TIZEN_FEATURE_V4L2_DISABLE_COLORIMETRY invalid_colorimetry: { gchar *wanted_colorimetry; @@@ -4231,6 -4218,6 +4272,7 @@@ g_free (wanted_colorimetry); return FALSE; } ++#endif get_parm_failed: { /* it's possible that this call is not supported */ diff --cc subprojects/gst-plugins-good/sys/v4l2/gstv4l2videodec.c index 9d2ae64a94,c7f39ddee2..1632c72789 --- a/subprojects/gst-plugins-good/sys/v4l2/gstv4l2videodec.c +++ b/subprojects/gst-plugins-good/sys/v4l2/gstv4l2videodec.c @@@ -397,13 -346,48 +391,51 @@@ gst_v4l2_video_dec_flush (GstVideoDecod gst_v4l2_buffer_pool_flush (self->v4l2output); +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT + gst_v4l2_video_dec_flush_buffer_event (decoder); +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ /* gst_v4l2_buffer_pool_flush() calls streamon the capture pool and must be * called after gst_v4l2_object_unlock_stop() stopped flushing the buffer - * pool. */ - gst_v4l2_buffer_pool_flush (self->v4l2capture); + * pool. If the resolution has changed before we stopped the driver we must + * reallocate the capture pool. 
We simply discard the pool, and let the + * capture thread handle re-allocation.*/ + if (gst_v4l2_buffer_pool_flush (self->v4l2capture) == + GST_V4L2_FLOW_RESOLUTION_CHANGE || self->draining) + gst_v4l2_object_stop (self->v4l2capture); + + return TRUE; + } + + static gboolean + gst_v4l2_video_remove_padding (GstCapsFeatures * features, + GstStructure * structure, gpointer user_data) + { + GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (user_data); + GstVideoAlignment *align = &self->v4l2capture->align; + GstVideoInfo *info = &self->v4l2capture->info; + int width, height; + + if (!gst_structure_get_int (structure, "width", &width)) + return TRUE; + + if (!gst_structure_get_int (structure, "height", &height)) + return TRUE; + + if (align->padding_left != 0 || align->padding_top != 0 || + height != info->height + align->padding_bottom) + return TRUE; + + if (height == info->height + align->padding_bottom) { + /* Some drivers may round up width to the padded with */ + if (width == info->width + align->padding_right) + gst_structure_set (structure, + "width", G_TYPE_INT, width - align->padding_right, + "height", G_TYPE_INT, height - align->padding_bottom, NULL); + /* Some drivers may keep visible width and only round up bytesperline */ + else if (width == info->width) + gst_structure_set (structure, + "height", G_TYPE_INT, height - align->padding_bottom, NULL); + } return TRUE; } @@@ -412,19 -396,135 +444,169 @@@ static gboolea gst_v4l2_video_dec_negotiate (GstVideoDecoder * decoder) { GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder); + GstV4l2Error error = GST_V4L2_ERROR_INIT; + GstVideoInfo info; + GstVideoCodecState *output_state; + GstCaps *acquired_caps, *fixation_caps, *available_caps, *caps, *filter; + GstStructure *st; + gboolean active; + GstBufferPool *cpool; + gboolean ret; ++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT ++ GstStructure *structure = NULL; ++ const gchar *caps_format = NULL; ++ GstMessage *msg = NULL; ++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ /* 
We don't allow renegotiation without careful disabling the pool */ - { - GstBufferPool *cpool = gst_v4l2_object_get_buffer_pool (self->v4l2capture); - if (cpool) { - gboolean is_active = gst_buffer_pool_is_active (cpool); - gst_object_unref (cpool); - if (is_active) - return TRUE; - } + cpool = gst_v4l2_object_get_buffer_pool (self->v4l2capture); + if (cpool) { + gboolean is_active = gst_buffer_pool_is_active (cpool); + gst_object_unref (cpool); + if (is_active) + return TRUE; + } + + /* init capture fps according to output */ + self->v4l2capture->info.fps_d = self->v4l2output->info.fps_d; + self->v4l2capture->info.fps_n = self->v4l2output->info.fps_n; + + /* For decoders G_FMT returns coded size, G_SELECTION returns visible size + * in the compose rectangle. gst_v4l2_object_acquire_format() checks both + * and returns the visible size as with/height and the coded size as + * padding. */ + if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info)) + goto not_negotiated; + + /* gst_v4l2_object_acquire_format() does not set fps, copy from sink */ + info.fps_n = self->v4l2output->info.fps_n; + info.fps_d = self->v4l2output->info.fps_d; + + gst_caps_replace (&self->probed_srccaps, NULL); + self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture, + gst_v4l2_object_get_raw_caps ()); + /* Create caps from the acquired format, remove the format field */ + acquired_caps = gst_video_info_to_caps (&info); + GST_DEBUG_OBJECT (self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps); + fixation_caps = gst_caps_copy (acquired_caps); + st = gst_caps_get_structure (fixation_caps, 0); + gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site", + NULL); + + /* Probe currently available pixel formats */ + available_caps = gst_caps_copy (self->probed_srccaps); + GST_DEBUG_OBJECT (self, "Available caps: %" GST_PTR_FORMAT, available_caps); + + /* Replace coded size with visible size, we want to negotiate visible size + * with downstream, not coded size. 
*/ + gst_caps_map_in_place (available_caps, gst_v4l2_video_remove_padding, self); + + filter = gst_caps_intersect_full (available_caps, fixation_caps, + GST_CAPS_INTERSECT_FIRST); + GST_DEBUG_OBJECT (self, "Filtered caps: %" GST_PTR_FORMAT, filter); + gst_caps_unref (fixation_caps); + gst_caps_unref (available_caps); + caps = gst_pad_peer_query_caps (decoder->srcpad, filter); + gst_caps_unref (filter); + + GST_DEBUG_OBJECT (self, "Possible decoded caps: %" GST_PTR_FORMAT, caps); + if (gst_caps_is_empty (caps)) { + gst_caps_unref (caps); + goto not_negotiated; + } + ++#ifndef TIZEN_FEATURE_V4L2_TBM_SUPPORT + /* Prefer the acquired caps over anything suggested downstream, this ensure + * that we preserves the bit depth, as we don't have any fancy fixation + * process */ + if (gst_caps_is_subset (acquired_caps, caps)) + goto use_acquired_caps; ++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ + + /* Fixate pixel format */ + caps = gst_caps_fixate (caps); + + GST_DEBUG_OBJECT (self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps); ++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT ++ structure = gst_caps_get_structure (caps, 0); ++ caps_format = gst_structure_get_string (structure, "format"); ++ ++ if (!strcmp (caps_format, "I420")) { ++ GST_INFO_OBJECT (self, "I420 -> S420"); ++ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "S420", NULL); ++ } else if (!strcmp (caps_format, "NV12")) { ++ GST_INFO_OBJECT (self, "NV12 -> SN12"); ++ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "SN12", NULL); + } ++ GST_INFO_OBJECT (self, "Updated decoded caps: %" GST_PTR_FORMAT, caps); ++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ + + /* Try to set negotiated format, on success replace acquired format */ + if (gst_v4l2_object_set_format (self->v4l2capture, caps, &error)) + gst_video_info_from_caps (&info, caps); + else + gst_v4l2_clear_error (&error); + ++#ifndef TIZEN_FEATURE_V4L2_TBM_SUPPORT + use_acquired_caps: ++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ + gst_caps_unref 
(acquired_caps); + gst_caps_unref (caps); + + /* catch possible bogus driver that don't enumerate the format it actually + * returned from G_FMT */ + if (!self->v4l2capture->fmtdesc) + goto not_negotiated; + + output_state = gst_video_decoder_set_output_state (decoder, + info.finfo->format, info.width, info.height, self->input_state); + + /* Copy the rest of the information, there might be more in the future */ + output_state->info.interlace_mode = info.interlace_mode; + output_state->info.colorimetry = info.colorimetry; + gst_video_codec_state_unref (output_state); + + ret = GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder); + if (!ret) + goto not_negotiated; + + /* The pool may be created through gst_video_decoder_negotiate(), so must + * be kept after */ + cpool = gst_v4l2_object_get_buffer_pool (self->v4l2capture); + gst_v4l2_buffer_pool_enable_resolution_change (GST_V4L2_BUFFER_POOL (cpool)); + + /* Ensure our internal pool is activated */ + active = gst_buffer_pool_set_active (cpool, TRUE); + if (cpool) + gst_object_unref (cpool); + if (!active) + goto activate_failed; ++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT ++ msg = gst_message_new_element (GST_OBJECT_CAST (decoder), ++ gst_structure_new ("prepare-decode-buffers", ++ "num_buffers", G_TYPE_INT, GST_V4L2_BUFFER_POOL (cpool)->num_allocated, ++ "extra_num_buffers", G_TYPE_INT, GST_V4L2_BUFFER_POOL (cpool)->num_allocated - 2, ++ NULL)); ++ ++ gst_element_post_message (GST_ELEMENT_CAST (decoder), msg); ++ ++ GST_WARNING_OBJECT (self, "output buffer[%d]", ++ GST_V4L2_BUFFER_POOL (cpool)->num_allocated); ++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ - return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder); + return TRUE; + + not_negotiated: + GST_ERROR_OBJECT (self, "not negotiated"); + gst_v4l2_error (self, &error); + gst_v4l2_object_stop (self->v4l2capture); + return FALSE; + activate_failed: + GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS, + (_("Failed to allocate required memory.")), + 
("Buffer pool activation failed")); + gst_v4l2_object_stop (self->v4l2capture); + return FALSE; } static gboolean @@@ -905,12 -858,28 +940,38 @@@ gst_v4l2_video_dec_loop (GstVideoDecode return; beach: + if (ret == GST_V4L2_FLOW_RESOLUTION_CHANGE) { + GST_VIDEO_DECODER_STREAM_LOCK (decoder); ++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT ++ GST_WARNING_OBJECT (decoder, "RESOLUTION_CHANGE!"); ++ gst_v4l2_object_stop (self->v4l2capture); ++ gst_v4l2_object_unlock_stop (self->v4l2capture); ++#else /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ + self->draining = TRUE; ++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ + GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); + GST_INFO_OBJECT (decoder, "Received resolution change"); + return; + } + + if (ret == GST_V4L2_FLOW_LAST_BUFFER) { + GST_VIDEO_DECODER_STREAM_LOCK (decoder); + if (self->draining) { + self->draining = FALSE; + gst_v4l2_object_stop (self->v4l2capture); + GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); + return; + } + + GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); + } + GST_DEBUG_OBJECT (decoder, "Leaving output thread: %s", gst_flow_get_name (ret)); +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT - if (ret == GST_FLOW_EOS) ++ if (ret == GST_FLOW_EOS || ret == GST_V4L2_FLOW_LAST_BUFFER) + gst_v4l2_video_dec_flush_buffer_event (decoder); +#endif gst_buffer_replace (&buffer, NULL); self->output_flow = ret; diff --cc subprojects/gst-rtsp-server/NEWS index 9802493d32,e7c25cc94f..e7c25cc94f mode 100755,100644..100755 --- a/subprojects/gst-rtsp-server/NEWS +++ b/subprojects/gst-rtsp-server/NEWS diff --cc subprojects/gst-rtsp-server/gst/rtsp-server/gstwfdmessage.c index 3b70583642,0000000000..cc35251cd4 mode 100755,000000..100755 --- a/subprojects/gst-rtsp-server/gst/rtsp-server/gstwfdmessage.c +++ b/subprojects/gst-rtsp-server/gst/rtsp-server/gstwfdmessage.c @@@ -1,2955 -1,0 +1,2957 @@@ +/* GStreamer + * Copyright (C) 2015 Samsung Electronics Hyunjun Ko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of 
the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ +/* + * Unless otherwise indicated, Source Code is licensed under MIT license. + * See further explanation attached in License Statement (distributed in the file + * LICENSE). + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of + * this software and associated documentation files (the "Software"), to deal in + * the Software without restriction, including without limitation the rights to + * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies + * of the Software, and to permit persons to whom the Software is furnished to do + * so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +/* + * SECTION:gstwfdmessage + * @short_description: Helper methods for dealing with WFD messages + * + * + * + * The GstWFDMessage helper functions makes it easy to parse and create WFD + * messages. + * + * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include + +#include + +#include "gstwfdmessage.h" + +#define EDID_BLOCK_SIZE 128 +#define EDID_BLOCK_COUNT_MAX_SIZE 256 +#define MAX_PORT_SIZE 65535 + +#define FREE_STRING(field) do { g_free (field); (field) = NULL; } while(0) +#define REPLACE_STRING(field, val) do { FREE_STRING(field); (field) = g_strdup (val); } while(0) + +#define INIT_ARRAY(field, type, init_func) \ +G_STMT_START { \ + if (field) { \ + guint i; \ + for(i = 0; i < (field)->len; i++) \ + init_func (&g_array_index ((field), type, i)); \ + g_array_set_size ((field), 0); \ + } \ + else \ + (field) = g_array_new (FALSE, TRUE, sizeof (type)); \ +} G_STMT_END + +#define FREE_ARRAY(field) \ +G_STMT_START { \ + if (field) \ + g_array_free ((field), TRUE); \ + (field) = NULL; \ +} G_STMT_END + +#define DEFINE_STRING_SETTER(field) \ +GstWFDResult gst_wfd_message_set_##field (GstWFDMessage *msg, const gchar *val) { \ + g_free (msg->field); \ + msg->field = g_strdup (val); \ + return GST_WFD_OK; \ +} +#define DEFINE_STRING_GETTER(field) \ +const gchar* gst_wfd_message_get_##field (const GstWFDMessage *msg) { \ + return msg->field; \ +} + +#define DEFINE_ARRAY_LEN(field) \ +guint gst_wfd_message_##field##_len (const GstWFDMessage *msg) { \ + return msg->field->len; \ +} +#define DEFINE_ARRAY_GETTER(method, field, type) \ +const type * gst_wfd_message_get_##method (const GstWFDMessage *msg, guint idx) { \ + return &g_array_index (msg->field, type, idx); \ +} +#define DEFINE_PTR_ARRAY_GETTER(method, field, type) \ +const type gst_wfd_message_get_##method (const GstWFDMessage *msg, guint idx) { \ + return g_array_index (msg->field, type, idx); \ +} +#define DEFINE_ARRAY_INSERT(method, field, intype, dup_method, type) \ 
+GstWFDResult gst_wfd_message_insert_##method (GstWFDMessage *msg, gint idx, intype val) { \ + type vt; \ + type* v = &vt; \ + dup_method (v, val); \ + if (idx == -1) \ + g_array_append_val (msg->field, vt); \ + else \ + g_array_insert_val (msg->field, idx, vt); \ + return GST_WFD_OK; \ +} + +#define DEFINE_ARRAY_REPLACE(method, field, intype, free_method, dup_method, type) \ +GstWFDResult gst_wfd_message_replace_##method (GstWFDMessage *msg, guint idx, intype val) { \ + type *v = &g_array_index (msg->field, type, idx); \ + free_method (v); \ + dup_method (v, val); \ + return GST_WFD_OK; \ +} +#define DEFINE_ARRAY_REMOVE(method, field, type, free_method) \ +GstWFDResult gst_wfd_message_remove_##method (GstWFDMessage *msg, guint idx) { \ + type *v = &g_array_index (msg->field, type, idx); \ + free_method (v); \ + g_array_remove_index (msg->field, idx); \ + return GST_WFD_OK; \ +} +#define DEFINE_ARRAY_ADDER(method, type) \ +GstWFDResult gst_wfd_message_add_##method (GstWFDMessage *msg, const type val) { \ + return gst_wfd_message_insert_##method (msg, -1, val); \ +} + +#define dup_string(v,val) ((*v) = g_strdup (val)) +#define INIT_STR_ARRAY(field) \ + INIT_ARRAY (field, gchar *, free_string) +#define DEFINE_STR_ARRAY_GETTER(method, field) \ + DEFINE_PTR_ARRAY_GETTER(method, field, gchar *) +#define DEFINE_STR_ARRAY_INSERT(method, field) \ + DEFINE_ARRAY_INSERT (method, field, const gchar *, dup_string, gchar *) +#define DEFINE_STR_ARRAY_ADDER(method, field) \ + DEFINE_ARRAY_ADDER (method, gchar *) +#define DEFINE_STR_ARRAY_REPLACE(method, field) \ + DEFINE_ARRAY_REPLACE (method, field, const gchar *, free_string, dup_string, gchar *) +#define DEFINE_STR_ARRAY_REMOVE(method, field) \ + DEFINE_ARRAY_REMOVE (method, field, gchar *, free_string) + +static GstWFDMessage *gst_wfd_message_boxed_copy (GstWFDMessage * orig); +static void gst_wfd_message_boxed_free (GstWFDMessage * msg); + +G_DEFINE_BOXED_TYPE (GstWFDMessage, gst_wfd_message, gst_wfd_message_boxed_copy, + 
gst_wfd_message_boxed_free); + +static GstWFDMessage * +gst_wfd_message_boxed_copy (GstWFDMessage * orig) +{ + GstWFDMessage *copy; + + if (gst_wfd_message_copy (orig, ©) == GST_WFD_OK) + return copy; + + return NULL; +} + +static void +gst_wfd_message_boxed_free (GstWFDMessage * msg) +{ + gst_wfd_message_free (msg); +} + +/** + * gst_wfd_message_new: + * @msg: (out) (transfer full): pointer to new #GstWFDMessage + * + * Allocate a new GstWFDMessage and store the result in @msg. + * + * Returns: a #GstWFDResult. + */ +GstWFDResult +gst_wfd_message_new (GstWFDMessage ** msg) +{ + GstWFDMessage *newmsg; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + newmsg = g_new0 (GstWFDMessage, 1); + + *msg = newmsg; + + return gst_wfd_message_init (newmsg); +} + +/** + * gst_wfd_message_init: + * @msg: a #GstWFDMessage + * + * Initialize @msg so that its contents are as if it was freshly allocated + * with gst_wfd_message_new(). This function is mostly used to initialize a message + * allocated on the stack. gst_wfd_message_uninit() undoes this operation. + * + * When this function is invoked on newly allocated data (with malloc or on the + * stack), its contents should be set to 0 before calling this function. + * + * Returns: a #GstWFDResult. + */ +GstWFDResult +gst_wfd_message_init (GstWFDMessage * msg) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + return GST_WFD_OK; +} + +/** + * gst_wfd_message_uninit: + * @msg: a #GstWFDMessage + * + * Free all resources allocated in @msg. @msg should not be used anymore after + * this function. This function should be used when @msg was allocated on the + * stack and initialized with gst_wfd_message_init(). + * + * Returns: a #GstWFDResult. 
+ */ +GstWFDResult +gst_wfd_message_uninit (GstWFDMessage * msg) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (msg->audio_codecs) { + guint i = 0; + if (msg->audio_codecs->list) { + for (; i < msg->audio_codecs->count; i++) { + FREE_STRING (msg->audio_codecs->list[i].audio_format); + msg->audio_codecs->list[i].modes = 0; + msg->audio_codecs->list[i].latency = 0; + } + FREE_STRING (msg->audio_codecs->list); + } + FREE_STRING (msg->audio_codecs); + } + + if (msg->video_formats) { + FREE_STRING (msg->video_formats->list); + FREE_STRING (msg->video_formats); + } + + if (msg->wfd2_audio_codecs) { + guint i = 0; + if (msg->wfd2_audio_codecs->list) { + for (; i < msg->wfd2_audio_codecs->count; i++) { + FREE_STRING(msg->wfd2_audio_codecs->list[i].audio_format); + msg->wfd2_audio_codecs->list[i].modes = 0; + msg->wfd2_audio_codecs->list[i].latency = 0; + } + FREE_STRING(msg->wfd2_audio_codecs->list); + } + FREE_STRING(msg->wfd2_audio_codecs); + } + + if (msg->direct_video_formats) { + FREE_STRING(msg->direct_video_formats->list); + FREE_STRING(msg->direct_video_formats); + } + + if (msg->video_3d_formats) { + FREE_STRING (msg->video_3d_formats->list); + FREE_STRING (msg->video_3d_formats); + } + + if (msg->content_protection) { + if (msg->content_protection->hdcp2_spec) { + FREE_STRING (msg->content_protection->hdcp2_spec->hdcpversion); + FREE_STRING (msg->content_protection->hdcp2_spec->TCPPort); + FREE_STRING (msg->content_protection->hdcp2_spec); + } + FREE_STRING (msg->content_protection); + } + + if (msg->display_edid) { + if (msg->display_edid->edid_payload) + FREE_STRING (msg->display_edid->edid_payload); + FREE_STRING (msg->display_edid); + } + + if (msg->coupled_sink) { + if (msg->coupled_sink->coupled_sink_cap) { + FREE_STRING (msg->coupled_sink->coupled_sink_cap->sink_address); + FREE_STRING (msg->coupled_sink->coupled_sink_cap); + } + FREE_STRING (msg->coupled_sink); + } + + if (msg->trigger_method) { + FREE_STRING 
(msg->trigger_method->wfd_trigger_method); + FREE_STRING (msg->trigger_method); + } + + if (msg->presentation_url) { + FREE_STRING (msg->presentation_url->wfd_url0); + FREE_STRING (msg->presentation_url->wfd_url1); + FREE_STRING (msg->presentation_url); + } + + if (msg->client_rtp_ports) { + FREE_STRING (msg->client_rtp_ports->profile); + FREE_STRING (msg->client_rtp_ports->mode); + FREE_STRING (msg->client_rtp_ports); + } + + if (msg->route) { + FREE_STRING (msg->route->destination); + FREE_STRING (msg->route); + } + + if (msg->I2C) + FREE_STRING (msg->I2C); + + if (msg->av_format_change_timing) + FREE_STRING (msg->av_format_change_timing); + + if (msg->preferred_display_mode) + FREE_STRING (msg->preferred_display_mode); + + if (msg->standby_resume_capability) + FREE_STRING (msg->standby_resume_capability); + + if (msg->standby) + FREE_STRING (msg->standby); + + if (msg->connector_type) + FREE_STRING (msg->connector_type); + + if (msg->idr_request) + FREE_STRING (msg->idr_request); + + if (msg->direct_mode) + FREE_STRING(msg->direct_mode); + + if (msg->tcp_ports) + FREE_STRING(msg->tcp_ports); + + if (msg->buf_len) + FREE_STRING(msg->buf_len); + + if (msg->audio_status) + FREE_STRING(msg->audio_status); + + if (msg->video_status) + FREE_STRING(msg->video_status); + + return GST_WFD_OK; +} + +/** + * gst_wfd_message_copy: + * @msg: a #GstWFDMessage + * @copy: (out) (transfer full): pointer to new #GstWFDMessage + * + * Allocate a new copy of @msg and store the result in @copy. The value in + * @copy should be release with gst_wfd_message_free function. 
+ * + * Returns: a #GstWFDResult + * + * Since: 1.6 + */ +GstWFDResult +gst_wfd_message_copy (const GstWFDMessage * msg, GstWFDMessage ** copy) +{ + GstWFDResult ret; + GstWFDMessage *cp; + + if (msg == NULL) + return GST_WFD_EINVAL; + + ret = gst_wfd_message_new (copy); + if (ret != GST_WFD_OK) + return ret; + + cp = *copy; + + /* TODO-WFD */ + if (msg->client_rtp_ports) { + cp->client_rtp_ports = g_malloc (sizeof (GstWFDClientRtpPorts)); + if (cp->client_rtp_ports) { + cp->client_rtp_ports->profile = g_strdup (msg->client_rtp_ports->profile); + cp->client_rtp_ports->rtp_port0 = msg->client_rtp_ports->rtp_port0; + cp->client_rtp_ports->rtp_port1 = msg->client_rtp_ports->rtp_port1; + cp->client_rtp_ports->mode = g_strdup (msg->client_rtp_ports->mode); + } + } + + return GST_WFD_OK; +} + + +static void +_read_string_space_ended (gchar * dest, guint size, gchar * src) +{ + guint idx = 0; + + while (!g_ascii_isspace (*src) && *src != '\0') { + if (idx < size - 1) + dest[idx++] = *src; + src++; + } + + if (size > 0) + dest[idx] = '\0'; + + return; +} + +static void +_read_string_attr_and_value (gchar * attr, gchar * value, guint tsize, + guint vsize, gchar del, gchar * src) +{ + guint idx; + + idx = 0; + + while (*src != del && *src != '\0') { + if (idx < tsize - 1) + attr[idx++] = *src; + src++; + } + + if (tsize > 0) + attr[idx] = '\0'; + + src++; + idx = 0; + + while (*src != '\0') { + if (idx < vsize - 1) + value[idx++] = *src; + src++; + } + + if (vsize > 0) + value[idx] = '\0'; + + return; +} + +static void +gst_wfd_parse_attribute (gchar * buffer, GstWFDMessage * msg) +{ + gchar attr[8192] = { 0 }; + gchar value[8192] = { 0 }; + gchar temp[8192] = { 0 }; + gchar *p = buffer; + gchar *v = value; + +#define WFD_SKIP_SPACE(q) if (*q && g_ascii_isspace (*q)) q++ +#define WFD_SKIP_EQUAL(q) if (*q && *q == '=') q++ +#define WFD_SKIP_COMMA(q) if (*q && g_ascii_ispunct (*q)) q++ +#define WFD_READ_STRING(field) _read_string_space_ended (temp, sizeof (temp), v); 
v+=strlen(temp); REPLACE_STRING (field, temp) +#define WFD_READ_UINT32(field) _read_string_space_ended (temp, sizeof (temp), v); v+=strlen(temp); field = strtoul (temp, NULL, 16) +#define WFD_READ_UINT32_DIGIT(field) _read_string_space_ended (temp, sizeof (temp), v); v+=strlen(temp); field = strtoul (temp, NULL, 10) +#define WFD_READ_UINT64_DIGIT(field) _read_string_space_ended (temp, sizeof (temp), v); v+=strlen(temp); field = strtoull (temp, NULL, 10) + + _read_string_attr_and_value (attr, value, sizeof (attr), sizeof (value), ':', + p); + + if (!g_strcmp0 (attr, GST_STRING_WFD_AUDIO_CODECS)) { + msg->audio_codecs = g_new0 (GstWFDAudioCodeclist, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + if (strncmp (v, "none", 4)) { + guint i = 0; + msg->audio_codecs->count = strlen (v) / 16; + msg->audio_codecs->list = + g_new0 (GstWFDAudioCodec, msg->audio_codecs->count); + for (; i < msg->audio_codecs->count; i++) { + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->audio_codecs->list[i].audio_format); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->audio_codecs->list[i].modes); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->audio_codecs->list[i].latency); + WFD_SKIP_COMMA (v); + } + } else { + msg->audio_codecs->count = 0; + msg->audio_codecs->list = NULL; + } + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_VIDEO_FORMATS)) { + msg->video_formats = g_new0 (GstWFDVideoCodeclist, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + if (strncmp (v, "none", 4)) { + msg->video_formats->count = 1; + msg->video_formats->list = g_new0 (GstWFDVideoCodec, 1); + WFD_READ_UINT32 (msg->video_formats->list->native); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list-> + preferred_display_mode_supported); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.profile); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.level); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.misc_params. 
+ CEA_Support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.misc_params. + VESA_Support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.misc_params. + HH_Support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.misc_params. + latency); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.misc_params. + min_slice_size); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.misc_params. + slice_enc_params); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.misc_params. + frame_rate_control_support); + WFD_SKIP_SPACE (v); + if (msg->video_formats->list->preferred_display_mode_supported == 1) { + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.max_hres); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.max_vres); + WFD_SKIP_SPACE (v); + } + } else { + msg->video_formats->count = 0; + msg->video_formats->list = NULL; + } + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD2_AUDIO_CODECS)) { + msg->wfd2_audio_codecs = g_new0 (GstWFD2AudioCodeclist, 1); + if (strlen (v)) { + guint i = 0; + msg->wfd2_audio_codecs->count = strlen (v) / 16; + msg->wfd2_audio_codecs->list = + g_new0 (GstWFDAudioCodec, msg->wfd2_audio_codecs->count); + for (; i < msg->wfd2_audio_codecs->count; i++) { + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->wfd2_audio_codecs->list[i].audio_format); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->wfd2_audio_codecs->list[i].modes); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->wfd2_audio_codecs->list[i].latency); + WFD_SKIP_COMMA (v); + } + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD2_VIDEO_FORMATS)) { + msg->direct_video_formats = g_new0 (GstWFD2VideoCodeclist, 1); + if (strlen (v)) { + msg->direct_video_formats->count = 1; + msg->direct_video_formats->list = g_new0 (GstWFDVideoCodec, 1); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 
(msg->direct_video_formats->list->native); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats-> + list->preferred_display_mode_supported); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.profile); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.level); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec. + misc_params.CEA_Support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec. + misc_params.VESA_Support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec. + misc_params.HH_Support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec. + misc_params.latency); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec. + misc_params.min_slice_size); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec. + misc_params.slice_enc_params); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec. 
+ misc_params.frame_rate_control_support); + WFD_SKIP_SPACE (v); + if (msg->direct_video_formats->list->preferred_display_mode_supported == 1) { + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.max_hres); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->direct_video_formats->list->H264_codec.max_vres); + WFD_SKIP_SPACE (v); + } + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_3D_VIDEO_FORMATS)) { + msg->video_3d_formats = g_new0 (GstWFD3DFormats, 1); + if (strlen (v)) { + msg->video_3d_formats->count = 1; + msg->video_3d_formats->list = g_new0 (GstWFD3dCapList, 1); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_3d_formats->list->native); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_3d_formats->list-> + preferred_display_mode_supported); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_3d_formats->list->H264_codec.profile); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_3d_formats->list->H264_codec.level); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_3d_formats->list->H264_codec.misc_params. + video_3d_capability); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_3d_formats->list->H264_codec.misc_params. + latency); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_3d_formats->list->H264_codec.misc_params. + min_slice_size); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_3d_formats->list->H264_codec.misc_params. + slice_enc_params); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_3d_formats->list->H264_codec.misc_params. 
+ frame_rate_control_support); + WFD_SKIP_SPACE (v); + if (msg->video_formats->list->preferred_display_mode_supported == 1) { + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.max_hres); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->video_formats->list->H264_codec.max_vres); + WFD_SKIP_SPACE (v); + } + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_CONTENT_PROTECTION)) { + msg->content_protection = g_new0 (GstWFDContentProtection, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + msg->content_protection->hdcp2_spec = g_new0 (GstWFDHdcp2Spec, 1); + if (strstr (v, "none")) { + msg->content_protection->hdcp2_spec->hdcpversion = g_strdup ("none"); + } else { + WFD_READ_STRING (msg->content_protection->hdcp2_spec->hdcpversion); + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->content_protection->hdcp2_spec->TCPPort); + } + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_DISPLAY_EDID)) { + msg->display_edid = g_new0 (GstWFDDisplayEdid, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + if (strstr (v, "none")) { + msg->display_edid->edid_supported = 0; + } else { + msg->display_edid->edid_supported = 1; + WFD_READ_UINT32 (msg->display_edid->edid_block_count); + WFD_SKIP_SPACE (v); + if (msg->display_edid->edid_block_count) { + gchar *edid_string = v; + int i = 0, j = 0; + guint32 payload_size = + EDID_BLOCK_SIZE * msg->display_edid->edid_block_count; + msg->display_edid->edid_payload = g_malloc (payload_size); + for (; + i < (EDID_BLOCK_SIZE * msg->display_edid->edid_block_count * 2); + j++) { + int k = 0, kk = 0; + if (edid_string[i] > 0x29 && edid_string[i] < 0x40) + k = edid_string[i] - 48; + else if (edid_string[i] > 0x60 && edid_string[i] < 0x67) + k = edid_string[i] - 87; + else if (edid_string[i] > 0x40 && edid_string[i] < 0x47) + k = edid_string[i] - 55; + + if (edid_string[i + 1] > 0x29 && edid_string[i + 1] < 0x40) + kk = edid_string[i + 1] - 48; + else if (edid_string[i + 1] > 0x60 && edid_string[i + 1] < 0x67) + kk = edid_string[i + 1] - 87; + else if 
(edid_string[i + 1] > 0x40 && edid_string[i + 1] < 0x47) + kk = edid_string[i + 1] - 55; + + msg->display_edid->edid_payload[j] = (k << 4) | kk; + i += 2; + } + //memcpy(msg->display_edid->edid_payload, v, payload_size); + v += (payload_size * 2); + } else + v += strlen (v); + } + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_COUPLED_SINK)) { + msg->coupled_sink = g_new0 (GstWFDCoupledSink, 1); + if (strlen (v)) { + msg->coupled_sink->coupled_sink_cap = g_new0 (GstWFDCoupledSinkCap, 1); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->coupled_sink->coupled_sink_cap->status); + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->coupled_sink->coupled_sink_cap->sink_address); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_TRIGGER_METHOD)) { + msg->trigger_method = g_new0 (GstWFDTriggerMethod, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->trigger_method->wfd_trigger_method); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_PRESENTATION_URL)) { + msg->presentation_url = g_new0 (GstWFDPresentationUrl, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->presentation_url->wfd_url0); + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->presentation_url->wfd_url1); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_CLIENT_RTP_PORTS)) { + msg->client_rtp_ports = g_new0 (GstWFDClientRtpPorts, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->client_rtp_ports->profile); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32_DIGIT (msg->client_rtp_ports->rtp_port0); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32_DIGIT (msg->client_rtp_ports->rtp_port1); + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->client_rtp_ports->mode); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_ROUTE)) { + msg->route = g_new0 (GstWFDRoute, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->route->destination); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_I2C)) { + msg->I2C = g_new0 (GstWFDI2C, 1); + if (strlen (v)) { + msg->I2C->I2CPresent = TRUE; + 
WFD_SKIP_SPACE (v); + WFD_READ_UINT32_DIGIT (msg->I2C->I2C_port); + if (msg->I2C->I2C_port) + msg->I2C->I2CPresent = TRUE; + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_AV_FORMAT_CHANGE_TIMING)) { + msg->av_format_change_timing = g_new0 (GstWFDAVFormatChangeTiming, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->av_format_change_timing->PTS); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->av_format_change_timing->DTS); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_PREFERRED_DISPLAY_MODE)) { + msg->preferred_display_mode = g_new0 (GstWFDPreferredDisplayMode, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + if (!strstr (v, "none")) { + msg->preferred_display_mode->displaymodesupported = FALSE; + } else { + WFD_READ_UINT32 (msg->preferred_display_mode->p_clock); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->HB); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->HSPOL_HSOFF); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->HSW); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->V); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->VB); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->VSPOL_VSOFF); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->VSW); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->VBS3D); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->V2d_s3d_modes); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->P_depth); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.profile); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.level); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.misc_params. 
+ CEA_Support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.misc_params. + VESA_Support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.misc_params. + HH_Support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.misc_params. + latency); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.misc_params. + min_slice_size); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.misc_params. + slice_enc_params); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.misc_params. + frame_rate_control_support); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.max_hres); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->preferred_display_mode->H264_codec.max_vres); + WFD_SKIP_SPACE (v); + } + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_STANDBY_RESUME_CAPABILITY)) { + msg->standby_resume_capability = g_new0 (GstWFDStandbyResumeCapability, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + if (!g_strcmp0 (v, "supported")) + msg->standby_resume_capability->standby_resume_cap = TRUE; + else + msg->standby_resume_capability->standby_resume_cap = FALSE; + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_STANDBY)) { + msg->standby = g_new0 (GstWFDStandby, 1); + msg->standby->wfd_standby = TRUE; + } else if (!g_strcmp0 (attr, GST_STRING_WFD_CONNECTOR_TYPE)) { + msg->connector_type = g_new0 (GstWFDConnectorType, 1); + if (strlen (v)) { + msg->connector_type->supported = TRUE; + WFD_SKIP_SPACE (v); + WFD_READ_UINT32 (msg->connector_type->connector_type); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD_IDR_REQUEST)) { + msg->idr_request = g_new0 (GstWFDIdrRequest, 1); + msg->idr_request->idr_request = TRUE; + } else if (!g_strcmp0 (attr, GST_STRING_WFD2_DIRECT_STREAMING_MODE)) { + msg->direct_mode = g_new0 (GstWFD2DirectStreamingMode, 1); + if (strlen (v)) { + 
WFD_SKIP_SPACE (v); + if (!g_strcmp0 (v, "active")) + msg->direct_mode->direct_mode = TRUE; + else + msg->direct_mode->direct_mode = FALSE; + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD2_TRANSPORT_SWITCH)) { + msg->tcp_ports = g_new0 (GstWFDTCPPorts, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->tcp_ports->profile); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32_DIGIT (msg->tcp_ports->rtp_port0); + WFD_SKIP_SPACE (v); + WFD_READ_UINT32_DIGIT (msg->tcp_ports->rtp_port1); + WFD_SKIP_SPACE (v); + WFD_READ_STRING (msg->tcp_ports->mode); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD2_BUFFER_LEN)) { + msg->buf_len = g_new0 (GstWFDBufferLen, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + WFD_READ_UINT32_DIGIT (msg->buf_len->buf_len); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD2_AUDIO_STATUS)) { + msg->audio_status = g_new0 (GstWFDAudioReport, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + WFD_READ_UINT32_DIGIT (msg->audio_status->aud_bufsize); + WFD_SKIP_SPACE (v); + WFD_READ_UINT64_DIGIT (msg->audio_status->aud_pts); + } + } else if (!g_strcmp0 (attr, GST_STRING_WFD2_VIDEO_STATUS)) { + msg->video_status = g_new0 (GstWFDVideoReport, 1); + if (strlen (v)) { + WFD_SKIP_SPACE (v); + WFD_READ_UINT32_DIGIT (msg->video_status->vid_bufsize); + WFD_SKIP_SPACE (v); + WFD_READ_UINT64_DIGIT (msg->video_status->vid_pts); + } + } + return; +} + +/** + * gst_wfd_message_parse_buffer: + * @data: the start of the buffer + * @size: the size of the buffer + * @msg: the result #GstSDPMessage + * + * Parse the contents of @size bytes pointed to by @data and store the result in + * @msg. + * + * Returns: #GST_SDP_OK on success. 
+ */ +GstWFDResult +gst_wfd_message_parse_buffer (const guint8 * data, guint size, + GstWFDMessage * msg) +{ + gchar *p; + gchar buffer[255] = { 0 }; + guint idx = 0; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (data != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (size != 0, GST_WFD_EINVAL); + + p = (gchar *) data; + while (TRUE) { + + if (*p == '\0') + break; + + idx = 0; + while (*p != '\n' && *p != '\r' && *p != '\0') { + if (idx < sizeof (buffer) - 1) + buffer[idx++] = *p; + p++; + } + buffer[idx] = '\0'; + gst_wfd_parse_attribute (buffer, msg); + + if (*p == '\0') + break; + p += 2; + } + return GST_WFD_OK; +} + +/** + * gst_wfd_message_free: + * @msg: a #GstWFDMessage + * + * Free all resources allocated by @msg. @msg should not be used anymore after + * this function. This function should be used when @msg was dynamically + * allocated with gst_wfd_message_new(). + * + * Returns: a #GstWFDResult. + */ +GstWFDResult +gst_wfd_message_free (GstWFDMessage * msg) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + gst_wfd_message_uninit (msg); + g_free (msg); + msg = NULL; + + return GST_WFD_OK; +} + +/** + * gst_wfd_message_as_text: + * @msg: a #GstWFDMessage + * + * Convert the contents of @msg to a text string. + * + * Returns: A dynamically allocated string representing the WFD description. + */ +gchar * +gst_wfd_message_as_text (const GstWFDMessage * msg) +{ + /* change all vars so they match rfc? 
*/ + GString *lines; + guint i; + + g_return_val_if_fail (msg != NULL, NULL); + + lines = g_string_new (""); + + /* list of audio codecs */ + if (msg->audio_codecs) { + g_string_append_printf (lines, GST_STRING_WFD_AUDIO_CODECS); + g_string_append_printf (lines, ":"); + if (msg->audio_codecs->list) { + for (i = 0; i < msg->audio_codecs->count; i++) { + g_string_append_printf (lines, " %s", + msg->audio_codecs->list[i].audio_format); + g_string_append_printf (lines, " %08x", + msg->audio_codecs->list[i].modes); + g_string_append_printf (lines, " %02x", + msg->audio_codecs->list[i].latency); + if ((i + 1) < msg->audio_codecs->count) + g_string_append_printf (lines, ","); + } + } else { + g_string_append_printf (lines, " none"); + } + g_string_append_printf (lines, "\r\n"); + } + + /* list of video codecs */ + if (msg->video_formats) { + g_string_append_printf (lines, GST_STRING_WFD_VIDEO_FORMATS); + g_string_append_printf (lines, ":"); + if (msg->video_formats->list) { + g_string_append_printf (lines, " %02x", msg->video_formats->list->native); + g_string_append_printf (lines, " %02x", + msg->video_formats->list->preferred_display_mode_supported); + g_string_append_printf (lines, " %02x", + msg->video_formats->list->H264_codec.profile); + g_string_append_printf (lines, " %02x", + msg->video_formats->list->H264_codec.level); + g_string_append_printf (lines, " %08x", + msg->video_formats->list->H264_codec.misc_params.CEA_Support); + g_string_append_printf (lines, " %08x", + msg->video_formats->list->H264_codec.misc_params.VESA_Support); + g_string_append_printf (lines, " %08x", + msg->video_formats->list->H264_codec.misc_params.HH_Support); + g_string_append_printf (lines, " %02x", + msg->video_formats->list->H264_codec.misc_params.latency); + g_string_append_printf (lines, " %04x", + msg->video_formats->list->H264_codec.misc_params.min_slice_size); + g_string_append_printf (lines, " %04x", + msg->video_formats->list->H264_codec.misc_params.slice_enc_params); + 
g_string_append_printf (lines, " %02x", + msg->video_formats->list->H264_codec.misc_params. + frame_rate_control_support); + + if (msg->video_formats->list->H264_codec.max_hres) + g_string_append_printf (lines, " %04x", + msg->video_formats->list->H264_codec.max_hres); + else + g_string_append_printf (lines, " none"); + + if (msg->video_formats->list->H264_codec.max_vres) + g_string_append_printf (lines, " %04x", + msg->video_formats->list->H264_codec.max_vres); + else + g_string_append_printf (lines, " none"); + } else { + g_string_append_printf (lines, " none"); + } + g_string_append_printf (lines, "\r\n"); + } + + /* list of wfd2 audio codecs */ + if (msg->wfd2_audio_codecs) { + g_string_append_printf (lines, GST_STRING_WFD2_AUDIO_CODECS); + if (msg->wfd2_audio_codecs->list) { + g_string_append_printf (lines, ":"); + for (i = 0; i < msg->wfd2_audio_codecs->count; i++) { + g_string_append_printf (lines, " %s", + msg->wfd2_audio_codecs->list[i].audio_format); + g_string_append_printf (lines, " %08x", + msg->wfd2_audio_codecs->list[i].modes); + g_string_append_printf (lines, " %02x", + msg->wfd2_audio_codecs->list[i].latency); + if ((i + 1) < msg->wfd2_audio_codecs->count) + g_string_append_printf (lines, ","); + } + } + g_string_append_printf (lines, "\r\n"); + } + + /* list of direct video codecs */ + if (msg->direct_video_formats) { + g_string_append_printf (lines, GST_STRING_WFD2_VIDEO_FORMATS); + if (msg->direct_video_formats->list) { + g_string_append_printf (lines, ":"); + g_string_append_printf (lines, " %02x", msg->direct_video_formats->list->native); + g_string_append_printf (lines, " %02x", + msg->direct_video_formats->list->preferred_display_mode_supported); + g_string_append_printf (lines, " %02x", + msg->direct_video_formats->list->H264_codec.profile); + g_string_append_printf (lines, " %02x", + msg->direct_video_formats->list->H264_codec.level); + g_string_append_printf (lines, " %08x", + 
msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support); + g_string_append_printf (lines, " %08x", + msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support); + g_string_append_printf (lines, " %08x", + msg->direct_video_formats->list->H264_codec.misc_params.HH_Support); + g_string_append_printf (lines, " %02x", + msg->direct_video_formats->list->H264_codec.misc_params.latency); + g_string_append_printf (lines, " %04x", + msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size); + g_string_append_printf (lines, " %04x", + msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params); + g_string_append_printf (lines, " %02x", + msg->direct_video_formats->list->H264_codec. + misc_params.frame_rate_control_support); + + if (msg->direct_video_formats->list->H264_codec.max_hres) + g_string_append_printf (lines, " %04x", + msg->direct_video_formats->list->H264_codec.max_hres); + else + g_string_append_printf (lines, " none"); + + if (msg->direct_video_formats->list->H264_codec.max_vres) + g_string_append_printf (lines, " %04x", + msg->direct_video_formats->list->H264_codec.max_vres); + else + g_string_append_printf (lines, " none"); + } + g_string_append_printf (lines, "\r\n"); + } + /* list of video 3D codecs */ + if (msg->video_3d_formats) { + g_string_append_printf (lines, GST_STRING_WFD_3D_VIDEO_FORMATS); + g_string_append_printf (lines, ":"); + if (msg->video_3d_formats->list) { + g_string_append_printf (lines, " %02x", + msg->video_3d_formats->list->native); + g_string_append_printf (lines, " %02x", + msg->video_3d_formats->list->preferred_display_mode_supported); + g_string_append_printf (lines, " %02x", + msg->video_3d_formats->list->H264_codec.profile); + g_string_append_printf (lines, " %02x", + msg->video_3d_formats->list->H264_codec.level); + g_string_append_printf (lines, " %16x", + msg->video_3d_formats->list->H264_codec.misc_params. 
+ video_3d_capability); + g_string_append_printf (lines, " %02x", + msg->video_3d_formats->list->H264_codec.misc_params.latency); + g_string_append_printf (lines, " %04x", + msg->video_3d_formats->list->H264_codec.misc_params.min_slice_size); + g_string_append_printf (lines, " %04x", + msg->video_3d_formats->list->H264_codec.misc_params.slice_enc_params); + g_string_append_printf (lines, " %02x", + msg->video_3d_formats->list->H264_codec.misc_params. + frame_rate_control_support); + if (msg->video_3d_formats->list->H264_codec.max_hres) + g_string_append_printf (lines, " %04x", + msg->video_3d_formats->list->H264_codec.max_hres); + else + g_string_append_printf (lines, " none"); + if (msg->video_3d_formats->list->H264_codec.max_vres) + g_string_append_printf (lines, " %04x", + msg->video_3d_formats->list->H264_codec.max_vres); + else + g_string_append_printf (lines, " none"); + } else { + g_string_append_printf (lines, " none"); + } + g_string_append_printf (lines, "\r\n"); + } + + if (msg->content_protection) { + g_string_append_printf (lines, GST_STRING_WFD_CONTENT_PROTECTION); + g_string_append_printf (lines, ":"); + if (msg->content_protection->hdcp2_spec) { + if (msg->content_protection->hdcp2_spec->hdcpversion) { + g_string_append_printf (lines, " %s", + msg->content_protection->hdcp2_spec->hdcpversion); + g_string_append_printf (lines, " %s", + msg->content_protection->hdcp2_spec->TCPPort); + } else { + g_string_append_printf (lines, " none"); + } + } else { + g_string_append_printf (lines, " none"); + } + g_string_append_printf (lines, "\r\n"); + } + + if (msg->display_edid) { + g_string_append_printf (lines, GST_STRING_WFD_DISPLAY_EDID); + g_string_append_printf (lines, ":"); + if (msg->display_edid->edid_supported) { + if (msg->display_edid->edid_block_count > 0 && + msg->display_edid->edid_block_count <= EDID_BLOCK_COUNT_MAX_SIZE) { + g_string_append_printf (lines, " %04x", + msg->display_edid->edid_block_count); + g_string_append_printf (lines, " %s", 
msg->display_edid->edid_payload); + } else + g_string_append_printf (lines, " none"); + } else { + g_string_append_printf (lines, " none"); + } + g_string_append_printf (lines, "\r\n"); + } + + if (msg->coupled_sink) { + g_string_append_printf (lines, GST_STRING_WFD_COUPLED_SINK); + g_string_append_printf (lines, ":"); + if (msg->coupled_sink->coupled_sink_cap && + (gboolean)msg->coupled_sink->coupled_sink_cap->sink_supported == TRUE) { + g_string_append_printf (lines, " %02x", + msg->coupled_sink->coupled_sink_cap->status); + if (msg->coupled_sink->coupled_sink_cap->sink_address) + g_string_append_printf (lines, " %s", + msg->coupled_sink->coupled_sink_cap->sink_address); + else + g_string_append_printf (lines, " none"); + } else { + g_string_append_printf (lines, " none"); + } + g_string_append_printf (lines, "\r\n"); + } + + if (msg->trigger_method) { + g_string_append_printf (lines, GST_STRING_WFD_TRIGGER_METHOD); + g_string_append_printf (lines, ":"); + g_string_append_printf (lines, " %s", + msg->trigger_method->wfd_trigger_method); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->presentation_url) { + g_string_append_printf (lines, GST_STRING_WFD_PRESENTATION_URL); + g_string_append_printf (lines, ":"); + if (msg->presentation_url->wfd_url0) + g_string_append_printf (lines, " %s", msg->presentation_url->wfd_url0); + else + g_string_append_printf (lines, " none"); + if (msg->presentation_url->wfd_url1) + g_string_append_printf (lines, " %s", msg->presentation_url->wfd_url1); + else + g_string_append_printf (lines, " none"); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->client_rtp_ports) { + g_string_append_printf (lines, GST_STRING_WFD_CLIENT_RTP_PORTS); + if (msg->client_rtp_ports->profile) { + g_string_append_printf (lines, ":"); + g_string_append_printf (lines, " %s", msg->client_rtp_ports->profile); + g_string_append_printf (lines, " %d", msg->client_rtp_ports->rtp_port0); + g_string_append_printf (lines, " %d", 
msg->client_rtp_ports->rtp_port1); + g_string_append_printf (lines, " %s", msg->client_rtp_ports->mode); + } + g_string_append_printf (lines, "\r\n"); + } + + if (msg->route) { + g_string_append_printf (lines, GST_STRING_WFD_ROUTE); + g_string_append_printf (lines, ":"); + g_string_append_printf (lines, " %s", msg->route->destination); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->I2C) { + g_string_append_printf (lines, GST_STRING_WFD_I2C); + g_string_append_printf (lines, ":"); + if (msg->I2C->I2CPresent) + g_string_append_printf (lines, " %x", msg->I2C->I2C_port); + else + g_string_append_printf (lines, " none"); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->av_format_change_timing) { + g_string_append_printf (lines, GST_STRING_WFD_AV_FORMAT_CHANGE_TIMING); + g_string_append_printf (lines, ":"); - g_string_append_printf (lines, " %010llx", ++ g_string_append_printf (lines, " %010" G_GUINT64_FORMAT, + msg->av_format_change_timing->PTS); - g_string_append_printf (lines, " %010llx", ++ g_string_append_printf (lines, " %010" G_GUINT64_FORMAT, + msg->av_format_change_timing->DTS); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->preferred_display_mode) { + g_string_append_printf (lines, GST_STRING_WFD_PREFERRED_DISPLAY_MODE); + g_string_append_printf (lines, ":"); + if (msg->preferred_display_mode->displaymodesupported) { - g_string_append_printf (lines, " %06llx", ++ g_string_append_printf (lines, " %06" G_GUINT64_FORMAT, + msg->preferred_display_mode->p_clock); + g_string_append_printf (lines, " %04x", msg->preferred_display_mode->H); + g_string_append_printf (lines, " %04x", msg->preferred_display_mode->HB); + g_string_append_printf (lines, " %04x", + msg->preferred_display_mode->HSPOL_HSOFF); + g_string_append_printf (lines, " %04x", msg->preferred_display_mode->HSW); + g_string_append_printf (lines, " %04x", msg->preferred_display_mode->V); + g_string_append_printf (lines, " %04x", msg->preferred_display_mode->VB); + 
g_string_append_printf (lines, " %04x", + msg->preferred_display_mode->VSPOL_VSOFF); + g_string_append_printf (lines, " %04x", msg->preferred_display_mode->VSW); + g_string_append_printf (lines, " %02x", + msg->preferred_display_mode->VBS3D); + g_string_append_printf (lines, " %02x", + msg->preferred_display_mode->V2d_s3d_modes); + g_string_append_printf (lines, " %02x", + msg->preferred_display_mode->P_depth); + } else + g_string_append_printf (lines, " none"); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->standby_resume_capability) { + g_string_append_printf (lines, GST_STRING_WFD_STANDBY_RESUME_CAPABILITY); + g_string_append_printf (lines, ":"); + if (msg->standby_resume_capability->standby_resume_cap) + g_string_append_printf (lines, " supported"); + else + g_string_append_printf (lines, " none"); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->standby) { + g_string_append_printf (lines, GST_STRING_WFD_STANDBY); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->connector_type) { + g_string_append_printf (lines, GST_STRING_WFD_CONNECTOR_TYPE); + g_string_append_printf (lines, ":"); + if (msg->connector_type->connector_type) + g_string_append_printf (lines, " %02x", + msg->connector_type->connector_type); + else + g_string_append_printf (lines, " none"); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->idr_request) { + g_string_append_printf (lines, GST_STRING_WFD_IDR_REQUEST); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->direct_mode && msg->direct_mode->direct_mode) { + g_string_append_printf (lines, GST_STRING_WFD2_DIRECT_STREAMING_MODE); + g_string_append_printf (lines, ":"); + g_string_append_printf (lines, " active"); + g_string_append_printf (lines, "\r\n"); + } + + if (msg->tcp_ports) { + g_string_append_printf (lines, GST_STRING_WFD2_TRANSPORT_SWITCH); + if (msg->tcp_ports->profile) { + g_string_append_printf (lines, ":"); + g_string_append_printf (lines, " %s", msg->tcp_ports->profile); + 
g_string_append_printf (lines, " %d", msg->tcp_ports->rtp_port0); + g_string_append_printf (lines, " %d", msg->tcp_ports->rtp_port1); + g_string_append_printf (lines, " %s", msg->tcp_ports->mode); + } + g_string_append_printf (lines, "\r\n"); + } + + if (msg->buf_len) { + g_string_append_printf (lines, GST_STRING_WFD2_BUFFER_LEN); + g_string_append_printf (lines, ":"); + g_string_append_printf (lines, " %d", msg->buf_len->buf_len); + g_string_append_printf (lines, "\r\n"); + } + + return g_string_free (lines, FALSE); +} + +gchar * +gst_wfd_message_param_names_as_text (const GstWFDMessage * msg) +{ + /* change all vars so they match rfc? */ + GString *lines; + g_return_val_if_fail (msg != NULL, NULL); + + lines = g_string_new (""); + + /* list of audio codecs */ + if (msg->audio_codecs) { + g_string_append_printf (lines, GST_STRING_WFD_AUDIO_CODECS); + g_string_append_printf (lines, "\r\n"); + } + /* list of video codecs */ + if (msg->video_formats) { + g_string_append_printf (lines, GST_STRING_WFD_VIDEO_FORMATS); + g_string_append_printf (lines, "\r\n"); + } + /* list of wfd2 audio codecs */ + if (msg->wfd2_audio_codecs) { + g_string_append_printf (lines, GST_STRING_WFD2_AUDIO_CODECS); + g_string_append_printf (lines, "\r\n"); + } + /* list of direct video codecs */ + if (msg->direct_video_formats) { + g_string_append_printf (lines, GST_STRING_WFD2_VIDEO_FORMATS); + g_string_append_printf (lines, "\r\n"); + } + /* list of video 3D codecs */ + if (msg->video_3d_formats) { + g_string_append_printf (lines, GST_STRING_WFD_3D_VIDEO_FORMATS); + g_string_append_printf (lines, "\r\n"); + } + if (msg->content_protection) { + g_string_append_printf (lines, GST_STRING_WFD_CONTENT_PROTECTION); + g_string_append_printf (lines, "\r\n"); + } + if (msg->display_edid) { + g_string_append_printf (lines, GST_STRING_WFD_DISPLAY_EDID); + g_string_append_printf (lines, "\r\n"); + } + if (msg->coupled_sink) { + g_string_append_printf (lines, GST_STRING_WFD_COUPLED_SINK); + 
g_string_append_printf (lines, "\r\n"); + } + if (msg->trigger_method) { + g_string_append_printf (lines, GST_STRING_WFD_TRIGGER_METHOD); + g_string_append_printf (lines, "\r\n"); + } + if (msg->presentation_url) { + g_string_append_printf (lines, GST_STRING_WFD_PRESENTATION_URL); + g_string_append_printf (lines, "\r\n"); + } + if (msg->client_rtp_ports) { + g_string_append_printf (lines, GST_STRING_WFD_CLIENT_RTP_PORTS); + g_string_append_printf (lines, "\r\n"); + } + if (msg->route) { + g_string_append_printf (lines, GST_STRING_WFD_ROUTE); + g_string_append_printf (lines, "\r\n"); + } + if (msg->I2C) { + g_string_append_printf (lines, GST_STRING_WFD_I2C); + g_string_append_printf (lines, "\r\n"); + } + if (msg->av_format_change_timing) { + g_string_append_printf (lines, GST_STRING_WFD_AV_FORMAT_CHANGE_TIMING); + g_string_append_printf (lines, "\r\n"); + } + if (msg->preferred_display_mode) { + g_string_append_printf (lines, GST_STRING_WFD_PREFERRED_DISPLAY_MODE); + g_string_append_printf (lines, "\r\n"); + } + if (msg->standby_resume_capability) { + g_string_append_printf (lines, GST_STRING_WFD_STANDBY_RESUME_CAPABILITY); + g_string_append_printf (lines, "\r\n"); + } + if (msg->standby) { + g_string_append_printf (lines, GST_STRING_WFD_STANDBY); + g_string_append_printf (lines, "\r\n"); + } + if (msg->connector_type) { + g_string_append_printf (lines, GST_STRING_WFD_CONNECTOR_TYPE); + g_string_append_printf (lines, "\r\n"); + } + if (msg->idr_request) { + g_string_append_printf (lines, GST_STRING_WFD_IDR_REQUEST); + g_string_append_printf (lines, "\r\n"); + } + if (msg->tcp_ports) { + g_string_append_printf (lines, GST_STRING_WFD2_TRANSPORT_SWITCH); + g_string_append_printf (lines, "\r\n"); + } + if (msg->buf_len) { + g_string_append_printf (lines, GST_STRING_WFD2_BUFFER_LEN); + g_string_append_printf (lines, "\r\n"); + } + if (msg->audio_status) { + g_string_append_printf (lines, GST_STRING_WFD2_AUDIO_STATUS); + g_string_append_printf (lines, "\r\n"); + } + if 
(msg->video_status) { + g_string_append_printf (lines, GST_STRING_WFD2_VIDEO_STATUS); + g_string_append_printf (lines, "\r\n"); + } + + return g_string_free (lines, FALSE); +} + +/** + * gst_wfd_message_dump: + * @msg: a #GstWFDMessage + * + * Dump the parsed contents of @msg to stdout. + * + * Returns: a #GstWFDResult. + */ +GstWFDResult +gst_wfd_message_dump (const GstWFDMessage * msg) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (msg->audio_codecs) { + guint i = 0; + g_print ("Audio supported formats : \n"); + for (; i < msg->audio_codecs->count; i++) { + g_print ("Codec: %s\n", msg->audio_codecs->list[i].audio_format); + if (!strcmp (msg->audio_codecs->list[i].audio_format, "LPCM")) { + if (msg->audio_codecs->list[i].modes & GST_WFD_FREQ_44100) + g_print (" Freq: %d\n", 44100); + if (msg->audio_codecs->list[i].modes & GST_WFD_FREQ_48000) + g_print (" Freq: %d\n", 48000); + g_print (" Channels: %d\n", 2); + } + if (!strcmp (msg->audio_codecs->list[i].audio_format, "AAC")) { + g_print (" Freq: %d\n", 48000); + if (msg->audio_codecs->list[i].modes & GST_WFD_CHANNEL_2) + g_print (" Channels: %d\n", 2); + if (msg->audio_codecs->list[i].modes & GST_WFD_CHANNEL_4) + g_print (" Channels: %d\n", 4); + if (msg->audio_codecs->list[i].modes & GST_WFD_CHANNEL_6) + g_print (" Channels: %d\n", 6); + if (msg->audio_codecs->list[i].modes & GST_WFD_CHANNEL_8) + g_print (" Channels: %d\n", 8); + } + if (!strcmp (msg->audio_codecs->list[i].audio_format, "AC3")) { + g_print (" Freq: %d\n", 48000); + if (msg->audio_codecs->list[i].modes & GST_WFD_CHANNEL_2) + g_print (" Channels: %d\n", 2); + if (msg->audio_codecs->list[i].modes & GST_WFD_CHANNEL_4) + g_print (" Channels: %d\n", 4); + if (msg->audio_codecs->list[i].modes & GST_WFD_CHANNEL_6) + g_print (" Channels: %d\n", 6); + } + g_print (" Bitwidth: %d\n", 16); + g_print (" Latency: %d\n", msg->audio_codecs->list[i].latency); + } + } + + + if (msg->video_formats) { + g_print ("Video supported formats : \n"); + 
if (msg->video_formats->list) { + guint nativeindex = 0; + g_print ("Codec: H264\n"); + if ((msg->video_formats->list->native & 0x7) == + GST_WFD_VIDEO_CEA_RESOLUTION) { + g_print (" Native type: CEA\n"); + } else if ((msg->video_formats->list->native & 0x7) == + GST_WFD_VIDEO_VESA_RESOLUTION) { + g_print (" Native type: VESA\n"); + } else if ((msg->video_formats->list->native & 0x7) == + GST_WFD_VIDEO_HH_RESOLUTION) { + g_print (" Native type: HH\n"); + } + nativeindex = msg->video_formats->list->native >> 3; + g_print (" Resolution: %d\n", (1 << nativeindex)); + + if (msg->video_formats->list->H264_codec. + profile & GST_WFD_H264_BASE_PROFILE) { + g_print (" Profile: BASE\n"); + } else if (msg->video_formats->list->H264_codec. + profile & GST_WFD_H264_HIGH_PROFILE) { + g_print (" Profile: HIGH\n"); + } + if (msg->video_formats->list->H264_codec.level & GST_WFD_H264_LEVEL_3_1) { + g_print (" Level: 3.1\n"); + } else if (msg->video_formats->list->H264_codec. + level & GST_WFD_H264_LEVEL_3_2) { + g_print (" Level: 3.2\n"); + } else if (msg->video_formats->list->H264_codec. + level & GST_WFD_H264_LEVEL_4) { + g_print (" Level: 4\n"); + } else if (msg->video_formats->list->H264_codec. + level & GST_WFD_H264_LEVEL_4_1) { + g_print (" Level: 4.1\n"); + } else if (msg->video_formats->list->H264_codec. + level & GST_WFD_H264_LEVEL_4_2) { + g_print (" Level: 4.2\n"); + } + g_print (" Latency: %d\n", + msg->video_formats->list->H264_codec.misc_params.latency); + g_print (" min_slice_size: %x\n", + msg->video_formats->list->H264_codec.misc_params.min_slice_size); + g_print (" slice_enc_params: %x\n", + msg->video_formats->list->H264_codec.misc_params.slice_enc_params); + g_print (" frame_rate_control_support: %x\n", + msg->video_formats->list->H264_codec.misc_params. 
+ frame_rate_control_support); + if (msg->video_formats->list->H264_codec.max_hres) { + g_print (" Max Width(horizontal resolution): %04d\n", + msg->video_formats->list->H264_codec.max_hres); + } + if (msg->video_formats->list->H264_codec.max_vres) { + g_print (" Max Height(vertical resolution): %04d\n", + msg->video_formats->list->H264_codec.max_vres); + } + } + } + + if (msg->wfd2_audio_codecs) { + guint i = 0; + g_print ("Audio supported codecs for R2 : \n"); + for (; i < msg->wfd2_audio_codecs->count; i++) { + g_print ("Codec: %s\n", msg->wfd2_audio_codecs->list[i].audio_format); + if (!strcmp (msg->wfd2_audio_codecs->list[i].audio_format, "LPCM")) { + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_FREQ_44100) + g_print (" Freq: %d\n", 44100); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_FREQ_48000) + g_print (" Freq: %d\n", 48000); + g_print (" Channels: %d\n", 2); + } + if (!strcmp (msg->wfd2_audio_codecs->list[i].audio_format, "AAC")) { + g_print (" Freq: %d\n", 48000); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_2) + g_print (" Channels: %d\n", 2); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_4) + g_print (" Channels: %d\n", 4); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_6) + g_print (" Channels: %d\n", 6); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_8) + g_print (" Channels: %d\n", 8); + } + if (!strcmp (msg->wfd2_audio_codecs->list[i].audio_format, "AC3")) { + g_print (" Freq: %d\n", 48000); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_2) + g_print (" Channels: %d\n", 2); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_4) + g_print (" Channels: %d\n", 4); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_6) + g_print (" Channels: %d\n", 6); + } + if (!strcmp (msg->wfd2_audio_codecs->list[i].audio_format, "CTA")) { + g_print (" Freq: %d\n", 48000); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_2) + g_print (" 
Channels: %d\n", 2); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_4) + g_print (" Channels: %d\n", 4); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_6) + g_print (" Channels: %d\n", 6); + } + if (!strcmp (msg->wfd2_audio_codecs->list[i].audio_format, "AAC-ELDv2")) { + g_print (" Freq: %d\n", 48000); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_2) + g_print (" Channels: %d\n", 2); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_4) + g_print (" Channels: %d\n", 4); + if (msg->wfd2_audio_codecs->list[i].modes & GST_WFD_CHANNEL_6) + g_print (" Channels: %d\n", 6); + } + g_print (" Bitwidth: %d\n", 16); + g_print (" Latency: %d\n", msg->wfd2_audio_codecs->list[i].latency); + } + } + + + if (msg->direct_video_formats) { + g_print ("Video supported formats for direct streaming : \n"); + if (msg->direct_video_formats->list) { + guint nativeindex = 0; + g_print ("Codec: H264\n"); + if ((msg->direct_video_formats->list->native & 0x7) == + GST_WFD_VIDEO_CEA_RESOLUTION) { + g_print (" Native type: CEA\n"); + } else if ((msg->direct_video_formats->list->native & 0x7) == + GST_WFD_VIDEO_VESA_RESOLUTION) { + g_print (" Native type: VESA\n"); + } else if ((msg->direct_video_formats->list->native & 0x7) == + GST_WFD_VIDEO_HH_RESOLUTION) { + g_print (" Native type: HH\n"); + } + nativeindex = msg->direct_video_formats->list->native >> 3; + g_print (" Resolution: %d\n", (1 << nativeindex)); + + if (msg->direct_video_formats->list-> + H264_codec.profile & GST_WFD_H264_BASE_PROFILE) { + g_print (" Profile: BASE\n"); + } else if (msg->direct_video_formats->list-> + H264_codec.profile & GST_WFD_H264_HIGH_PROFILE) { + g_print (" Profile: HIGH\n"); + } + if (msg->direct_video_formats->list->H264_codec.level & GST_WFD_H264_LEVEL_3_1) { + g_print (" Level: 3.1\n"); + } else if (msg->direct_video_formats->list-> + H264_codec.level & GST_WFD_H264_LEVEL_3_2) { + g_print (" Level: 3.2\n"); + } else if 
(msg->direct_video_formats->list-> + H264_codec.level & GST_WFD_H264_LEVEL_4) { + g_print (" Level: 4\n"); + } else if (msg->direct_video_formats->list-> + H264_codec.level & GST_WFD_H264_LEVEL_4_1) { + g_print (" Level: 4.1\n"); + } else if (msg->direct_video_formats->list-> + H264_codec.level & GST_WFD_H264_LEVEL_4_2) { + g_print (" Level: 4.2\n"); + } + g_print (" Latency: %d\n", + msg->direct_video_formats->list->H264_codec.misc_params.latency); + g_print (" min_slice_size: %x\n", + msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size); + g_print (" slice_enc_params: %x\n", + msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params); + g_print (" frame_rate_control_support: %x\n", + msg->direct_video_formats->list->H264_codec. + misc_params.frame_rate_control_support); + if (msg->direct_video_formats->list->H264_codec.max_hres) { + g_print (" Max Width(horizontal resolution): %04d\n", + msg->direct_video_formats->list->H264_codec.max_hres); + } + if (msg->direct_video_formats->list->H264_codec.max_vres) { + g_print (" Max height(vertical resolution): %04d\n", + msg->direct_video_formats->list->H264_codec.max_vres); + } + } + } + + if (msg->video_3d_formats) { + g_print ("wfd_3d_formats"); + g_print ("\r\n"); + } + + if (msg->content_protection) { + g_print (GST_STRING_WFD_CONTENT_PROTECTION); + g_print ("\r\n"); + } + + if (msg->display_edid) { + g_print (GST_STRING_WFD_DISPLAY_EDID); + g_print ("\r\n"); + } + + if (msg->coupled_sink) { + g_print (GST_STRING_WFD_COUPLED_SINK); + g_print ("\r\n"); + } + + if (msg->trigger_method) { + g_print (" Trigger type: %s\n", msg->trigger_method->wfd_trigger_method); + } + + if (msg->presentation_url) { + g_print (GST_STRING_WFD_PRESENTATION_URL); + g_print ("\r\n"); + } + + if (msg->client_rtp_ports) { + g_print (" Client RTP Ports : \n"); + if (msg->client_rtp_ports->profile) { + g_print ("%s\n", msg->client_rtp_ports->profile); + g_print (" %d\n", msg->client_rtp_ports->rtp_port0); + 
g_print (" %d\n", msg->client_rtp_ports->rtp_port1); + g_print (" %s\n", msg->client_rtp_ports->mode); + } + g_print ("\r\n"); + } + + if (msg->route) { + g_print (GST_STRING_WFD_ROUTE); + g_print ("\r\n"); + } + + if (msg->I2C) { + g_print (GST_STRING_WFD_I2C); + g_print ("\r\n"); + } + + if (msg->av_format_change_timing) { + g_print (GST_STRING_WFD_AV_FORMAT_CHANGE_TIMING); + g_print ("\r\n"); + } + + if (msg->preferred_display_mode) { + g_print (GST_STRING_WFD_PREFERRED_DISPLAY_MODE); + g_print ("\r\n"); + } + + if (msg->standby_resume_capability) { + g_print (GST_STRING_WFD_STANDBY_RESUME_CAPABILITY); + g_print ("\r\n"); + } + + if (msg->standby) { + g_print (GST_STRING_WFD_STANDBY); + g_print ("\r\n"); + } + + if (msg->connector_type) { + g_print (GST_STRING_WFD_CONNECTOR_TYPE); + g_print ("\r\n"); + } + + if (msg->idr_request) { + g_print (GST_STRING_WFD_IDR_REQUEST); + g_print ("\r\n"); + } + + if (msg->direct_mode) { + g_print (GST_STRING_WFD2_DIRECT_STREAMING_MODE); + g_print ("\r\n"); + } + + if (msg->tcp_ports) { + g_print (" TCP Ports : \n"); + if (msg->tcp_ports->profile) { + g_print ("%s\n", msg->tcp_ports->profile); + g_print (" %d\n", msg->tcp_ports->rtp_port0); + g_print (" %d\n", msg->tcp_ports->rtp_port1); + g_print (" %s\n", msg->tcp_ports->mode); + } + g_print ("\r\n"); + } + + if (msg->buf_len) { + g_print (" Buffer Length : %d\n", msg->buf_len->buf_len); + g_print ("\r\n"); + } + + if (msg->audio_status) { + g_print ("Audio Playback Status : \n"); + g_print (" Current audio buffer size : %d\n", msg->audio_status->aud_bufsize); - g_print (" Current audio decoded PTS : %lld\n", msg->audio_status->aud_pts); ++ g_print (" Current audio decoded PTS : %" G_GUINT64_FORMAT "\n", ++ msg->audio_status->aud_pts); + g_print ("\r\n"); + } + + if (msg->video_status) { + g_print ("Video Playback Status : \n"); + g_print (" Current video buffer size : %d\n", msg->video_status->vid_bufsize); - g_print (" Current video decoded PTS : %lld\n", 
msg->video_status->vid_pts); ++ g_print (" Current video decoded PTS : %" G_GUINT64_FORMAT "\n", ++ msg->video_status->vid_pts); + g_print ("\r\n"); + } + + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_supported_audio_format (GstWFDMessage * msg, + GstWFDAudioFormats a_codec, + guint a_freq, guint a_channels, guint a_bitwidth, guint32 a_latency) +{ + guint i = 0; + guint pcm = 0, aac = 0, ac3 = 0; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->audio_codecs) + msg->audio_codecs = g_new0 (GstWFDAudioCodeclist, 1); + + if (a_codec != GST_WFD_AUDIO_UNKNOWN) { + + if (a_codec & GST_WFD_AUDIO_LPCM) + msg->audio_codecs->count++; + if (a_codec & GST_WFD_AUDIO_AAC) + msg->audio_codecs->count++; + if (a_codec & GST_WFD_AUDIO_AC3) + msg->audio_codecs->count++; + + msg->audio_codecs->list = + g_new0 (GstWFDAudioCodec, msg->audio_codecs->count); + for (; i < msg->audio_codecs->count; i++) { + if ((a_codec & GST_WFD_AUDIO_LPCM) && (!pcm)) { + msg->audio_codecs->list[i].audio_format = g_strdup ("LPCM"); + msg->audio_codecs->list[i].modes = a_freq; + msg->audio_codecs->list[i].latency = a_latency; + pcm = 1; + } else if ((a_codec & GST_WFD_AUDIO_AAC) && (!aac)) { + msg->audio_codecs->list[i].audio_format = g_strdup ("AAC"); + msg->audio_codecs->list[i].modes = a_channels; + msg->audio_codecs->list[i].latency = a_latency; + aac = 1; + } else if ((a_codec & GST_WFD_AUDIO_AC3) && (!ac3)) { + msg->audio_codecs->list[i].audio_format = g_strdup ("AC3"); + msg->audio_codecs->list[i].modes = a_channels; + msg->audio_codecs->list[i].latency = a_latency; + ac3 = 1; + } + } + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_preferred_audio_format (GstWFDMessage * msg, + GstWFDAudioFormats a_codec, + GstWFDAudioFreq a_freq, + GstWFDAudioChannels a_channels, guint a_bitwidth, guint32 a_latency) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->audio_codecs) + msg->audio_codecs = g_new0 (GstWFDAudioCodeclist, 1); + + if 
(a_codec == GST_WFD_AUDIO_UNKNOWN) { + msg->audio_codecs->list = NULL; + msg->audio_codecs->count = 0; + return GST_WFD_OK; + } + msg->audio_codecs->list = g_new0 (GstWFDAudioCodec, 1); + msg->audio_codecs->count = 1; + if (a_codec == GST_WFD_AUDIO_LPCM) { + msg->audio_codecs->list->audio_format = g_strdup ("LPCM"); + msg->audio_codecs->list->modes = a_freq; + msg->audio_codecs->list->latency = a_latency; + } else if (a_codec == GST_WFD_AUDIO_AAC) { + msg->audio_codecs->list->audio_format = g_strdup ("AAC"); + msg->audio_codecs->list->modes = a_channels; + msg->audio_codecs->list->latency = a_latency; + } else if (a_codec == GST_WFD_AUDIO_AC3) { + msg->audio_codecs->list->audio_format = g_strdup ("AC3"); + msg->audio_codecs->list->modes = a_channels; + msg->audio_codecs->list->latency = a_latency; + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_supported_audio_format (GstWFDMessage * msg, + guint * a_codec, + guint * a_freq, guint * a_channels, guint * a_bitwidth, guint32 * a_latency) +{ + guint i = 0; + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (msg->audio_codecs != NULL, GST_WFD_EINVAL); + + for (; i < msg->audio_codecs->count; i++) { + if (!g_strcmp0 (msg->audio_codecs->list[i].audio_format, "LPCM")) { + *a_codec |= GST_WFD_AUDIO_LPCM; + *a_freq |= msg->audio_codecs->list[i].modes; + *a_channels |= GST_WFD_CHANNEL_2; + *a_bitwidth = 16; + *a_latency = msg->audio_codecs->list[i].latency; + } else if (!g_strcmp0 (msg->audio_codecs->list[i].audio_format, "AAC")) { + *a_codec |= GST_WFD_AUDIO_AAC; + *a_freq |= GST_WFD_FREQ_48000; + *a_channels |= msg->audio_codecs->list[i].modes; + *a_bitwidth = 16; + *a_latency = msg->audio_codecs->list[i].latency; + } else if (!g_strcmp0 (msg->audio_codecs->list[i].audio_format, "AC3")) { + *a_codec |= GST_WFD_AUDIO_AC3; + *a_freq |= GST_WFD_FREQ_48000; + *a_channels |= msg->audio_codecs->list[i].modes; + *a_bitwidth = 16; + *a_latency = msg->audio_codecs->list[i].latency; + } + } 
+ return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_preferred_audio_format (GstWFDMessage * msg, + GstWFDAudioFormats * a_codec, + GstWFDAudioFreq * a_freq, + GstWFDAudioChannels * a_channels, guint * a_bitwidth, guint32 * a_latency) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!g_strcmp0 (msg->audio_codecs->list->audio_format, "LPCM")) { + *a_codec = GST_WFD_AUDIO_LPCM; + *a_freq = msg->audio_codecs->list->modes; + *a_channels = GST_WFD_CHANNEL_2; + *a_bitwidth = 16; + *a_latency = msg->audio_codecs->list->latency; + } else if (!g_strcmp0 (msg->audio_codecs->list->audio_format, "AAC")) { + *a_codec = GST_WFD_AUDIO_AAC; + *a_freq = GST_WFD_FREQ_48000; + *a_channels = msg->audio_codecs->list->modes; + *a_bitwidth = 16; + *a_latency = msg->audio_codecs->list->latency; + } else if (!g_strcmp0 (msg->audio_codecs->list->audio_format, "AC3")) { + *a_codec = GST_WFD_AUDIO_AC3; + *a_freq = GST_WFD_FREQ_48000; + *a_channels = msg->audio_codecs->list->modes; + *a_bitwidth = 16; + *a_latency = msg->audio_codecs->list->latency; + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_supported_video_format (GstWFDMessage * msg, + GstWFDVideoCodecs v_codec, + GstWFDVideoNativeResolution v_native, + guint64 v_native_resolution, + guint64 v_cea_resolution, + guint64 v_vesa_resolution, + guint64 v_hh_resolution, + guint v_profile, + guint v_level, + guint32 v_latency, + guint32 v_max_height, + guint32 v_max_width, + guint32 min_slice_size, guint32 slice_enc_params, guint frame_rate_control) +{ + guint nativeindex = 0; + guint64 temp = v_native_resolution; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->video_formats) + msg->video_formats = g_new0 (GstWFDVideoCodeclist, 1); + + if (v_codec != GST_WFD_VIDEO_UNKNOWN) { + msg->video_formats->list = g_new0 (GstWFDVideoCodec, 1); + + temp >>= 1; + while (temp) { + nativeindex++; + temp >>= 1; + } + + msg->video_formats->list->native = nativeindex; + 
msg->video_formats->list->native <<= 3; + + if (v_native == GST_WFD_VIDEO_VESA_RESOLUTION) + msg->video_formats->list->native |= 1; + else if (v_native == GST_WFD_VIDEO_HH_RESOLUTION) + msg->video_formats->list->native |= 2; + + msg->video_formats->list->preferred_display_mode_supported = 1; + msg->video_formats->list->H264_codec.profile = v_profile; + msg->video_formats->list->H264_codec.level = v_level; + msg->video_formats->list->H264_codec.max_hres = v_max_width; + msg->video_formats->list->H264_codec.max_vres = v_max_height; + msg->video_formats->list->H264_codec.misc_params.CEA_Support = + v_cea_resolution; + msg->video_formats->list->H264_codec.misc_params.VESA_Support = + v_vesa_resolution; + msg->video_formats->list->H264_codec.misc_params.HH_Support = + v_hh_resolution; + msg->video_formats->list->H264_codec.misc_params.latency = v_latency; + msg->video_formats->list->H264_codec.misc_params.min_slice_size = + min_slice_size; + msg->video_formats->list->H264_codec.misc_params.slice_enc_params = + slice_enc_params; + msg->video_formats->list->H264_codec.misc_params. 
+ frame_rate_control_support = frame_rate_control; + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_preferred_video_format (GstWFDMessage * msg, + GstWFDVideoCodecs v_codec, + GstWFDVideoNativeResolution v_native, + guint64 v_native_resolution, + GstWFDVideoCEAResolution v_cea_resolution, + GstWFDVideoVESAResolution v_vesa_resolution, + GstWFDVideoHHResolution v_hh_resolution, + GstWFDVideoH264Profile v_profile, + GstWFDVideoH264Level v_level, + guint32 v_latency, + guint32 v_max_height, + guint32 v_max_width, + guint32 min_slice_size, guint32 slice_enc_params, guint frame_rate_control) +{ + guint nativeindex = 0; + guint64 temp = v_native_resolution; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->video_formats) + msg->video_formats = g_new0 (GstWFDVideoCodeclist, 1); + + if (v_codec == GST_WFD_VIDEO_UNKNOWN) { + msg->video_formats->list = NULL; + msg->video_formats->count = 0; + return GST_WFD_OK; + } + + msg->video_formats->list = g_new0 (GstWFDVideoCodec, 1); + + while (temp) { + nativeindex++; + temp >>= 1; + } + + if (nativeindex) + msg->video_formats->list->native = nativeindex - 1; + msg->video_formats->list->native <<= 3; + + if (v_native == GST_WFD_VIDEO_VESA_RESOLUTION) + msg->video_formats->list->native |= 1; + else if (v_native == GST_WFD_VIDEO_HH_RESOLUTION) + msg->video_formats->list->native |= 2; + + msg->video_formats->list->preferred_display_mode_supported = 0; + msg->video_formats->list->H264_codec.profile = v_profile; + msg->video_formats->list->H264_codec.level = v_level; + msg->video_formats->list->H264_codec.max_hres = v_max_width; + msg->video_formats->list->H264_codec.max_vres = v_max_height; + msg->video_formats->list->H264_codec.misc_params.CEA_Support = + v_cea_resolution; + msg->video_formats->list->H264_codec.misc_params.VESA_Support = + v_vesa_resolution; + msg->video_formats->list->H264_codec.misc_params.HH_Support = v_hh_resolution; + msg->video_formats->list->H264_codec.misc_params.latency = 
v_latency; + msg->video_formats->list->H264_codec.misc_params.min_slice_size = + min_slice_size; + msg->video_formats->list->H264_codec.misc_params.slice_enc_params = + slice_enc_params; + msg->video_formats->list->H264_codec.misc_params.frame_rate_control_support = + frame_rate_control; + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_supported_video_format (GstWFDMessage * msg, + GstWFDVideoCodecs * v_codec, + GstWFDVideoNativeResolution * v_native, + guint64 * v_native_resolution, + guint64 * v_cea_resolution, + guint64 * v_vesa_resolution, + guint64 * v_hh_resolution, + guint * v_profile, + guint * v_level, + guint32 * v_latency, + guint32 * v_max_height, + guint32 * v_max_width, + guint32 * min_slice_size, + guint32 * slice_enc_params, guint * frame_rate_control) +{ + guint nativeindex = 0; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + *v_codec = GST_WFD_VIDEO_H264; + *v_native = msg->video_formats->list->native & 0x7; + nativeindex = msg->video_formats->list->native >> 3; + *v_native_resolution = ((guint64) 1) << nativeindex; + *v_profile = msg->video_formats->list->H264_codec.profile; + *v_level = msg->video_formats->list->H264_codec.level; + *v_max_width = msg->video_formats->list->H264_codec.max_hres; + *v_max_height = msg->video_formats->list->H264_codec.max_vres; + *v_cea_resolution = + msg->video_formats->list->H264_codec.misc_params.CEA_Support; + *v_vesa_resolution = + msg->video_formats->list->H264_codec.misc_params.VESA_Support; + *v_hh_resolution = + msg->video_formats->list->H264_codec.misc_params.HH_Support; + *v_latency = msg->video_formats->list->H264_codec.misc_params.latency; + *min_slice_size = + msg->video_formats->list->H264_codec.misc_params.min_slice_size; + *slice_enc_params = + msg->video_formats->list->H264_codec.misc_params.slice_enc_params; + *frame_rate_control = + msg->video_formats->list->H264_codec.misc_params. 
+ frame_rate_control_support; + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_preferred_video_format (GstWFDMessage * msg, + GstWFDVideoCodecs * v_codec, + GstWFDVideoNativeResolution * v_native, + guint64 * v_native_resolution, + GstWFDVideoCEAResolution * v_cea_resolution, + GstWFDVideoVESAResolution * v_vesa_resolution, + GstWFDVideoHHResolution * v_hh_resolution, + GstWFDVideoH264Profile * v_profile, + GstWFDVideoH264Level * v_level, + guint32 * v_latency, + guint32 * v_max_height, + guint32 * v_max_width, + guint32 * min_slice_size, + guint32 * slice_enc_params, guint * frame_rate_control) +{ + guint nativeindex = 0; + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + *v_codec = GST_WFD_VIDEO_H264; + *v_native = msg->video_formats->list->native & 0x7; + nativeindex = msg->video_formats->list->native >> 3; + *v_native_resolution = ((guint64) 1) << nativeindex; + *v_profile = msg->video_formats->list->H264_codec.profile; + *v_level = msg->video_formats->list->H264_codec.level; + *v_max_width = msg->video_formats->list->H264_codec.max_hres; + *v_max_height = msg->video_formats->list->H264_codec.max_vres; + *v_cea_resolution = + msg->video_formats->list->H264_codec.misc_params.CEA_Support; + *v_vesa_resolution = + msg->video_formats->list->H264_codec.misc_params.VESA_Support; + *v_hh_resolution = + msg->video_formats->list->H264_codec.misc_params.HH_Support; + *v_latency = msg->video_formats->list->H264_codec.misc_params.latency; + *min_slice_size = + msg->video_formats->list->H264_codec.misc_params.min_slice_size; + *slice_enc_params = + msg->video_formats->list->H264_codec.misc_params.slice_enc_params; + *frame_rate_control = + msg->video_formats->list->H264_codec.misc_params. 
+ frame_rate_control_support; + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_supported_wfd2_audio_codec (GstWFDMessage * msg, + GstWFDAudioFormats a_codec, + guint a_freq, guint a_channels, guint a_bitwidth, guint32 a_latency) +{ + guint temp = a_codec; + guint i = 0; + guint pcm = 0, aac = 0, ac3 = 0, cta = 0, aac_eldv2 = 0; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->wfd2_audio_codecs) + msg->wfd2_audio_codecs = g_new0 (GstWFD2AudioCodeclist, 1); + + if (a_codec != GST_WFD_AUDIO_UNKNOWN) { + while (temp) { + msg->wfd2_audio_codecs->count++; + temp >>= 1; + } + msg->wfd2_audio_codecs->list = + g_new0 (GstWFDAudioCodec, msg->wfd2_audio_codecs->count); + for (; i < msg->wfd2_audio_codecs->count; i++) { + if ((a_codec & GST_WFD_AUDIO_LPCM) && (!pcm)) { + msg->wfd2_audio_codecs->list[i].audio_format = g_strdup ("LPCM"); + msg->wfd2_audio_codecs->list[i].modes = a_freq; + msg->wfd2_audio_codecs->list[i].latency = a_latency; + pcm = 1; + } else if ((a_codec & GST_WFD_AUDIO_AAC) && (!aac)) { + msg->wfd2_audio_codecs->list[i].audio_format = g_strdup ("AAC"); + msg->wfd2_audio_codecs->list[i].modes = a_channels; + msg->wfd2_audio_codecs->list[i].latency = a_latency; + aac = 1; + } else if ((a_codec & GST_WFD_AUDIO_AC3) && (!ac3)) { + msg->wfd2_audio_codecs->list[i].audio_format = g_strdup ("AC3"); + msg->wfd2_audio_codecs->list[i].modes = a_channels; + msg->wfd2_audio_codecs->list[i].latency = a_latency; + ac3 = 1; + } else if ((a_codec & GST_WFD_AUDIO_CTA) && (!cta)) { + msg->wfd2_audio_codecs->list[i].audio_format = g_strdup ("CTA"); + msg->wfd2_audio_codecs->list[i].modes = a_channels; + msg->wfd2_audio_codecs->list[i].latency = a_latency; + cta = 1; + } else if ((a_codec & GST_WFD_AUDIO_AAC_ELDV2) && (!aac_eldv2)) { + msg->wfd2_audio_codecs->list[i].audio_format = g_strdup ("AAC-ELDv2"); + msg->wfd2_audio_codecs->list[i].modes = a_channels; + msg->wfd2_audio_codecs->list[i].latency = a_latency; + aac_eldv2 = 1; + } + } + } + 
return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_preferred_wfd2_audio_codec (GstWFDMessage * msg, + GstWFDAudioFormats a_codec, + GstWFDAudioFreq a_freq, + GstWFDAudioChannels a_channels, guint a_bitwidth, guint32 a_latency) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->wfd2_audio_codecs) + msg->wfd2_audio_codecs = g_new0 (GstWFD2AudioCodeclist, 1); + + msg->wfd2_audio_codecs->list = g_new0 (GstWFDAudioCodec, 1); + msg->wfd2_audio_codecs->count = 1; + if (a_codec == GST_WFD_AUDIO_LPCM) { + msg->wfd2_audio_codecs->list->audio_format = g_strdup ("LPCM"); + msg->wfd2_audio_codecs->list->modes = a_freq; + msg->wfd2_audio_codecs->list->latency = a_latency; + } else if (a_codec == GST_WFD_AUDIO_AAC) { + msg->wfd2_audio_codecs->list->audio_format = g_strdup ("AAC"); + msg->wfd2_audio_codecs->list->modes = a_channels; + msg->wfd2_audio_codecs->list->latency = a_latency; + } else if (a_codec == GST_WFD_AUDIO_AC3) { + msg->wfd2_audio_codecs->list->audio_format = g_strdup ("AC3"); + msg->wfd2_audio_codecs->list->modes = a_channels; + msg->wfd2_audio_codecs->list->latency = a_latency; + } else if (a_codec == GST_WFD_AUDIO_CTA) { + msg->wfd2_audio_codecs->list->audio_format = g_strdup ("CTA"); + msg->wfd2_audio_codecs->list->modes = a_channels; + msg->wfd2_audio_codecs->list->latency = a_latency; + } else if (a_codec == GST_WFD_AUDIO_AAC_ELDV2) { + msg->wfd2_audio_codecs->list->audio_format = g_strdup ("AAC-ELDv2"); + msg->wfd2_audio_codecs->list->modes = a_channels; + msg->wfd2_audio_codecs->list->latency = a_latency; + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_supported_wfd2_audio_codec (GstWFDMessage * msg, + guint * a_codec, + guint * a_freq, guint * a_channels, guint * a_bitwidth, guint32 * a_latency) +{ + guint i = 0; + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (msg->wfd2_audio_codecs != NULL, GST_WFD_EINVAL); + + for (; i < msg->wfd2_audio_codecs->count; i++) { + if (!g_strcmp0 
(msg->wfd2_audio_codecs->list[i].audio_format, "LPCM")) { + *a_codec |= GST_WFD_AUDIO_LPCM; + *a_freq |= msg->wfd2_audio_codecs->list[i].modes; + *a_channels |= GST_WFD_CHANNEL_2; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list[i].latency; + } else if (!g_strcmp0 (msg->wfd2_audio_codecs->list[i].audio_format, "AAC")) { + *a_codec |= GST_WFD_AUDIO_AAC; + *a_freq |= GST_WFD_FREQ_48000; + *a_channels |= msg->wfd2_audio_codecs->list[i].modes; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list[i].latency; + } else if (!g_strcmp0 (msg->wfd2_audio_codecs->list[i].audio_format, "AC3")) { + *a_codec |= GST_WFD_AUDIO_AC3; + *a_freq |= GST_WFD_FREQ_48000; + *a_channels |= msg->wfd2_audio_codecs->list[i].modes; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list[i].latency; + } else if (!g_strcmp0 (msg->wfd2_audio_codecs->list[i].audio_format, "CTA")) { + *a_codec |= GST_WFD_AUDIO_CTA; + *a_freq |= GST_WFD_FREQ_48000; + *a_channels |= msg->wfd2_audio_codecs->list[i].modes; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list[i].latency; + } else if (!g_strcmp0 (msg->wfd2_audio_codecs->list[i].audio_format, "AAC-ELDv2")) { + *a_codec |= GST_WFD_AUDIO_AAC_ELDV2; + *a_freq |= GST_WFD_FREQ_48000; + *a_channels |= msg->wfd2_audio_codecs->list[i].modes; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list[i].latency; + } + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_preferred_wfd2_audio_codec (GstWFDMessage * msg, + GstWFDAudioFormats * a_codec, + GstWFDAudioFreq * a_freq, + GstWFDAudioChannels * a_channels, guint * a_bitwidth, guint32 * a_latency) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!g_strcmp0 (msg->wfd2_audio_codecs->list->audio_format, "LPCM")) { + *a_codec = GST_WFD_AUDIO_LPCM; + *a_freq = msg->wfd2_audio_codecs->list->modes; + *a_channels = GST_WFD_CHANNEL_2; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list->latency; + } else if (!g_strcmp0 
(msg->wfd2_audio_codecs->list->audio_format, "AAC")) { + *a_codec = GST_WFD_AUDIO_AAC; + *a_freq = GST_WFD_FREQ_48000; + *a_channels = msg->wfd2_audio_codecs->list->modes; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list->latency; + } else if (!g_strcmp0 (msg->wfd2_audio_codecs->list->audio_format, "AC3")) { + *a_codec = GST_WFD_AUDIO_AC3; + *a_freq = GST_WFD_FREQ_48000; + *a_channels = msg->wfd2_audio_codecs->list->modes; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list->latency; + } else if (!g_strcmp0 (msg->wfd2_audio_codecs->list->audio_format, "CTA")) { + *a_codec = GST_WFD_AUDIO_CTA; + *a_freq = GST_WFD_FREQ_48000; + *a_channels = msg->wfd2_audio_codecs->list->modes; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list->latency; + } else if (!g_strcmp0 (msg->wfd2_audio_codecs->list->audio_format, "AAC-ELDv2")) { + *a_codec = GST_WFD_AUDIO_AAC_ELDV2; + *a_freq = GST_WFD_FREQ_48000; + *a_channels = msg->wfd2_audio_codecs->list->modes; + *a_bitwidth = 16; + *a_latency = msg->wfd2_audio_codecs->list->latency; + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_supported_direct_video_format (GstWFDMessage * msg, + GstWFDVideoCodecs v_codec, + GstWFDVideoNativeResolution v_native, + guint64 v_native_resolution, + guint64 v_cea_resolution, + guint64 v_vesa_resolution, + guint64 v_hh_resolution, + guint v_profile, + guint v_level, + guint32 v_latency, + guint32 v_max_height, + guint32 v_max_width, + guint32 min_slice_size, guint32 slice_enc_params, guint frame_rate_control) +{ + guint nativeindex = 0; + guint64 temp = v_native_resolution; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->direct_video_formats) + msg->direct_video_formats = g_new0 (GstWFD2VideoCodeclist, 1); + + if (v_codec != GST_WFD_VIDEO_UNKNOWN) { + msg->direct_video_formats->list = g_new0 (GstWFDVideoCodec, 1); + while (temp) { + nativeindex++; + temp >>= 1; + } + + if (nativeindex) msg->direct_video_formats->list->native = 
nativeindex - 1; + msg->direct_video_formats->list->native <<= 3; + + if (v_native == GST_WFD_VIDEO_VESA_RESOLUTION) + msg->direct_video_formats->list->native |= 1; + else if (v_native == GST_WFD_VIDEO_HH_RESOLUTION) + msg->direct_video_formats->list->native |= 2; + + msg->direct_video_formats->list->preferred_display_mode_supported = 1; + msg->direct_video_formats->list->H264_codec.profile = v_profile; + msg->direct_video_formats->list->H264_codec.level = v_level; + msg->direct_video_formats->list->H264_codec.max_hres = v_max_width; + msg->direct_video_formats->list->H264_codec.max_vres = v_max_height; + msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support = + v_cea_resolution; + msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support = + v_vesa_resolution; + msg->direct_video_formats->list->H264_codec.misc_params.HH_Support = + v_hh_resolution; + msg->direct_video_formats->list->H264_codec.misc_params.latency = v_latency; + msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size = + min_slice_size; + msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params = + slice_enc_params; + msg->direct_video_formats->list->H264_codec. 
+ misc_params.frame_rate_control_support = frame_rate_control; + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_preferred_direct_video_format (GstWFDMessage * msg, + GstWFDVideoCodecs v_codec, + GstWFDVideoNativeResolution v_native, + guint64 v_native_resolution, + guint64 v_cea_resolution, + guint64 v_vesa_resolution, + guint64 v_hh_resolution, + GstWFDVideoH264Profile v_profile, + GstWFDVideoH264Level v_level, + guint32 v_latency, + guint32 v_max_height, + guint32 v_max_width, + guint32 min_slice_size, guint32 slice_enc_params, guint frame_rate_control) +{ + guint nativeindex = 0; + guint64 temp = v_native_resolution; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->direct_video_formats) + msg->direct_video_formats = g_new0 (GstWFD2VideoCodeclist, 1); + msg->direct_video_formats->list = g_new0 (GstWFDVideoCodec, 1); + + while (temp) { + nativeindex++; + temp >>= 1; + } + + if (nativeindex) + msg->direct_video_formats->list->native = nativeindex - 1; + msg->direct_video_formats->list->native <<= 3; + + if (v_native == GST_WFD_VIDEO_VESA_RESOLUTION) + msg->direct_video_formats->list->native |= 1; + else if (v_native == GST_WFD_VIDEO_HH_RESOLUTION) + msg->direct_video_formats->list->native |= 2; + + msg->direct_video_formats->list->preferred_display_mode_supported = 0; + msg->direct_video_formats->list->H264_codec.profile = v_profile; + msg->direct_video_formats->list->H264_codec.level = v_level; + msg->direct_video_formats->list->H264_codec.max_hres = v_max_width; + msg->direct_video_formats->list->H264_codec.max_vres = v_max_height; + msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support = + v_cea_resolution; + msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support = + v_vesa_resolution; + msg->direct_video_formats->list->H264_codec.misc_params.HH_Support = v_hh_resolution; + msg->direct_video_formats->list->H264_codec.misc_params.latency = v_latency; + 
msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size = + min_slice_size; + msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params = + slice_enc_params; + msg->direct_video_formats->list->H264_codec.misc_params.frame_rate_control_support = + frame_rate_control; + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_supported_direct_video_format (GstWFDMessage * msg, + GstWFDVideoCodecs * v_codec, + GstWFDVideoNativeResolution * v_native, + guint64 * v_native_resolution, + guint64 * v_cea_resolution, + guint64 * v_vesa_resolution, + guint64 * v_hh_resolution, + guint * v_profile, + guint * v_level, + guint32 * v_latency, + guint32 * v_max_height, + guint32 * v_max_width, + guint32 * min_slice_size, + guint32 * slice_enc_params, guint * frame_rate_control) +{ + guint nativeindex = 0; + + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + *v_codec = GST_WFD_VIDEO_H264; + *v_native = msg->direct_video_formats->list->native & 0x7; + nativeindex = msg->direct_video_formats->list->native >> 3; + *v_native_resolution = ((guint64) 1) << nativeindex; + *v_profile = msg->direct_video_formats->list->H264_codec.profile; + *v_level = msg->direct_video_formats->list->H264_codec.level; + *v_max_width = msg->direct_video_formats->list->H264_codec.max_hres; + *v_max_height = msg->direct_video_formats->list->H264_codec.max_vres; + *v_cea_resolution = + msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support; + *v_vesa_resolution = + msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support; + *v_hh_resolution = + msg->direct_video_formats->list->H264_codec.misc_params.HH_Support; + *v_latency = msg->direct_video_formats->list->H264_codec.misc_params.latency; + *min_slice_size = + msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size; + *slice_enc_params = + msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params; + *frame_rate_control = + msg->direct_video_formats->list->H264_codec. 
+ misc_params.frame_rate_control_support; + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_preferred_direct_video_format (GstWFDMessage * msg, + GstWFDVideoCodecs * v_codec, + GstWFDVideoNativeResolution * v_native, + guint64 * v_native_resolution, + guint64 * v_cea_resolution, + guint64 * v_vesa_resolution, + guint64 * v_hh_resolution, + GstWFDVideoH264Profile * v_profile, + GstWFDVideoH264Level * v_level, + guint32 * v_latency, + guint32 * v_max_height, + guint32 * v_max_width, + guint32 * min_slice_size, + guint32 * slice_enc_params, guint * frame_rate_control) +{ + guint nativeindex = 0; + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + *v_codec = GST_WFD_VIDEO_H264; + *v_native = msg->direct_video_formats->list->native & 0x7; + nativeindex = msg->direct_video_formats->list->native >> 3; + *v_native_resolution = ((guint64) 1) << nativeindex; + *v_profile = msg->direct_video_formats->list->H264_codec.profile; + *v_level = msg->direct_video_formats->list->H264_codec.level; + *v_max_width = msg->direct_video_formats->list->H264_codec.max_hres; + *v_max_height = msg->direct_video_formats->list->H264_codec.max_vres; + *v_cea_resolution = + msg->direct_video_formats->list->H264_codec.misc_params.CEA_Support; + *v_vesa_resolution = + msg->direct_video_formats->list->H264_codec.misc_params.VESA_Support; + *v_hh_resolution = + msg->direct_video_formats->list->H264_codec.misc_params.HH_Support; + *v_latency = msg->direct_video_formats->list->H264_codec.misc_params.latency; + *min_slice_size = + msg->direct_video_formats->list->H264_codec.misc_params.min_slice_size; + *slice_enc_params = + msg->direct_video_formats->list->H264_codec.misc_params.slice_enc_params; + *frame_rate_control = + msg->direct_video_formats->list->H264_codec. 
+ misc_params.frame_rate_control_support; + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_display_edid (GstWFDMessage * msg, + gboolean edid_supported, guint32 edid_blockcount, gchar * edid_playload) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + if (!msg->display_edid) + msg->display_edid = g_new0 (GstWFDDisplayEdid, 1); + msg->display_edid->edid_supported = edid_supported; + if (!edid_supported) + return GST_WFD_OK; + if (edid_blockcount > 0 && edid_blockcount <= EDID_BLOCK_COUNT_MAX_SIZE) { + msg->display_edid->edid_block_count = edid_blockcount; + msg->display_edid->edid_payload = + g_malloc (EDID_BLOCK_SIZE * edid_blockcount); + if (msg->display_edid->edid_payload) + memcpy (msg->display_edid->edid_payload, edid_playload, + EDID_BLOCK_SIZE * edid_blockcount); + else + msg->display_edid->edid_supported = FALSE; + } else + msg->display_edid->edid_supported = FALSE; + + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_display_edid (GstWFDMessage * msg, + gboolean * edid_supported, + guint32 * edid_blockcount, gchar ** edid_playload) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (edid_supported != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (edid_blockcount != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (edid_playload != NULL, GST_WFD_EINVAL); + + *edid_supported = FALSE; + if (msg->display_edid) { + if (msg->display_edid->edid_supported) { + *edid_blockcount = msg->display_edid->edid_block_count; + if (msg->display_edid->edid_block_count > 0 + && msg->display_edid->edid_block_count <= EDID_BLOCK_COUNT_MAX_SIZE) { + char *temp; + temp = + g_malloc0 (EDID_BLOCK_SIZE * msg->display_edid->edid_block_count); + if (temp) { + memcpy (temp, msg->display_edid->edid_payload, + EDID_BLOCK_SIZE * msg->display_edid->edid_block_count); + *edid_playload = temp; + *edid_supported = TRUE; + } + } + } + } + return GST_WFD_OK; +} + +GstWFDResult gst_wfd_message_set_coupled_sink(GstWFDMessage *msg, + 
GstWFDCoupledSinkStatus status, gchar *sink_address, gboolean sink_supported) +{ + g_return_val_if_fail(msg != NULL, GST_WFD_EINVAL); + if (!msg->coupled_sink) msg->coupled_sink = g_new0(GstWFDCoupledSink, 1); + if (!sink_supported) return GST_WFD_OK; + msg->coupled_sink->coupled_sink_cap = g_new0(GstWFDCoupledSinkCap, 1); + msg->coupled_sink->coupled_sink_cap->status = status; + msg->coupled_sink->coupled_sink_cap->sink_address = g_strdup(sink_address); + msg->coupled_sink->coupled_sink_cap->sink_supported = sink_supported; + return GST_WFD_OK; +} + +GstWFDResult gst_wfd_message_get_coupled_sink(GstWFDMessage *msg, + GstWFDCoupledSinkStatus *status, gchar **sink_address, gboolean *sink_supported) +{ + g_return_val_if_fail(msg != NULL, GST_WFD_EINVAL); + if (msg->coupled_sink + && msg->coupled_sink->coupled_sink_cap->sink_supported) { + *status = msg->coupled_sink->coupled_sink_cap->status; + *sink_address = g_strdup(msg->coupled_sink->coupled_sink_cap->sink_address); + *sink_supported = (gboolean)msg->coupled_sink->coupled_sink_cap->sink_supported; + } + else { + *status = GST_WFD_SINK_NOT_COUPLED; + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_contentprotection_type (GstWFDMessage * msg, + GstWFDHDCPProtection hdcpversion, guint32 TCPPort) +{ + char str[11] = { 0, }; + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (TCPPort <= MAX_PORT_SIZE, GST_WFD_EINVAL); + + if (!msg->content_protection) + msg->content_protection = g_new0 (GstWFDContentProtection, 1); + if (hdcpversion == GST_WFD_HDCP_NONE) + return GST_WFD_OK; + msg->content_protection->hdcp2_spec = g_new0 (GstWFDHdcp2Spec, 1); + if (hdcpversion == GST_WFD_HDCP_2_0) + msg->content_protection->hdcp2_spec->hdcpversion = g_strdup ("HDCP2.0"); + else if (hdcpversion == GST_WFD_HDCP_2_1) + msg->content_protection->hdcp2_spec->hdcpversion = g_strdup ("HDCP2.1"); + snprintf (str, sizeof (str), "port=%d", TCPPort); + msg->content_protection->hdcp2_spec->TCPPort = 
g_strdup (str); + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_contentprotection_type (GstWFDMessage * msg, + GstWFDHDCPProtection * hdcpversion, guint32 * TCPPort) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + *hdcpversion = GST_WFD_HDCP_NONE; + *TCPPort = 0; + + if (msg->content_protection && msg->content_protection->hdcp2_spec) { + char *result = NULL; + char *ptr = NULL; + if (!g_strcmp0 (msg->content_protection->hdcp2_spec->hdcpversion, "none")) + return GST_WFD_OK; + else if (!g_strcmp0 (msg->content_protection->hdcp2_spec->hdcpversion, + "HDCP2.0")) + *hdcpversion = GST_WFD_HDCP_2_0; + else if (!g_strcmp0 (msg->content_protection->hdcp2_spec->hdcpversion, + "HDCP2.1")) + *hdcpversion = GST_WFD_HDCP_2_1; + else + return GST_WFD_OK; + + if (!msg->content_protection->hdcp2_spec->TCPPort) + return GST_WFD_OK; + + result = strtok_r (msg->content_protection->hdcp2_spec->TCPPort, "=", &ptr); + if (result == NULL || ptr == NULL) + return GST_WFD_OK; + + result = strtok_r (NULL, "=", &ptr); + if (result) + *TCPPort = atoi(result); + } + return GST_WFD_OK; +} + + +GstWFDResult +gst_wfd_messge_set_preferred_rtp_ports (GstWFDMessage * msg, + GstWFDRTSPTransMode trans, + GstWFDRTSPProfile profile, + GstWFDRTSPLowerTrans lowertrans, guint32 rtp_port0, guint32 rtp_port1) +{ + GString *lines; + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->client_rtp_ports) + msg->client_rtp_ports = g_new0 (GstWFDClientRtpPorts, 1); + + if (trans != GST_WFD_RTSP_TRANS_UNKNOWN) { + lines = g_string_new (""); + if (trans == GST_WFD_RTSP_TRANS_RTP) + g_string_append_printf (lines, "RTP"); + else if (trans == GST_WFD_RTSP_TRANS_RDT) + g_string_append_printf (lines, "RDT"); + + if (profile == GST_WFD_RTSP_PROFILE_AVP) + g_string_append_printf (lines, "/AVP"); + else if (profile == GST_WFD_RTSP_PROFILE_SAVP) + g_string_append_printf (lines, "/SAVP"); + + if (lowertrans == GST_WFD_RTSP_LOWER_TRANS_UDP) + g_string_append_printf (lines, "/UDP;unicast"); 
+ else if (lowertrans == GST_WFD_RTSP_LOWER_TRANS_UDP_MCAST) + g_string_append_printf (lines, "/UDP;multicast"); + else if (lowertrans == GST_WFD_RTSP_LOWER_TRANS_TCP) + g_string_append_printf (lines, "/TCP;unicast"); + else if (lowertrans == GST_WFD_RTSP_LOWER_TRANS_HTTP) + g_string_append_printf (lines, "/HTTP"); + + msg->client_rtp_ports->profile = g_string_free (lines, FALSE); + msg->client_rtp_ports->rtp_port0 = rtp_port0; + msg->client_rtp_ports->rtp_port1 = rtp_port1; + msg->client_rtp_ports->mode = g_strdup ("mode=play"); + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_preferred_rtp_ports (GstWFDMessage * msg, + GstWFDRTSPTransMode * trans, + GstWFDRTSPProfile * profile, + GstWFDRTSPLowerTrans * lowertrans, guint32 * rtp_port0, guint32 * rtp_port1) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (msg->client_rtp_ports != NULL, GST_WFD_EINVAL); + + if (g_strrstr (msg->client_rtp_ports->profile, "RTP")) + *trans = GST_WFD_RTSP_TRANS_RTP; + if (g_strrstr (msg->client_rtp_ports->profile, "RDT")) + *trans = GST_WFD_RTSP_TRANS_RDT; + if (g_strrstr (msg->client_rtp_ports->profile, "AVP")) + *profile = GST_WFD_RTSP_PROFILE_AVP; + if (g_strrstr (msg->client_rtp_ports->profile, "SAVP")) + *profile = GST_WFD_RTSP_PROFILE_SAVP; + if (g_strrstr (msg->client_rtp_ports->profile, "UDP;unicast")) + *lowertrans = GST_WFD_RTSP_LOWER_TRANS_UDP; + if (g_strrstr (msg->client_rtp_ports->profile, "UDP;multicast")) + *lowertrans = GST_WFD_RTSP_LOWER_TRANS_UDP_MCAST; + if (g_strrstr (msg->client_rtp_ports->profile, "TCP;unicast")) + *lowertrans = GST_WFD_RTSP_LOWER_TRANS_TCP; + if (g_strrstr (msg->client_rtp_ports->profile, "HTTP")) + *lowertrans = GST_WFD_RTSP_LOWER_TRANS_HTTP; + + *rtp_port0 = msg->client_rtp_ports->rtp_port0; + *rtp_port1 = msg->client_rtp_ports->rtp_port1; + + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_presentation_url (GstWFDMessage * msg, gchar * wfd_url0, + gchar * wfd_url1) +{ + 
g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->presentation_url) + msg->presentation_url = g_new0 (GstWFDPresentationUrl, 1); + if (wfd_url0) + msg->presentation_url->wfd_url0 = g_strdup (wfd_url0); + if (wfd_url1) + msg->presentation_url->wfd_url1 = g_strdup (wfd_url1); + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_presentation_url (GstWFDMessage * msg, gchar ** wfd_url0, + gchar ** wfd_url1) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (msg->presentation_url) { + *wfd_url0 = g_strdup (msg->presentation_url->wfd_url0); + *wfd_url1 = g_strdup (msg->presentation_url->wfd_url1); + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_av_format_change_timing (GstWFDMessage * msg, guint64 PTS, + guint64 DTS) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->av_format_change_timing) + msg->av_format_change_timing = g_new0 (GstWFDAVFormatChangeTiming, 1); + + msg->av_format_change_timing->PTS = PTS; + msg->av_format_change_timing->DTS = DTS; + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_av_format_change_timing (GstWFDMessage * msg, guint64 * PTS, + guint64 * DTS) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (PTS != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (DTS != NULL, GST_WFD_EINVAL); + + if (msg->av_format_change_timing) { + *PTS = msg->av_format_change_timing->PTS; + *DTS = msg->av_format_change_timing->DTS; + } + + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_direct_streaming_mode(GstWFDMessage *msg, gboolean enable) +{ + g_return_val_if_fail(msg != NULL, GST_WFD_EINVAL); + + if (!msg->direct_mode) + msg->direct_mode = g_new0(GstWFD2DirectStreamingMode, 1); + + msg->direct_mode->direct_mode = enable; + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_messge_set_preferred_tcp_ports (GstWFDMessage *msg, + GstWFDRTSPTransMode trans, + GstWFDRTSPProfile profile, + GstWFDRTSPLowerTrans lowertrans, + guint32 rtp_port0, + 
guint32 rtp_port1) +{ + GString *lines; + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->tcp_ports) + msg->tcp_ports = g_new0 (GstWFDTCPPorts, 1); + + if (trans != GST_WFD_RTSP_TRANS_UNKNOWN) { + lines = g_string_new (""); + if (trans == GST_WFD_RTSP_TRANS_RTP) + g_string_append_printf (lines, "RTP"); + else if (trans == GST_WFD_RTSP_TRANS_RDT) + g_string_append_printf (lines, "RDT"); + + if (profile == GST_WFD_RTSP_PROFILE_AVP) + g_string_append_printf (lines, "/AVP"); + else if (profile == GST_WFD_RTSP_PROFILE_SAVP) + g_string_append_printf (lines, "/SAVP"); + + if (lowertrans == GST_WFD_RTSP_LOWER_TRANS_UDP) + g_string_append_printf (lines, "/UDP;unicast"); + else if (lowertrans == GST_WFD_RTSP_LOWER_TRANS_UDP_MCAST) + g_string_append_printf (lines, "/UDP;multicast"); + else if (lowertrans == GST_WFD_RTSP_LOWER_TRANS_TCP) + g_string_append_printf (lines, "/TCP;unicast"); + else if (lowertrans == GST_WFD_RTSP_LOWER_TRANS_HTTP) + g_string_append_printf (lines, "/HTTP"); + + msg->tcp_ports->profile = g_string_free (lines, FALSE); + msg->tcp_ports->rtp_port0 = rtp_port0; + msg->tcp_ports->rtp_port1 = rtp_port1; + msg->tcp_ports->mode = g_strdup ("mode=play"); + } + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_preferred_tcp_ports (GstWFDMessage *msg, + GstWFDRTSPTransMode *trans, + GstWFDRTSPProfile *profile, + GstWFDRTSPLowerTrans *lowertrans, + guint32 *rtp_port0, + guint32 *rtp_port1) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (msg->tcp_ports != NULL, GST_WFD_EINVAL); + + if (g_strrstr (msg->tcp_ports->profile, "RTP")) + *trans = GST_WFD_RTSP_TRANS_RTP; + if (g_strrstr (msg->tcp_ports->profile, "RDT")) + *trans = GST_WFD_RTSP_TRANS_RDT; + if (g_strrstr (msg->tcp_ports->profile, "AVP")) + *profile = GST_WFD_RTSP_PROFILE_AVP; + if (g_strrstr (msg->tcp_ports->profile, "SAVP")) + *profile = GST_WFD_RTSP_PROFILE_SAVP; + if (g_strrstr (msg->tcp_ports->profile, "UDP;unicast")) + *lowertrans = 
GST_WFD_RTSP_LOWER_TRANS_UDP; + if (g_strrstr (msg->tcp_ports->profile, "UDP;multicast")) + *lowertrans = GST_WFD_RTSP_LOWER_TRANS_UDP_MCAST; + if (g_strrstr (msg->tcp_ports->profile, "TCP;unicast")) + *lowertrans = GST_WFD_RTSP_LOWER_TRANS_TCP; + if (g_strrstr (msg->tcp_ports->profile, "HTTP")) + *lowertrans = GST_WFD_RTSP_LOWER_TRANS_HTTP; + + *rtp_port0 = msg->tcp_ports->rtp_port0; + *rtp_port1 = msg->tcp_ports->rtp_port1; + + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_set_buffer_length (GstWFDMessage *msg, guint buf_len) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + + if (!msg->buf_len) + msg->buf_len = g_new0 (GstWFDBufferLen, 1); + msg->buf_len->buf_len = buf_len; + + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_buffer_length (GstWFDMessage *msg, guint *buf_len) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (msg->buf_len != NULL, GST_WFD_EINVAL); + + *buf_len = msg->buf_len->buf_len; + + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_audio_playback_status (GstWFDMessage *msg, + guint *bufsize, + guint64 *pts) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (msg->audio_status != NULL, GST_WFD_EINVAL); + + *bufsize = msg->audio_status->aud_bufsize; + *pts = msg->audio_status->aud_pts; + + return GST_WFD_OK; +} + +GstWFDResult +gst_wfd_message_get_video_playback_status (GstWFDMessage *msg, + guint *bufsize, + guint64 *pts) +{ + g_return_val_if_fail (msg != NULL, GST_WFD_EINVAL); + g_return_val_if_fail (msg->video_status != NULL, GST_WFD_EINVAL); + + *bufsize = msg->video_status->vid_bufsize; + *pts = msg->video_status->vid_pts; + + return GST_WFD_OK; +} diff --cc subprojects/gst-rtsp-server/gst/rtsp-server/rtsp-client-wfd.c index f98138d5f8,0000000000..9cdd480125 mode 100644,000000..100644 --- a/subprojects/gst-rtsp-server/gst/rtsp-server/rtsp-client-wfd.c +++ b/subprojects/gst-rtsp-server/gst/rtsp-server/rtsp-client-wfd.c @@@ -1,4373 -1,0 +1,4368 
@@@ +/* GStreamer + * Copyright (C) 2015 Samsung Electronics Hyunjun Ko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ +/* + * SECTION:rtsp-client + * @short_description: A client connection state + * @see_also: #GstRTSPServer, #GstRTSPThreadPool + * + * The client object handles the connection with a client for as long as a TCP + * connection is open. + * + * A #GstRTSPWFDClient is created by #GstRTSPServer when a new connection is + * accepted and it inherits the #GstRTSPMountPoints, #GstRTSPSessionPool, + * #GstRTSPAuth and #GstRTSPThreadPool from the server. + * + * The client connection should be configured with the #GstRTSPConnection using + * gst_rtsp_wfd_client_set_connection() before it can be attached to a #GMainContext + * using gst_rtsp_wfd_client_attach(). From then on the client will handle requests + * on the connection. + * + * Use gst_rtsp_wfd_client_session_filter() to iterate or modify all the + * #GstRTSPSession objects managed by the client object. 
+ * + * Last reviewed on 2013-07-11 (1.0.0) + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include +#include +#include +#include +#include + +#include "rtsp-client-wfd.h" +#include "rtsp-media-factory-wfd.h" +#include "rtsp-sdp.h" +#include "rtsp-params.h" + - #define GST_RTSP_WFD_CLIENT_GET_PRIVATE(obj) \ - (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_RTSP_WFD_CLIENT, GstRTSPWFDClientPrivate)) - +typedef struct _GstRTSPClientRTPStats GstRTSPClientRTPStats; + +struct _GstRTSPClientRTPStats +{ + GstRTSPStream *stream; + guint64 last_sent_bytes; + guint64 sent_bytes; + guint last_seqnum; + guint seqnum; + + /* Info in RR (Receiver Report) */ + guint8 fraction_lost; + guint32 cumulative_lost_num; + guint16 max_seqnum; + guint32 arrival_jitter; + guint32 lsr; + guint32 dlsr; + guint32 rtt; + guint resent_packets; +}; + +typedef enum { + WFD_TS_UDP, + WFD_TS_TCP +} WFDTSMode; + +typedef enum { + WFD_TS_REP_AUDIO, + WFD_TS_REP_VIDEO +} WFDTSReportType; + +struct _GstRTSPWFDClientPrivate +{ + GstRTSPWFDClientSendFunc send_func; /* protected by send_lock */ + gpointer send_data; /* protected by send_lock */ + GDestroyNotify send_notify; /* protected by send_lock */ + + /* used to cache the media in the last requested DESCRIBE so that + * we can pick it up in the next SETUP immediately */ + gchar *path; + GstRTSPMedia *media; + + GList *transports; + GList *sessions; + + guint8 m1_done; + guint8 m3_done; + guint8 m4_done; + + /* Host's URL info */ + gchar *host_address; + + /* Parameters for WIFI-DISPLAY */ + guint caCodec; + guint8 audio_codec; + guint cFreq; + guint cChanels; + guint cBitwidth; + guint caLatency; + guint cvCodec; + guint8 video_codec; + guint cNative; + guint64 cNativeResolution; + guint64 video_resolution_supported; + gint video_native_resolution; + guint64 cCEAResolution; + guint64 cVESAResolution; + guint64 cHHResolution; + guint cProfile; + guint cLevel; + guint32 cMaxHeight; + guint32 cMaxWidth; + guint32 
cFramerate; + guint32 cInterleaved; + guint32 cmin_slice_size; + guint32 cslice_enc_params; + guint cframe_rate_control; + guint cvLatency; + guint ctrans; + guint cprofile; + guint clowertrans; + guint32 crtp_port0; + guint32 crtp_port1; + + gboolean direct_streaming_supported; + gint direct_streaming_state; + guint8 direct_detected_video_codec; + guint8 direct_detected_audio_codec; + + gboolean protection_enabled; + GstWFDHDCPProtection hdcp_version; + guint32 hdcp_tcpport; + + gboolean edid_supported; + guint32 edid_hres; + guint32 edid_vres; + + gboolean keep_alive_flag; + GMutex keep_alive_lock; + + /* RTP statistics */ + GstRTSPClientRTPStats stats; + GMutex stats_lock; + guint stats_timer_id; + gboolean rtcp_stats_enabled; + + gchar *sink_user_agent; + guint ctrans_tcp; + guint cprofile_tcp; + guint clowertrans_tcp; + guint32 crtp_port0_tcp; + guint32 crtp_port1_tcp; + guint buf_len; + WFDTSMode ts_mode; + WFDTSReportType report_type; + GstRTSPWatch *datawatch; + guint datawatchid; + GstRTSPConnection *data_conn; + gchar *uristr; + GMutex tcp_send_lock; + + /* enable or disable R2 features */ + gboolean wfd2_mode; + gint wfd2_supported; + gboolean coupling_mode; + + guint coupled_sink_status; + gchar *coupled_sink_address; + gboolean coupled_sink_supported; +}; + +#define DEFAULT_WFD_TIMEOUT 60 +#define WFD_MOUNT_POINT "/wfd1.0/streamid=0" + +enum +{ + SIGNAL_WFD_OPTIONS_REQUEST, + SIGNAL_WFD_GET_PARAMETER_REQUEST, + SIGNAL_WFD_KEEP_ALIVE_FAIL, + SIGNAL_WFD_PLAYING_DONE, + SIGNAL_WFD_RTP_STATS, + SIGNAL_WFD_M3_REQ_MSG, + SIGNAL_WFD_M3_RES_MSG, + SIGNAL_WFD_M4_REQ_MSG, + SIGNAL_WFD_SET_PARAM_MSG, + SIGNAL_WFD_LAST +}; + +GST_DEBUG_CATEGORY_STATIC (rtsp_wfd_client_debug); +#define GST_CAT_DEFAULT rtsp_wfd_client_debug + +static guint gst_rtsp_client_wfd_signals[SIGNAL_WFD_LAST] = { 0 }; + +static void gst_rtsp_wfd_client_get_property (GObject * object, guint propid, + GValue * value, GParamSpec * pspec); +static void gst_rtsp_wfd_client_set_property (GObject * 
object, guint propid, + const GValue * value, GParamSpec * pspec); +static void gst_rtsp_wfd_client_finalize (GObject * obj); + +static gboolean handle_wfd_options_request (GstRTSPClient * client, + GstRTSPContext * ctx, GstRTSPVersion version); +static gboolean handle_wfd_set_param_request (GstRTSPClient * client, + GstRTSPContext * ctx); +static gboolean handle_wfd_get_param_request (GstRTSPClient * client, + GstRTSPContext * ctx); + +static void send_generic_wfd_response (GstRTSPWFDClient * client, + GstRTSPStatusCode code, GstRTSPContext * ctx); +static gchar *wfd_make_path_from_uri (GstRTSPClient * client, + const GstRTSPUrl * uri); +static void wfd_options_request_done (GstRTSPWFDClient * client, + GstRTSPContext * ctx); +static void wfd_get_param_request_done (GstRTSPWFDClient * client, + GstRTSPContext * ctx); +static void handle_wfd_response (GstRTSPClient * client, GstRTSPContext * ctx); +static void handle_wfd_play (GstRTSPClient * client, GstRTSPContext * ctx); +static void wfd_set_keep_alive_condition (GstRTSPWFDClient * client); +static gboolean wfd_ckeck_keep_alive_response (gpointer userdata); +static gboolean keep_alive_condition (gpointer userdata); +static gboolean wfd_configure_client_media (GstRTSPClient * client, + GstRTSPMedia * media, GstRTSPStream * stream, GstRTSPContext * ctx); + +GstRTSPResult prepare_trigger_request (GstRTSPWFDClient * client, + GstRTSPMessage * request, GstWFDTriggerType trigger_type, gchar * url); + +GstRTSPResult +prepare_response (GstRTSPWFDClient * client, GstRTSPMessage * request, + GstRTSPMessage * response, GstRTSPMethod method); + +static GstRTSPResult handle_M1_message (GstRTSPWFDClient * client); +static GstRTSPResult handle_M3_message (GstRTSPWFDClient * client); +static GstRTSPResult handle_M4_message (GstRTSPWFDClient * client); +static GstRTSPResult handle_M16_message (GstRTSPWFDClient * client); + +static GstRTSPResult handle_M4_direct_streaming_message (GstRTSPWFDClient * client); + - G_DEFINE_TYPE 
(GstRTSPWFDClient, gst_rtsp_wfd_client, GST_TYPE_RTSP_CLIENT); ++G_DEFINE_TYPE_WITH_PRIVATE (GstRTSPWFDClient, gst_rtsp_wfd_client, GST_TYPE_RTSP_CLIENT); + +static void +gst_rtsp_wfd_client_class_init (GstRTSPWFDClientClass * klass) +{ + GObjectClass *gobject_class; + GstRTSPClientClass *rtsp_client_class; + - g_type_class_add_private (klass, sizeof (GstRTSPWFDClientPrivate)); - + gobject_class = G_OBJECT_CLASS (klass); + rtsp_client_class = GST_RTSP_CLIENT_CLASS (klass); + + gobject_class->get_property = gst_rtsp_wfd_client_get_property; + gobject_class->set_property = gst_rtsp_wfd_client_set_property; + gobject_class->finalize = gst_rtsp_wfd_client_finalize; + + //klass->create_sdp = create_sdp; + //klass->configure_client_transport = default_configure_client_transport; + //klass->params_set = default_params_set; + //klass->params_get = default_params_get; + + rtsp_client_class->handle_options_request = handle_wfd_options_request; + rtsp_client_class->handle_set_param_request = handle_wfd_set_param_request; + rtsp_client_class->handle_get_param_request = handle_wfd_get_param_request; + rtsp_client_class->make_path_from_uri = wfd_make_path_from_uri; + rtsp_client_class->configure_client_media = wfd_configure_client_media; + + rtsp_client_class->handle_response = handle_wfd_response; + rtsp_client_class->play_request = handle_wfd_play; + + gst_rtsp_client_wfd_signals[SIGNAL_WFD_OPTIONS_REQUEST] = + g_signal_new ("wfd-options-request", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPWFDClientClass, + wfd_options_request), NULL, NULL, g_cclosure_marshal_VOID__POINTER, + G_TYPE_NONE, 1, GST_TYPE_RTSP_CONTEXT); + + gst_rtsp_client_wfd_signals[SIGNAL_WFD_GET_PARAMETER_REQUEST] = + g_signal_new ("wfd-get-parameter-request", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPWFDClientClass, + wfd_get_param_request), NULL, NULL, g_cclosure_marshal_VOID__POINTER, + G_TYPE_NONE, 1, GST_TYPE_RTSP_CONTEXT); + + 
gst_rtsp_client_wfd_signals[SIGNAL_WFD_KEEP_ALIVE_FAIL] = + g_signal_new ("wfd-keep-alive-fail", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPWFDClientClass, + wfd_keep_alive_fail), NULL, NULL, g_cclosure_marshal_generic, + G_TYPE_NONE, 0, G_TYPE_NONE); + + gst_rtsp_client_wfd_signals[SIGNAL_WFD_PLAYING_DONE] = + g_signal_new ("wfd-playing-done", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPWFDClientClass, + wfd_playing_done), NULL, NULL, g_cclosure_marshal_generic, + G_TYPE_NONE, 0, G_TYPE_NONE); + + gst_rtsp_client_wfd_signals[SIGNAL_WFD_RTP_STATS] = + g_signal_new ("wfd-rtp-stats", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPWFDClientClass, + wfd_rtp_stats), NULL, NULL, g_cclosure_marshal_generic, + G_TYPE_NONE, 1, GST_TYPE_STRUCTURE); + + gst_rtsp_client_wfd_signals[SIGNAL_WFD_M3_REQ_MSG] = + g_signal_new ("wfd-m3-request-msg", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPWFDClientClass, + wfd_handle_m3_req_msg), NULL, NULL, g_cclosure_marshal_generic, + G_TYPE_STRING, 1, G_TYPE_STRING); + + gst_rtsp_client_wfd_signals[SIGNAL_WFD_M3_RES_MSG] = + g_signal_new ("wfd-m3-response-msg", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPWFDClientClass, + wfd_handle_m3_res_msg), NULL, NULL, g_cclosure_marshal_generic, + G_TYPE_NONE, 1, G_TYPE_STRING); + + gst_rtsp_client_wfd_signals[SIGNAL_WFD_M4_REQ_MSG] = + g_signal_new ("wfd-m4-request-msg", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPWFDClientClass, + wfd_handle_m4_req_msg), NULL, NULL, g_cclosure_marshal_generic, + G_TYPE_STRING, 1, G_TYPE_STRING); + + gst_rtsp_client_wfd_signals[SIGNAL_WFD_SET_PARAM_MSG] = + g_signal_new ("wfd-set-param-msg", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPWFDClientClass, + wfd_handle_set_param_msg), NULL, NULL, g_cclosure_marshal_generic, + G_TYPE_NONE, 1, G_TYPE_STRING); + + 
klass->wfd_options_request = wfd_options_request_done; + klass->wfd_get_param_request = wfd_get_param_request_done; + klass->configure_client_media = wfd_configure_client_media; + + GST_DEBUG_CATEGORY_INIT (rtsp_wfd_client_debug, "rtspwfdclient", 0, + "GstRTSPWFDClient"); +} + +static void +gst_rtsp_wfd_client_init (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_if_fail (priv != NULL); + + client->priv = priv; + priv->protection_enabled = FALSE; + priv->video_native_resolution = GST_WFD_VIDEO_CEA_RESOLUTION; + priv->video_resolution_supported = GST_WFD_CEA_640x480P60; + priv->audio_codec = GST_WFD_AUDIO_AAC; + priv->keep_alive_flag = FALSE; + + g_mutex_init (&priv->keep_alive_lock); + g_mutex_init (&priv->stats_lock); + + priv->host_address = NULL; + + priv->stats_timer_id = -1; + priv->rtcp_stats_enabled = FALSE; + memset (&priv->stats, 0x00, sizeof (GstRTSPClientRTPStats)); + + priv->direct_streaming_supported = FALSE; + priv->direct_streaming_state = 0; + + priv->sink_user_agent = NULL; + + priv->ts_mode = WFD_TS_UDP; + priv->report_type = WFD_TS_REP_AUDIO; + + priv->wfd2_supported = 0; + priv->coupled_sink_address = NULL; + + g_mutex_init (&priv->tcp_send_lock); + GST_INFO_OBJECT (client, "Client is initialized"); +} + +/* A client is finalized when the connection is broken */ +static void +gst_rtsp_wfd_client_finalize (GObject * obj) +{ + GstRTSPWFDClient *client = GST_RTSP_WFD_CLIENT (obj); - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_if_fail (GST_IS_RTSP_WFD_CLIENT (obj)); + g_return_if_fail (priv != NULL); + + GST_INFO ("finalize client %p", client); + + if (priv->host_address) + g_free (priv->host_address); + + if (priv->stats_timer_id > 0) + g_source_remove 
(priv->stats_timer_id); + + if (priv->sink_user_agent) { + g_free (priv->sink_user_agent); + priv->sink_user_agent = NULL; + } + + g_mutex_clear (&priv->keep_alive_lock); + g_mutex_clear (&priv->stats_lock); + g_mutex_clear (&priv->tcp_send_lock); + G_OBJECT_CLASS (gst_rtsp_wfd_client_parent_class)->finalize (obj); +} + +static void +gst_rtsp_wfd_client_get_property (GObject * object, guint propid, + GValue * value, GParamSpec * pspec) +{ + //GstRTSPWFDClient *client = GST_RTSP_WFD_CLIENT (object); + + switch (propid) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, propid, pspec); + } +} + +static void +gst_rtsp_wfd_client_set_property (GObject * object, guint propid, + const GValue * value, GParamSpec * pspec) +{ + //GstRTSPWFDClient *client = GST_RTSP_WFD_CLIENT (object); + + switch (propid) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, propid, pspec); + } +} + +/** + * gst_rtsp_wfd_client_new: + * + * Create a new #GstRTSPWFDClient instance. + * + * Returns: a new #GstRTSPWFDClient + */ +GstRTSPWFDClient * +gst_rtsp_wfd_client_new (void) +{ + GstRTSPWFDClient *result; + + result = g_object_new (GST_TYPE_RTSP_WFD_CLIENT, NULL); + + return result; +} + +void +gst_rtsp_wfd_client_start_wfd (GstRTSPWFDClient * client) +{ + GstRTSPResult res = GST_RTSP_OK; + GST_INFO_OBJECT (client, "gst_rtsp_wfd_client_start_wfd"); + + res = handle_M1_message (client); + if (res < GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "handle_M1_message failed : %d", res); + } + + return; +} + +static gboolean +wfd_display_rtp_stats (gpointer userdata) +{ + guint16 seqnum = 0; + guint64 bytes = 0; + + GstRTSPWFDClient *client = NULL; + GstRTSPWFDClientPrivate *priv = NULL; + + client = (GstRTSPWFDClient *) userdata; - priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ priv = gst_rtsp_wfd_client_get_instance_private (client); + + if (!priv) { + GST_ERROR ("No priv"); + return FALSE; + } + + g_mutex_lock (&priv->stats_lock); + + seqnum = gst_rtsp_stream_get_current_seqnum 
(priv->stats.stream); + bytes = gst_rtsp_stream_get_udp_sent_bytes (priv->stats.stream); + + GST_INFO ("----------------------------------------------------\n"); + GST_INFO ("Sent RTP packets : %d", seqnum - priv->stats.last_seqnum); - GST_INFO ("Sent Bytes of RTP packets : %lld bytes", ++ GST_INFO ("Sent Bytes of RTP packets : %" G_GUINT64_FORMAT " bytes", + bytes - priv->stats.last_sent_bytes); + + priv->stats.last_seqnum = seqnum; + priv->stats.last_sent_bytes = bytes; + + if (priv->rtcp_stats_enabled) { + GST_INFO ("Fraction Lost: %d", priv->stats.fraction_lost); + GST_INFO ("Cumulative number of packets lost: %d", + priv->stats.cumulative_lost_num); + GST_INFO ("Extended highest sequence number received: %d", + priv->stats.max_seqnum); + GST_INFO ("Interarrival Jitter: %d", priv->stats.arrival_jitter); + GST_INFO ("Round trip time : %d", priv->stats.rtt); + } + + GST_INFO ("----------------------------------------------------\n"); + + g_mutex_unlock (&priv->stats_lock); + + return TRUE; +} + +static void +on_rtcp_stats (GstRTSPStream * stream, GstStructure * stats, + GstRTSPClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + guint fraction_lost, exthighestseq, jitter, lsr, dlsr, rtt; + gint packetslost; + + if (!priv) + return; + + g_mutex_lock (&priv->stats_lock); + + gst_structure_get_uint (stats, "rb-fractionlost", &fraction_lost); + gst_structure_get_int (stats, "rb-packetslost", &packetslost); + gst_structure_get_uint (stats, "rb-exthighestseq", &exthighestseq); + gst_structure_get_uint (stats, "rb-jitter", &jitter); + gst_structure_get_uint (stats, "rb-lsr", &lsr); + gst_structure_get_uint (stats, "rb-dlsr", &dlsr); + gst_structure_get_uint (stats, "rb-round-trip", &rtt); + + if (!priv->rtcp_stats_enabled) + priv->rtcp_stats_enabled = TRUE; + + priv->stats.stream = stream; + priv->stats.fraction_lost = (guint8) 
fraction_lost; + priv->stats.cumulative_lost_num += (guint32) fraction_lost; + priv->stats.max_seqnum = (guint16) exthighestseq; + priv->stats.arrival_jitter = (guint32) jitter; + priv->stats.lsr = (guint32) lsr; + priv->stats.dlsr = (guint32) dlsr; + priv->stats.rtt = (guint32) rtt; + + g_mutex_unlock (&priv->stats_lock); + g_signal_emit (client, gst_rtsp_client_wfd_signals[SIGNAL_WFD_RTP_STATS], 0, + stats); +} + +static gboolean +wfd_configure_client_media (GstRTSPClient * client, GstRTSPMedia * media, + GstRTSPStream * stream, GstRTSPContext * ctx) +{ + if (media) { - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + priv->media = media; + } + if (stream) { - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + if (priv) + priv->stats.stream = stream; + g_signal_connect (stream, "rtcp-statistics", (GCallback) on_rtcp_stats, + client); + } + + return GST_RTSP_CLIENT_CLASS (gst_rtsp_wfd_client_parent_class)-> + configure_client_media (client, media, stream, ctx); +} + +static void +wfd_options_request_done (GstRTSPWFDClient * client, GstRTSPContext * ctx) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPWFDClientClass *klass = GST_RTSP_WFD_CLIENT_GET_CLASS (client); + + g_return_if_fail (klass != NULL); + + GST_INFO_OBJECT (client, "M2 done.."); + + res = handle_M3_message (client); + if (res < GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "handle_M3_message failed : %d", res); + } + + if (klass->prepare_resource) { + klass->prepare_resource (client, ctx); + } + + return; +} + +static void +wfd_get_param_request_done (GstRTSPWFDClient * client, GstRTSPContext * ctx) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = 
gst_rtsp_wfd_client_get_instance_private (client); + GstRTSPWFDClientClass *klass = GST_RTSP_WFD_CLIENT_GET_CLASS (client); + + g_return_if_fail (priv != NULL && klass != NULL); + + priv->m3_done = TRUE; + GST_INFO_OBJECT (client, "M3 done.."); + + res = handle_M4_message (client); + if (res < GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "handle_M4_message failed : %d", res); + } + + if (klass->confirm_resource) { + klass->confirm_resource (client, ctx); + } + + return; +} + +static guint +wfd_get_preferred_audio_codec (guint8 srcAudioCodec, guint sinkAudioCodec) +{ + int i = 0; + guint codec = 0; + for (i = 0; i < 8; i++) { + if (((sinkAudioCodec << i) & 0x80) + && ((srcAudioCodec << i) & 0x80)) { + codec = (0x01 << (7 - i)); + break; + } + } + return codec; +} + +static guint +wfd_get_preferred_video_codec (guint8 srcVideoCodec, guint sinkVideoCodec) +{ + int i = 0; + guint codec = 0; + for (i = 0; i < 8; i++) { + if (((sinkVideoCodec << i) & 0x80) + && ((srcVideoCodec << i) & 0x80)) { + codec = (0x01 << (7 - i)); + break; + } + } + return codec; +} + +static guint64 +wfd_get_preferred_resolution (guint64 srcResolution, + guint64 sinkResolution, + GstWFDVideoNativeResolution native, + guint32 * cMaxWidth, + guint32 * cMaxHeight, guint32 * cFramerate, guint32 * interleaved) +{ + int i = 0; + guint64 resolution = 0; + for (i = 0; i < 32; i++) { + if (((sinkResolution << i) & 0x80000000) + && ((srcResolution << i) & 0x80000000)) { + resolution = ((guint64) 0x00000001 << (31 - i)); + break; + } + } + switch (native) { + case GST_WFD_VIDEO_CEA_RESOLUTION: + { + switch (resolution) { + case GST_WFD_CEA_640x480P60: + *cMaxWidth = 640; + *cMaxHeight = 480; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_CEA_720x480P60: + *cMaxWidth = 720; + *cMaxHeight = 480; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_CEA_720x480I60: + *cMaxWidth = 720; + *cMaxHeight = 480; + *cFramerate = 60; + *interleaved = 1; + break; + case GST_WFD_CEA_720x576P50: 
+ *cMaxWidth = 720; + *cMaxHeight = 576; + *cFramerate = 50; + *interleaved = 0; + break; + case GST_WFD_CEA_720x576I50: + *cMaxWidth = 720; + *cMaxHeight = 576; + *cFramerate = 50; + *interleaved = 1; + break; + case GST_WFD_CEA_1280x720P30: + *cMaxWidth = 1280; + *cMaxHeight = 720; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_CEA_1280x720P60: + *cMaxWidth = 1280; + *cMaxHeight = 720; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_CEA_1920x1080P30: + *cMaxWidth = 1920; + *cMaxHeight = 1080; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_CEA_1920x1080P60: + *cMaxWidth = 1920; + *cMaxHeight = 1080; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_CEA_1920x1080I60: + *cMaxWidth = 1920; + *cMaxHeight = 1080; + *cFramerate = 60; + *interleaved = 1; + break; + case GST_WFD_CEA_1280x720P25: + *cMaxWidth = 1280; + *cMaxHeight = 720; + *cFramerate = 25; + *interleaved = 0; + break; + case GST_WFD_CEA_1280x720P50: + *cMaxWidth = 1280; + *cMaxHeight = 720; + *cFramerate = 50; + *interleaved = 0; + break; + case GST_WFD_CEA_1920x1080P25: + *cMaxWidth = 1920; + *cMaxHeight = 1080; + *cFramerate = 25; + *interleaved = 0; + break; + case GST_WFD_CEA_1920x1080P50: + *cMaxWidth = 1920; + *cMaxHeight = 1080; + *cFramerate = 50; + *interleaved = 0; + break; + case GST_WFD_CEA_1920x1080I50: + *cMaxWidth = 1920; + *cMaxHeight = 1080; + *cFramerate = 50; + *interleaved = 1; + break; + case GST_WFD_CEA_1280x720P24: + *cMaxWidth = 1280; + *cMaxHeight = 720; + *cFramerate = 24; + *interleaved = 0; + break; + case GST_WFD_CEA_1920x1080P24: + *cMaxWidth = 1920; + *cMaxHeight = 1080; + *cFramerate = 24; + *interleaved = 0; + break; + default: + *cMaxWidth = 0; + *cMaxHeight = 0; + *cFramerate = 0; + *interleaved = 0; + break; + } + } + break; + case GST_WFD_VIDEO_VESA_RESOLUTION: + { + switch (resolution) { + case GST_WFD_VESA_800x600P30: + *cMaxWidth = 800; + *cMaxHeight = 600; + *cFramerate = 30; + *interleaved = 0; + 
break; + case GST_WFD_VESA_800x600P60: + *cMaxWidth = 800; + *cMaxHeight = 600; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1024x768P30: + *cMaxWidth = 1024; + *cMaxHeight = 768; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1024x768P60: + *cMaxWidth = 1024; + *cMaxHeight = 768; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1152x864P30: + *cMaxWidth = 1152; + *cMaxHeight = 864; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1152x864P60: + *cMaxWidth = 1152; + *cMaxHeight = 864; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1280x768P30: + *cMaxWidth = 1280; + *cMaxHeight = 768; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1280x768P60: + *cMaxWidth = 1280; + *cMaxHeight = 768; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1280x800P30: + *cMaxWidth = 1280; + *cMaxHeight = 800; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1280x800P60: + *cMaxWidth = 1280; + *cMaxHeight = 800; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1360x768P30: + *cMaxWidth = 1360; + *cMaxHeight = 768; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1360x768P60: + *cMaxWidth = 1360; + *cMaxHeight = 768; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1366x768P30: + *cMaxWidth = 1366; + *cMaxHeight = 768; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1366x768P60: + *cMaxWidth = 1366; + *cMaxHeight = 768; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1280x1024P30: + *cMaxWidth = 1280; + *cMaxHeight = 1024; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1280x1024P60: + *cMaxWidth = 1280; + *cMaxHeight = 1024; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1400x1050P30: + *cMaxWidth = 1400; + *cMaxHeight = 1050; + *cFramerate = 30; + *interleaved = 0; + break; + 
case GST_WFD_VESA_1400x1050P60: + *cMaxWidth = 1400; + *cMaxHeight = 1050; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1440x900P30: + *cMaxWidth = 1440; + *cMaxHeight = 900; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1440x900P60: + *cMaxWidth = 1440; + *cMaxHeight = 900; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1600x900P30: + *cMaxWidth = 1600; + *cMaxHeight = 900; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1600x900P60: + *cMaxWidth = 1600; + *cMaxHeight = 900; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1600x1200P30: + *cMaxWidth = 1600; + *cMaxHeight = 1200; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1600x1200P60: + *cMaxWidth = 1600; + *cMaxHeight = 1200; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1680x1024P30: + *cMaxWidth = 1680; + *cMaxHeight = 1024; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1680x1024P60: + *cMaxWidth = 1680; + *cMaxHeight = 1024; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1680x1050P30: + *cMaxWidth = 1680; + *cMaxHeight = 1050; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1680x1050P60: + *cMaxWidth = 1680; + *cMaxHeight = 1050; + *cFramerate = 60; + *interleaved = 0; + break; + case GST_WFD_VESA_1920x1200P30: + *cMaxWidth = 1920; + *cMaxHeight = 1200; + *cFramerate = 30; + *interleaved = 0; + break; + case GST_WFD_VESA_1920x1200P60: + *cMaxWidth = 1920; + *cMaxHeight = 1200; + *cFramerate = 60; + *interleaved = 0; + break; + default: + *cMaxWidth = 0; + *cMaxHeight = 0; + *cFramerate = 0; + *interleaved = 0; + break; + } + } + break; + case GST_WFD_VIDEO_HH_RESOLUTION: + { + *interleaved = 0; + switch (resolution) { + case GST_WFD_HH_800x480P30: + *cMaxWidth = 800; + *cMaxHeight = 480; + *cFramerate = 30; + break; + case GST_WFD_HH_800x480P60: + *cMaxWidth = 800; + *cMaxHeight = 480; + 
*cFramerate = 60; + break; + case GST_WFD_HH_854x480P30: + *cMaxWidth = 854; + *cMaxHeight = 480; + *cFramerate = 30; + break; + case GST_WFD_HH_854x480P60: + *cMaxWidth = 854; + *cMaxHeight = 480; + *cFramerate = 60; + break; + case GST_WFD_HH_864x480P30: + *cMaxWidth = 864; + *cMaxHeight = 480; + *cFramerate = 30; + break; + case GST_WFD_HH_864x480P60: + *cMaxWidth = 864; + *cMaxHeight = 480; + *cFramerate = 60; + break; + case GST_WFD_HH_640x360P30: + *cMaxWidth = 640; + *cMaxHeight = 360; + *cFramerate = 30; + break; + case GST_WFD_HH_640x360P60: + *cMaxWidth = 640; + *cMaxHeight = 360; + *cFramerate = 60; + break; + case GST_WFD_HH_960x540P30: + *cMaxWidth = 960; + *cMaxHeight = 540; + *cFramerate = 30; + break; + case GST_WFD_HH_960x540P60: + *cMaxWidth = 960; + *cMaxHeight = 540; + *cFramerate = 60; + break; + case GST_WFD_HH_848x480P30: + *cMaxWidth = 848; + *cMaxHeight = 480; + *cFramerate = 30; + break; + case GST_WFD_HH_848x480P60: + *cMaxWidth = 848; + *cMaxHeight = 480; + *cFramerate = 60; + break; + default: + *cMaxWidth = 0; + *cMaxHeight = 0; + *cFramerate = 0; + *interleaved = 0; + break; + } + } + break; + + default: + *cMaxWidth = 0; + *cMaxHeight = 0; + *cFramerate = 0; + *interleaved = 0; + break; + } + return resolution; +} + +static gchar * +wfd_make_path_from_uri (GstRTSPClient * client, const GstRTSPUrl * uri) +{ + gchar *path; + + GST_DEBUG_OBJECT (client, "Got URI host : %s", uri->host); + GST_DEBUG_OBJECT (client, "Got URI abspath : %s", uri->abspath); + + path = g_strdup ("/wfd1.0/streamid=0"); + + return path; +} + +static void +handle_wfd_play (GstRTSPClient * client, GstRTSPContext * ctx) +{ + GstRTSPWFDClient *_client = GST_RTSP_WFD_CLIENT (client); - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_if_fail (priv != NULL); + + wfd_set_keep_alive_condition (_client); + + priv->stats_timer_id = g_timeout_add 
(2000, wfd_display_rtp_stats, _client); + + g_signal_emit (client, + gst_rtsp_client_wfd_signals[SIGNAL_WFD_PLAYING_DONE], 0, NULL); +} + +static gboolean +do_send_data (GstBuffer * buffer, guint8 channel, GstRTSPClient * client) +{ + GstRTSPMessage message = { 0 }; + GstRTSPResult res = GST_RTSP_OK; + GstMapInfo map_info; + guint8 *data; + guint usize; + + gst_rtsp_message_init_data (&message, channel); + + /* FIXME, need some sort of iovec RTSPMessage here */ + if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) + return FALSE; + + gst_rtsp_message_take_body (&message, map_info.data, map_info.size); + + g_mutex_lock (&(GST_RTSP_WFD_CLIENT (client)->priv->tcp_send_lock)); + + gst_rtsp_watch_send_message (GST_RTSP_WFD_CLIENT (client)->priv->datawatch, &message, NULL); + + g_mutex_unlock (&(GST_RTSP_WFD_CLIENT (client)->priv->tcp_send_lock)); + + gst_rtsp_message_steal_body (&message, &data, &usize); + gst_buffer_unmap (buffer, &map_info); + + gst_rtsp_message_unset (&message); + + return res == GST_RTSP_OK; +} +static GstRTSPResult +message_received (GstRTSPWatch * watch, GstRTSPMessage * message, + gpointer user_data) +{ + return gst_rtsp_client_handle_message (GST_RTSP_CLIENT (user_data), message); +} + +static GstRTSPResult +message_sent (GstRTSPWatch * watch, guint cseq, gpointer user_data) +{ + GstRTSPClient *client; + + client = GST_RTSP_CLIENT (user_data); + if(client == NULL) + return GST_RTSP_ERROR; + return GST_RTSP_OK; +} + +static GstRTSPResult +error (GstRTSPWatch * watch, GstRTSPResult result, gpointer user_data) +{ + GstRTSPClient *client = GST_RTSP_CLIENT (user_data); + gchar *str; + + str = gst_rtsp_strresult (result); + GST_INFO ("client %p: received an error %s", client, str); + g_free (str); + + return GST_RTSP_OK; +} +static GstRTSPResult +closed_tcp (GstRTSPWatch * watch, gpointer user_data) +{ + GstRTSPClient *client = GST_RTSP_CLIENT (user_data); + + GST_INFO ("client %p: connection closed", client); + + return GST_RTSP_OK; +} + +static 
GstRTSPResult +error_full_tcp (GstRTSPWatch * watch, GstRTSPResult result, + GstRTSPMessage * message, guint id, gpointer user_data) +{ + GstRTSPClient *client = GST_RTSP_CLIENT (user_data); + gchar *str; + + str = gst_rtsp_strresult (result); + GST_INFO + ("client %p: received an error %s when handling message %p with id %d", + client, str, message, id); + g_free (str); + + return GST_RTSP_OK; +} + +static GstRTSPWatchFuncs watch_funcs_tcp = { + message_received, + message_sent, + closed_tcp, + error, + NULL, + NULL, + error_full_tcp, + NULL +}; +static void +client_watch_notify_tcp (GstRTSPClient * client) +{ + GST_INFO ("client %p: watch destroyed", client); + GST_RTSP_WFD_CLIENT (client)->priv->datawatch = NULL; + GST_RTSP_WFD_CLIENT (client)->priv->data_conn = NULL; +} + +static GstRTSPResult +new_tcp (GstRTSPWFDClient * client) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPConnection *conn = NULL; + GstRTSPConnection *parent_conn = NULL; + GstRTSPUrl *url; + GSource *source; + GMainContext *context; + int conn_retry_remained = 10; + int bsize = -1; + GError *err = NULL; + + /* client's address */ + int ret; + GSocket *tcp_socket = NULL; + GSocketAddress *tcp_socket_addr = NULL; + + /* Get the client connection details */ + parent_conn = gst_rtsp_client_get_connection (GST_RTSP_CLIENT (client)); + url = gst_rtsp_connection_get_url (parent_conn); + if(!url) + return GST_RTSP_ERROR; + + gst_rtsp_url_set_port (url, client->priv->crtp_port0_tcp); + + GST_INFO ("create new connection %p ip %s:%d", client, url->host, url->port); + + /* create a TCP/IP socket */ + if ((tcp_socket = g_socket_new (G_SOCKET_FAMILY_IPV4, G_SOCKET_TYPE_STREAM, G_SOCKET_PROTOCOL_TCP, NULL)) == NULL) { + GST_ERROR ("cannot create socket"); + return GST_RTSP_ERROR; + } + + /* allow immediate reuse of the port */ + ret = g_socket_set_option (tcp_socket, SOL_SOCKET, SO_REUSEADDR, TRUE, NULL); + if (ret == 0) { + GST_ERROR ("cannot change socket options"); + goto failed; + } + + /* bind the 
socket to our source address */ + tcp_socket_addr = g_inet_socket_address_new_from_string (url->host, url->port); + if (!tcp_socket_addr) { + GST_ERROR ("tcp_socket_addr is failed"); + goto failed; + } + + g_socket_set_blocking (tcp_socket, FALSE); + + while (!g_socket_connect (tcp_socket, tcp_socket_addr, NULL, &err)) { + GST_ERROR ("Connection failed... Try again..."); + if (err) { + GST_ERROR (" error: [%s]", err->message); + g_error_free (err); + err = NULL; + } + + if (conn_retry_remained-- == 0) { + GST_ERROR ("Failed to connection finally."); + goto failed; + } + + usleep (100000); + } + + res = gst_rtsp_connection_create_from_socket (tcp_socket, url->host, url->port, NULL, &conn); + if (res < 0) { + GST_ERROR ("gst_rtsp_connection_create_from_socket function is failed"); + goto failed; + } + + /* Set send buffer size to 1024000 */ + if (g_socket_set_option (tcp_socket , SOL_SOCKET, SO_SNDBUF, 1024000, NULL)) + GST_DEBUG_OBJECT (client, "Set send buf size : %d\n", bsize); + else + GST_ERROR_OBJECT (client, "SO_SNDBUF setsockopt failed"); + + /* Get send buffer size */ + if (g_socket_get_option (tcp_socket , SOL_SOCKET, SO_SNDBUF, &bsize, &err)) { + GST_DEBUG_OBJECT (client, "Get send buf size : %d\n", bsize); + } else { + GST_ERROR_OBJECT (client, "SO_SNDBUF getsockopt failed"); + if (err) { + GST_ERROR_OBJECT (client," error: [%s]", err->message); + g_error_free (err); + err = NULL; + } + } + + /* Set TCP no delay */ + if (g_socket_set_option (tcp_socket , IPPROTO_TCP, TCP_NODELAY, TRUE, NULL)) + GST_DEBUG_OBJECT (client, "TCP NO DELAY"); + else + GST_ERROR_OBJECT (client, "TCP_NODELAY setsockopt failed"); + + client->priv->data_conn = conn; + + /* create watch for the connection and attach */ + client->priv->datawatch = gst_rtsp_watch_new (client->priv->data_conn, &watch_funcs_tcp, client, (GDestroyNotify) client_watch_notify_tcp); + GST_DEBUG_OBJECT (client, "data watch : %p", client->priv->datawatch); + /* find the context to add the watch */ + if 
((source = g_main_current_source ())) + context = g_source_get_context (source); + else + context = NULL; + + GST_DEBUG (" source = %p", source); + GST_INFO ("attaching to context %p", context); + client->priv->datawatchid = gst_rtsp_watch_attach (client->priv->datawatch, context); + gst_rtsp_watch_unref (client->priv->datawatch); + g_object_unref (tcp_socket_addr); + return res; + +failed: + g_object_unref (tcp_socket_addr); + g_object_unref (tcp_socket); + + return GST_RTSP_ERROR; +} + +static void +do_keepalive (GstRTSPSession * session) +{ + GST_INFO ("keep session %p alive", session); + gst_rtsp_session_touch (session); +} +static void +map_transport (GstRTSPWFDClient * client, GstRTSPTransport * ct) +{ + switch(client->priv->ctrans) { + case GST_WFD_RTSP_TRANS_RTP: + ct->trans = GST_RTSP_TRANS_RTP; + break; + case GST_WFD_RTSP_TRANS_RDT: + ct->trans = GST_RTSP_TRANS_RDT; + break; + default: + ct->trans = GST_RTSP_TRANS_UNKNOWN; + break; + } + switch(client->priv->cprofile) { + case GST_WFD_RTSP_PROFILE_AVP: + ct->profile = GST_RTSP_PROFILE_AVP; + break; + case GST_WFD_RTSP_PROFILE_SAVP: + ct->profile = GST_RTSP_PROFILE_SAVP; + break; + default: + ct->profile = GST_RTSP_PROFILE_UNKNOWN; + break; + } + switch(client->priv->clowertrans) { + case GST_WFD_RTSP_LOWER_TRANS_UDP: + ct->lower_transport = GST_RTSP_LOWER_TRANS_UDP; + break; + case GST_WFD_RTSP_LOWER_TRANS_UDP_MCAST: + ct->lower_transport = GST_RTSP_LOWER_TRANS_UDP_MCAST; + break; + case GST_WFD_RTSP_LOWER_TRANS_TCP: + ct->lower_transport = GST_RTSP_LOWER_TRANS_TCP; + break; + case GST_WFD_RTSP_LOWER_TRANS_HTTP: + ct->lower_transport = GST_RTSP_LOWER_TRANS_HTTP; + break; + default: + ct->lower_transport = GST_RTSP_LOWER_TRANS_UNKNOWN; + break; + } + + if (client->priv->ts_mode == WFD_TS_TCP) + ct->lower_transport = GST_RTSP_LOWER_TRANS_TCP; +} + +static GstRTSPResult +handle_ts_response (GstRTSPWFDClient * client, GstRTSPContext * ctx) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPTransport *ct; + 
GstRTSPConnection *conn; + GstRTSPUrl *url = NULL; + GList *t = NULL; + GstRTSPStreamTransport *tr = NULL; + GPtrArray *ta = NULL; + + ta = g_ptr_array_new(); + + t = client->priv->transports; + tr = GST_RTSP_STREAM_TRANSPORT (t->data); + g_ptr_array_add (ta, t->data); + + gst_rtsp_stream_transport_set_callbacks (tr, NULL, NULL, NULL, NULL); + gst_rtsp_stream_transport_set_keepalive (tr, NULL, ctx->session, NULL); + + gst_rtsp_transport_new (&ct); + + map_transport (client, ct); + + if (ct->trans != GST_RTSP_TRANS_RTP || ct->profile != GST_RTSP_PROFILE_AVP) { + GST_WARNING_OBJECT (client, "Trans or profile is wrong"); + goto error; + } + if (ct->lower_transport == GST_RTSP_LOWER_TRANS_HTTP || + ct->lower_transport == GST_RTSP_LOWER_TRANS_UNKNOWN) { + GST_WARNING_OBJECT (client, "Lowertrans is wrong"); + goto error; + } + + if (client->priv->ts_mode == WFD_TS_UDP) { + g_print ("\nSwitched to UDP !!!\n"); + /* Free any previous TCP connection */ + if(client->priv->data_conn) + { + gst_rtsp_connection_close (client->priv->data_conn); + gst_rtsp_connection_free(client->priv->data_conn); + if (client->priv->datawatch) { + g_source_destroy ((GSource *)client->priv->datawatch); + } + } + conn = gst_rtsp_client_get_connection (GST_RTSP_CLIENT (client)); + url = gst_rtsp_connection_get_url (conn); + gst_rtsp_url_set_port (url, client->priv->crtp_port0); + ct->destination = g_strdup (url->host); + ct->client_port.min = client->priv->crtp_port0; + if(client->priv->crtp_port1 == 0) + ct->client_port.max = client->priv->crtp_port0 + 1; + else ct->client_port.max = client->priv->crtp_port1; + } else if (client->priv->ts_mode == WFD_TS_TCP) { + res = new_tcp(client); + if(res != GST_RTSP_OK) + goto error; + + conn = gst_rtsp_client_get_connection (GST_RTSP_CLIENT (client)); + url = gst_rtsp_connection_get_url (conn); + ct->destination = g_strdup (url->host); + ct->client_port.min = client->priv->crtp_port0_tcp; + if(client->priv->crtp_port1_tcp == 0) + ct->client_port.max = 
client->priv->crtp_port0_tcp + 1; + else ct->client_port.max = client->priv->crtp_port1_tcp; + } + + gst_rtsp_stream_transport_set_transport (tr, ct); + + GST_DEBUG ("client %p: linking transport", client); + if (client->priv->ts_mode == WFD_TS_TCP) { + g_print ("\nSwitched to TCP !!!\n"); + gst_rtsp_stream_transport_set_callbacks (tr, (GstRTSPSendFunc) do_send_data, + (GstRTSPSendFunc) do_send_data, client, NULL); + } + else if(client->priv->ts_mode == WFD_TS_UDP ) { + g_print ("\nSwitched to UDP !!!\n"); + /* configure keepalive for this transport */ + gst_rtsp_stream_transport_set_keepalive (tr, (GstRTSPKeepAliveFunc) do_keepalive, ctx->session, NULL); + gst_rtsp_stream_transport_set_callbacks (tr, NULL, NULL, client, NULL); + } + + gst_rtsp_media_set_state (client->priv->media, GST_STATE_PLAYING, ta); + + g_ptr_array_free (ta, FALSE); + + return res; + +error: + gst_rtsp_transport_free (ct); + g_ptr_array_free (ta, FALSE); + return GST_RTSP_ERROR; +} + +static void +handle_wfd_response (GstRTSPClient * client, GstRTSPContext * ctx) +{ + GstRTSPResult res = GST_RTSP_OK; + guint8 *data = NULL; + guint size = 0; + GstWFDResult wfd_res; + GstWFDMessage *msg = NULL; + + GstRTSPWFDClient *_client = GST_RTSP_WFD_CLIENT (client); - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_if_fail (priv != NULL); + + GST_INFO_OBJECT (_client, "Handling response.."); + + if (!ctx) { + GST_ERROR_OBJECT (_client, "Context is NULL"); + goto error; + } + + if (!ctx->response) { + GST_ERROR_OBJECT (_client, "Response is NULL"); + goto error; + } + + if (priv->sink_user_agent == NULL) { + gchar *user_agent = NULL; + res = gst_rtsp_message_get_header (ctx->response, GST_RTSP_HDR_USER_AGENT, + &user_agent, 0); + if (res == GST_RTSP_OK) { + priv->sink_user_agent = g_strdup (user_agent); + GST_INFO_OBJECT (_client, "sink user_agent : %s", priv->sink_user_agent); + } 
else { + GST_INFO_OBJECT (_client, "user_agent is NULL and user_agent is optional."); + } + } + + /* parsing the GET_PARAMTER response */ + res = gst_rtsp_message_get_body (ctx->response, (guint8 **) & data, &size); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (_client, "Failed to get body of response..."); + return; + } + + GST_INFO_OBJECT (_client, "Response body is %d", size); + if (size > 0) { + if (!priv->m3_done) { + /* Parse M3 response from sink */ + wfd_res = gst_wfd_message_new (&msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to create wfd message..."); + goto error; + } + + wfd_res = gst_wfd_message_init (msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to init wfd message..."); + goto error; + } + + wfd_res = gst_wfd_message_parse_buffer (data, size, msg); + + GST_ERROR_OBJECT (client, "M3 response server side message body: %s", + gst_wfd_message_as_text (msg)); + + /* Get the audio formats supported by WFDSink */ + if (msg->wfd2_audio_codecs && msg->wfd2_audio_codecs->count > 0) { + priv->wfd2_mode = TRUE; + wfd_res = + gst_wfd_message_get_supported_wfd2_audio_codec (msg, &priv->caCodec, + &priv->cFreq, &priv->cChanels, &priv->cBitwidth, &priv->caLatency); + if (wfd_res != GST_WFD_OK) { + GST_WARNING_OBJECT (client, + "Failed to get wfd support audio formats..."); + goto error; + } + } else if (msg->audio_codecs && msg->audio_codecs->count > 0) { + priv->wfd2_mode = FALSE; + wfd_res = + gst_wfd_message_get_supported_audio_format (msg, &priv->caCodec, + &priv->cFreq, &priv->cChanels, &priv->cBitwidth, &priv->caLatency); + if (wfd_res != GST_WFD_OK) { + GST_WARNING_OBJECT (client, + "Failed to get wfd support audio formats..."); + goto error; + } + } + + if (msg->direct_video_formats) { + priv->direct_streaming_supported = TRUE; + } + + /* Get the Video formats supported by WFDSink */ + if (msg->video_formats && msg->video_formats->count > 0) { + wfd_res = + gst_wfd_message_get_supported_video_format 
(msg, &priv->cvCodec, + &priv->cNative, &priv->cNativeResolution, + (guint64 *) & priv->cCEAResolution, + (guint64 *) & priv->cVESAResolution, + (guint64 *) & priv->cHHResolution, &priv->cProfile, &priv->cLevel, + &priv->cvLatency, &priv->cMaxHeight, &priv->cMaxWidth, + &priv->cmin_slice_size, &priv->cslice_enc_params, + &priv->cframe_rate_control); + if (wfd_res != GST_WFD_OK) { + GST_WARNING_OBJECT (client, + "Failed to get wfd supported video formats..."); + goto error; + } + } + + if (msg->client_rtp_ports) { + /* Get the RTP ports preferred by WFDSink */ + wfd_res = + gst_wfd_message_get_preferred_rtp_ports (msg, &priv->ctrans, + &priv->cprofile, &priv->clowertrans, &priv->crtp_port0, + &priv->crtp_port1); + if (wfd_res != GST_WFD_OK) { + GST_WARNING_OBJECT (client, + "Failed to get wfd preferred RTP ports..."); + goto error; + } + } + if (msg->tcp_ports) { + /* Get the TCP ports preferred by WFDSink */ + wfd_res = + gst_wfd_message_get_preferred_tcp_ports (msg, &priv->ctrans_tcp, + &priv->cprofile_tcp, &priv->clowertrans_tcp, &priv->crtp_port0_tcp, + &priv->crtp_port1_tcp); + if (wfd_res != GST_WFD_OK) { + GST_WARNING_OBJECT (client, + "Failed to get wfd preferred RTP ports..."); + goto error; + } + } + + if (msg->buf_len) { + wfd_res = + gst_wfd_message_get_buffer_length (msg, &priv->buf_len); + if (wfd_res != GST_WFD_OK) { + GST_WARNING_OBJECT (client, + "Failed to get wfd preferred RTP ports..."); + goto error; + } + } + + if (msg->display_edid) { + guint32 edid_block_count = 0; + gchar *edid_payload = NULL; + priv->edid_supported = FALSE; + /* Get the display edid preferred by WFDSink */ + GST_DEBUG_OBJECT (client, "Going to gst_wfd_message_get_display_edid"); + wfd_res = + gst_wfd_message_get_display_edid (msg, &priv->edid_supported, + &edid_block_count, &edid_payload); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to get wfd display edid..."); + goto error; + } + GST_DEBUG_OBJECT (client, " edid supported: %d edid_block_count: %d", 
+ priv->edid_supported, edid_block_count); + if (priv->edid_supported) { + priv->edid_hres = 0; + priv->edid_vres = 0; + priv->edid_hres = + (guint32) (((edid_payload[54 + 4] >> 4) << 8) | edid_payload[54 + + 2]); + priv->edid_vres = + (guint32) (((edid_payload[54 + 7] >> 4) << 8) | edid_payload[54 + + 5]); + GST_DEBUG_OBJECT (client, " edid supported Hres: %d Wres: %d", + priv->edid_hres, priv->edid_vres); + if ((priv->edid_hres < 640) || (priv->edid_vres < 480) + || (priv->edid_hres > 1920) || (priv->edid_vres > 1080)) { + priv->edid_hres = 0; + priv->edid_vres = 0; + priv->edid_supported = FALSE; + GST_WARNING_OBJECT (client, " edid invalid resolutions"); + } + } + /* Release allocated memory */ + g_free (edid_payload); + } + + if (msg->content_protection) { +#if 0 + /*Get the hdcp version and tcp port by WFDSink */ + wfd_res = + gst_wfd_message_get_contentprotection_type (msg, + &priv->hdcp_version, &priv->hdcp_tcpport); + GST_DEBUG ("hdcp version =%d, tcp port = %d", priv->hdcp_version, + priv->hdcp_tcpport); + if (priv->hdcp_version > 0 && priv->hdcp_tcpport > 0) + priv->protection_enabled = TRUE; + + if (wfd_res != GST_WFD_OK) { + GST_WARNING_OBJECT (client, + "Failed to get wfd content protection..."); + goto error; + } +#else + GST_WARNING_OBJECT (client, "Don't use content protection"); +#endif + } + + g_signal_emit (client, + gst_rtsp_client_wfd_signals[SIGNAL_WFD_M3_RES_MSG], 0, data); + + g_signal_emit (_client, + gst_rtsp_client_wfd_signals[SIGNAL_WFD_GET_PARAMETER_REQUEST], 0, + ctx); + } else { + if (g_strrstr((char *)data, "wfd2_buffer_len")) { + GST_DEBUG_OBJECT (_client, "Get TS message responce"); + + /* Parse TS response from sink */ + wfd_res = gst_wfd_message_new (&msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to create wfd message..."); + goto error; + } + + wfd_res = gst_wfd_message_init (msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to init wfd message..."); + goto error; + } + + 
wfd_res = gst_wfd_message_parse_buffer (data, size, msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to parse wfd message..."); + goto error; + } + + wfd_res = gst_wfd_message_get_buffer_length (msg, &_client->priv->buf_len); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to parse wfd message..."); + goto error; + } + + if (GST_RTSP_OK != handle_ts_response (_client, ctx)) { + GST_ERROR_OBJECT (client, "Failed to handle transport switch response"); + goto error; + } + } + /* TODO-WFD: Handle another GET_PARAMETER response with body */ + } + } else if (size == 0) { + if (!priv->m1_done) { + GST_INFO_OBJECT (_client, "M1 response is done"); + priv->m1_done = TRUE; + } else if (!priv->m4_done) { + GST_INFO_OBJECT (_client, "M4 response is done"); + priv->m4_done = TRUE; + /* Checks whether server is 'coupling mode' or not */ + GST_DEBUG_OBJECT (client, "server coupling mode [%d]",priv->coupling_mode ); + if (priv->coupling_mode) { + gst_rtsp_wfd_client_trigger_request (_client, WFD_TRIGGER_TEARDOWN_COUPLING); + } else { + gst_rtsp_wfd_client_trigger_request (_client, WFD_TRIGGER_SETUP); + } + } else { + g_mutex_lock (&priv->keep_alive_lock); + if (priv->keep_alive_flag == FALSE) { + GST_INFO_OBJECT (_client, "M16 response is done"); + priv->keep_alive_flag = TRUE; + } + g_mutex_unlock (&priv->keep_alive_lock); + } + } + + if (msg != NULL) + gst_wfd_message_free(msg); + + return; + +error: + + if (msg != NULL) + gst_wfd_message_free(msg); + + return; +} + +static gboolean +handle_wfd_options_request (GstRTSPClient * client, GstRTSPContext * ctx, GstRTSPVersion version) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPMethod options; + gchar *tmp = NULL; + gchar *str = NULL; + gchar *user_agent = NULL; + + GstRTSPWFDClient *_client = GST_RTSP_WFD_CLIENT (client); + + options = GST_RTSP_OPTIONS | + GST_RTSP_PAUSE | + GST_RTSP_PLAY | + GST_RTSP_SETUP | + GST_RTSP_GET_PARAMETER | GST_RTSP_SET_PARAMETER | GST_RTSP_TEARDOWN; + + if 
(version < GST_RTSP_VERSION_2_0) { + options |= GST_RTSP_RECORD; + options |= GST_RTSP_ANNOUNCE; + } + + str = gst_rtsp_options_as_text (options); + + /*append WFD specific method */ + tmp = g_strdup (", org.wfa.wfd1.0"); + g_strlcat (str, tmp, strlen (tmp) + strlen (str) + 1); + + gst_rtsp_message_init_response (ctx->response, GST_RTSP_STS_OK, + gst_rtsp_status_as_text (GST_RTSP_STS_OK), ctx->request); + + gst_rtsp_message_add_header (ctx->response, GST_RTSP_HDR_PUBLIC, str); + g_free (str); + g_free (tmp); + + str = NULL; + + res = + gst_rtsp_message_get_header (ctx->request, GST_RTSP_HDR_USER_AGENT, + &user_agent, 0); + if (res == GST_RTSP_OK) { + gst_rtsp_message_add_header (ctx->response, GST_RTSP_HDR_USER_AGENT, + user_agent); + } else { + GST_INFO_OBJECT (_client, "user_agent is NULL and user_agent is optional."); + } + + res = gst_rtsp_client_send_message (client, NULL, ctx->response); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "gst_rtsp_client_send_message failed : %d", res); + return FALSE; + } + + GST_DEBUG_OBJECT (client, "Sent M2 response..."); + + g_signal_emit (_client, + gst_rtsp_client_wfd_signals[SIGNAL_WFD_OPTIONS_REQUEST], 0, ctx); + + return TRUE; +} + +static gboolean +handle_wfd_get_param_request (GstRTSPClient * client, GstRTSPContext * ctx) +{ + GstRTSPResult res = GST_RTSP_OK; + guint8 *data = NULL; + guint size = 0; + + GstRTSPWFDClient *_client = GST_RTSP_WFD_CLIENT (client); + + /* parsing the GET_PARAMTER request */ + res = gst_rtsp_message_get_body (ctx->request, (guint8 **) & data, &size); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (_client, "Failed to get body of request..."); + return FALSE; + } + + if (size == 0) { + send_generic_wfd_response (_client, GST_RTSP_STS_OK, ctx); + } else { + /* TODO-WFD: Handle other GET_PARAMETER request from sink */ + } + + return TRUE; +} + +static gboolean +handle_wfd_set_param_request (GstRTSPClient * client, GstRTSPContext * ctx) +{ + GstRTSPResult res = GST_RTSP_OK; + guint8 
*data = NULL;
  guint size = 0;

  GstRTSPWFDClient *_client = GST_RTSP_WFD_CLIENT (client);

  res = gst_rtsp_message_get_body (ctx->request, &data, &size);
  if (res != GST_RTSP_OK)
    goto bad_request;

  if (size == 0) {
    /* no body, keep-alive request */
    send_generic_wfd_response (_client, GST_RTSP_STS_OK, ctx);
  } else {
    if (data != NULL) {
      GST_INFO_OBJECT (_client, "SET_PARAMETER Request : %s(%d)", data, size);
      /* NOTE(review): g_strcmp0() returns 0 on equality, so this branch is
       * taken when the body is NOT "wfd_idr_request"; the SET_PARAM_MSG
       * signal below fires only when the body IS "wfd_idr_request".
       * Confirm this inversion is intended. Both branches reply 200 OK. */
      if (g_strcmp0 ((const gchar *) data, "wfd_idr_request"))
        send_generic_wfd_response (_client, GST_RTSP_STS_OK, ctx);
      else {
        send_generic_wfd_response (_client, GST_RTSP_STS_OK, ctx);
        g_signal_emit (client,
            gst_rtsp_client_wfd_signals[SIGNAL_WFD_SET_PARAM_MSG], 0, data);
      }
    } else {
      /* non-zero size but NULL body pointer: malformed request */
      goto bad_request;
    }
  }

  return TRUE;

  /* ERRORS */
bad_request:
  {
    GST_ERROR ("_client %p: bad request", _client);
    send_generic_wfd_response (_client, GST_RTSP_STS_BAD_REQUEST, ctx);
    return FALSE;
  }
}

/* Dead code (#if 0): parses the sink's advertised RTSP methods from the
 * Allow/Public response headers and checks the WFD-mandatory ones
 * (org.wfa.wfd1.0, SET_PARAMETER, GET_PARAMETER) are present. */
#if 0
static gboolean
gst_rtsp_wfd_client_parse_methods (GstRTSPWFDClient * client,
    GstRTSPMessage * response)
{
  GstRTSPHeaderField field;
  gchar *respoptions;
  gchar **options;
  gint indx = 0;
  gint i;
  gboolean found_wfd_method = FALSE;

  /* reset supported methods */
  client->supported_methods = 0;

  /* Try Allow Header first */
  field = GST_RTSP_HDR_ALLOW;
  while (TRUE) {
    respoptions = NULL;
    gst_rtsp_message_get_header (response, field, &respoptions, indx);
    if (indx == 0 && !respoptions) {
      /* if no Allow header was found then try the Public header... */
      field = GST_RTSP_HDR_PUBLIC;
      gst_rtsp_message_get_header (response, field, &respoptions, indx);
    }
    if (!respoptions)
      break;

    /* If we get here, the server gave a list of supported methods, parse
     * them here. The string is like:
     *
     * OPTIONS, PLAY, SETUP, ...
+ */ + options = g_strsplit (respoptions, ",", 0); + + for (i = 0; options[i]; i++) { + gchar *stripped; + gint method; + + stripped = g_strstrip (options[i]); + method = gst_rtsp_find_method (stripped); + + if (!g_ascii_strcasecmp ("org.wfa.wfd1.0", stripped)) + found_wfd_method = TRUE; + + /* keep bitfield of supported methods */ + if (method != GST_RTSP_INVALID) + client->supported_methods |= method; + } + g_strfreev (options); + + indx++; + } + + if (!found_wfd_method) { + GST_ERROR_OBJECT (client, + "WFD client is not supporting WFD mandatory message : org.wfa.wfd1.0..."); + goto no_required_methods; + } + + /* Checking mandatory method */ + if (!(client->supported_methods & GST_RTSP_SET_PARAMETER)) { + GST_ERROR_OBJECT (client, + "WFD client is not supporting WFD mandatory message : SET_PARAMETER..."); + goto no_required_methods; + } + + /* Checking mandatory method */ + if (!(client->supported_methods & GST_RTSP_GET_PARAMETER)) { + GST_ERROR_OBJECT (client, + "WFD client is not supporting WFD mandatory message : GET_PARAMETER..."); + goto no_required_methods; + } + + if (!(client->supported_methods & GST_RTSP_OPTIONS)) { + GST_INFO_OBJECT (client, "assuming OPTIONS is supported by client..."); + client->supported_methods |= GST_RTSP_OPTIONS; + } + + return TRUE; + +/* ERRORS */ +no_required_methods: + { + GST_ELEMENT_ERROR (client, RESOURCE, OPEN_READ, (NULL), + ("WFD Client does not support mandatory methods.")); + return FALSE; + } +} +#endif + +typedef enum +{ + M1_REQ_MSG, + M1_RES_MSG, + M2_REQ_MSG, + M2_RES_MSG, + M3_REQ_MSG, + M3_RES_MSG, + M4_REQ_MSG, + M4_DS_REQ_MSG, + M4_RES_MSG, + M5_REQ_MSG, + TEARDOWN_TRIGGER, + TEARDOWN_COUPLING_TRIGGER, + PLAY_TRIGGER, + PAUSE_TRIGGER, + TS_REQ_MSG, + TS_REP_REQ_MSG, +} GstWFDMessageType; + +static gboolean +_set_negotiated_audio_codec (GstRTSPWFDClient * client, guint audio_codec) +{ + GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client); + + GstRTSPMediaFactory *factory = NULL; + GstRTSPMountPoints 
*mount_points = NULL; + gchar *path = NULL; + gint matched = 0; + gboolean ret = TRUE; + + if (!(mount_points = gst_rtsp_client_get_mount_points (parent_client))) { + ret = FALSE; + GST_ERROR_OBJECT (client, + "Failed to set negotiated audio codec: no mount points..."); + goto no_mount_points; + } + + path = g_strdup (WFD_MOUNT_POINT); + if (!path) { + ret = FALSE; + GST_ERROR_OBJECT (client, + "Failed to set negotiated audio codec: no path..."); + goto no_path; + } + + if (!(factory = gst_rtsp_mount_points_match (mount_points, path, &matched))) { + GST_ERROR_OBJECT (client, + "Failed to set negotiated audio codec: no factory..."); + ret = FALSE; + goto no_factory; + } + + gst_rtsp_media_factory_wfd_set_audio_codec (factory, audio_codec); + ret = TRUE; + + g_object_unref (factory); + +no_factory: + g_free (path); +no_path: + g_object_unref (mount_points); +no_mount_points: + return ret; +} + +static gboolean +_set_negotiated_video_codec (GstRTSPWFDClient * client, guint video_codec) +{ + GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client); + + GstRTSPMediaFactory *factory = NULL; + GstRTSPMountPoints *mount_points = NULL; + gchar *path = NULL; + gint matched = 0; + gboolean ret = TRUE; + + if (!(mount_points = gst_rtsp_client_get_mount_points (parent_client))) { + ret = FALSE; + GST_ERROR_OBJECT (client, + "Failed to set negotiated video codec: no mount points..."); + goto no_mount_points; + } + + path = g_strdup (WFD_MOUNT_POINT); + if (!path) { + ret = FALSE; + GST_ERROR_OBJECT (client, + "Failed to set negotiated vidoe codec: no path..."); + goto no_path; + } + + if (!(factory = gst_rtsp_mount_points_match (mount_points, path, &matched))) { + GST_ERROR_OBJECT (client, + "Failed to set negotiated vidoe codec: no factory..."); + ret = FALSE; + goto no_factory; + } + + gst_rtsp_media_factory_wfd_set_video_codec (factory, video_codec); + ret = TRUE; + + g_object_unref (factory); + +no_factory: + g_free (path); +no_path: + g_object_unref (mount_points); 
+no_mount_points: + return ret; +} + +static gboolean +_set_negotiated_resolution (GstRTSPWFDClient * client, + guint32 width, guint32 height) +{ + GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client); + + GstRTSPMediaFactory *factory = NULL; + GstRTSPMountPoints *mount_points = NULL; + gchar *path = NULL; + gint matched = 0; + gboolean ret = TRUE; + + if (!(mount_points = gst_rtsp_client_get_mount_points (parent_client))) { + ret = FALSE; + GST_ERROR_OBJECT (client, + "Failed to set negotiated resolution: no mount points..."); + goto no_mount_points; + } + + path = g_strdup (WFD_MOUNT_POINT); + if (!path) { + ret = FALSE; + GST_ERROR_OBJECT (client, + "Failed to set negotiated resolution: no path..."); + goto no_path; + } + + if (!(factory = gst_rtsp_mount_points_match (mount_points, path, &matched))) { + GST_ERROR_OBJECT (client, + "Failed to set negotiated resolution: no factory..."); + ret = FALSE; + goto no_factory; + } + + gst_rtsp_media_factory_wfd_set_negotiated_resolution (factory, width, height); + ret = TRUE; + + g_object_unref (factory); + +no_factory: + g_free (path); +no_path: + g_object_unref (mount_points); +no_mount_points: + return ret; +} + +static void +_set_wfd_message_body (GstRTSPWFDClient * client, GstWFDMessageType msg_type, + gchar ** data, guint * len) +{ + GString *buf = NULL; + GstWFDMessage *msg = NULL; + GstWFDResult wfd_res = GST_WFD_EINVAL; + GstRTSPWFDClientPrivate *priv = NULL; - priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_if_fail (priv != NULL); + + if (msg_type == M3_REQ_MSG) { + /* create M3 request to be sent */ + wfd_res = gst_wfd_message_new (&msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to create wfd message..."); + goto error; + } + + wfd_res = gst_wfd_message_init (msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to init wfd message..."); + goto error; + } + + /* set the supported 
audio formats by the WFD server */ + wfd_res = + gst_wfd_message_set_supported_audio_format (msg, GST_WFD_AUDIO_UNKNOWN, + GST_WFD_FREQ_UNKNOWN, GST_WFD_CHANNEL_UNKNOWN, 0, 0); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set supported audio formats on wfd message..."); + goto error; + } + + /* set the supported Video formats by the WFD server */ + wfd_res = + gst_wfd_message_set_supported_video_format (msg, GST_WFD_VIDEO_UNKNOWN, + GST_WFD_VIDEO_CEA_RESOLUTION, GST_WFD_CEA_UNKNOWN, GST_WFD_CEA_UNKNOWN, + GST_WFD_VESA_UNKNOWN, GST_WFD_HH_UNKNOWN, GST_WFD_H264_UNKNOWN_PROFILE, + GST_WFD_H264_LEVEL_UNKNOWN, 0, 0, 0, 0, 0, 0); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set supported video formats on wfd message..."); + goto error; + } + + /* set wfd2_audio_codecs & wfd2_video_formats if it is supported */ + if (priv->wfd2_supported == 1) { + /* set the supported audio formats by the WFD server for direct streaming */ + wfd_res = + gst_wfd_message_set_supported_wfd2_audio_codec (msg, GST_WFD_AUDIO_UNKNOWN, + GST_WFD_FREQ_UNKNOWN, GST_WFD_CHANNEL_UNKNOWN, 0, 0); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set supported audio formats for direct streaming on wfd message..."); + goto error; + } + + /* set the supported Video formats by the WFD server for direct streaming */ + wfd_res = + gst_wfd_message_set_supported_direct_video_format (msg, GST_WFD_VIDEO_UNKNOWN, + GST_WFD_VIDEO_CEA_RESOLUTION, GST_WFD_CEA_UNKNOWN, GST_WFD_CEA_UNKNOWN, + GST_WFD_VESA_UNKNOWN, GST_WFD_HH_UNKNOWN, GST_WFD_H264_UNKNOWN_PROFILE, + GST_WFD_H264_LEVEL_UNKNOWN, 0, 0, 0, 0, 0, 0); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set supported video formats for direct streaming on wfd message..."); + goto error; + } + } + wfd_res = gst_wfd_message_set_display_edid (msg, 0, 0, NULL); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set display edid type on wfd 
message..."); + goto error; + } + + if (priv->protection_enabled) { + wfd_res = + gst_wfd_message_set_contentprotection_type (msg, GST_WFD_HDCP_NONE, + 0); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set supported content protection type on wfd message..."); + goto error; + } + } + + /* set the preffered RTP ports for the WFD server */ + wfd_res = + gst_wfd_messge_set_preferred_rtp_ports (msg, GST_WFD_RTSP_TRANS_UNKNOWN, + GST_WFD_RTSP_PROFILE_UNKNOWN, GST_WFD_RTSP_LOWER_TRANS_UNKNOWN, 0, 0); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set supported video formats on wfd message..."); + goto error; + } + + /* set the preffered TCP ports for the WFD server */ + wfd_res = + gst_wfd_messge_set_preferred_tcp_ports (msg, GST_WFD_RTSP_TRANS_RTP, + GST_WFD_RTSP_PROFILE_AVP, GST_WFD_RTSP_LOWER_TRANS_UDP, priv->crtp_port0, priv->crtp_port1); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set tcp ports parameter on wfd message..."); + goto error; + } + + /* set the buffer length for the WFD server */ + wfd_res = + gst_wfd_message_set_buffer_length (msg, 0); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set tcp ports parameter on wfd message..."); + goto error; + } + + /* set the coupled sink for the WFD server */ + wfd_res = + gst_wfd_message_set_coupled_sink (msg, GST_WFD_SINK_NOT_COUPLED, NULL, TRUE); + + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set coupled sink parameter on wfd message..."); + goto error; + } + + *data = gst_wfd_message_param_names_as_text (msg); + if (*data == NULL) { + GST_ERROR_OBJECT (client, "Failed to get wfd message as text..."); + goto error; + } else { + gchar *append_data = NULL; + + g_signal_emit (client, gst_rtsp_client_wfd_signals[SIGNAL_WFD_M3_REQ_MSG], + 0, *data, &append_data); + + if (append_data) { + g_free (*data); + *data = append_data; + } + + *len = strlen (*data); + } + } else if (msg_type == 
M4_REQ_MSG) { + GstRTSPUrl *url = NULL; + + GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client); + GstRTSPConnection *connection = + gst_rtsp_client_get_connection (parent_client); + + /* Parameters for the preffered audio formats */ + GstWFDAudioFormats taudiocodec = GST_WFD_AUDIO_UNKNOWN; + GstWFDAudioFreq taudiofreq = GST_WFD_FREQ_UNKNOWN; + GstWFDAudioChannels taudiochannels = GST_WFD_CHANNEL_UNKNOWN; + + /* Parameters for the preffered video formats */ + GstWFDVideoCodecs tvideocodec = GST_WFD_VIDEO_UNKNOWN; + guint64 tcCEAResolution = GST_WFD_CEA_UNKNOWN; + guint64 tcVESAResolution = GST_WFD_VESA_UNKNOWN; + guint64 tcHHResolution = GST_WFD_HH_UNKNOWN; + GstWFDVideoH264Profile tcProfile = GST_WFD_H264_UNKNOWN_PROFILE; + GstWFDVideoH264Level tcLevel = GST_WFD_H264_LEVEL_UNKNOWN; + guint64 resolution_supported = 0; + + url = gst_rtsp_connection_get_url (connection); + if (url == NULL) { + GST_ERROR_OBJECT (client, "Failed to get connection URL"); + return; + } + + /* Logic to negotiate with information of M3 response */ + /* create M4 request to be sent */ + wfd_res = gst_wfd_message_new (&msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to create wfd message..."); + goto error; + } + + wfd_res = gst_wfd_message_init (msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to init wfd message..."); + goto error; + } + + buf = g_string_new (""); + if (buf == NULL) + goto error; + + g_string_append_printf (buf, "rtsp://"); + + if (priv->host_address) { + g_string_append (buf, priv->host_address); + } else { + GST_ERROR_OBJECT (client, "Failed to get host address"); + g_string_free (buf, TRUE); + goto error; + } + + g_string_append_printf (buf, "/wfd1.0/streamid=0"); + wfd_res = + gst_wfd_message_set_presentation_url (msg, g_string_free (buf, FALSE), + NULL); + + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to set presentation url"); + goto error; + } + + if (priv->caCodec != 
GST_WFD_AUDIO_UNKNOWN) { + taudiocodec = wfd_get_preferred_audio_codec (priv->audio_codec, priv->caCodec); + priv->caCodec = taudiocodec; + } + if (!_set_negotiated_audio_codec (client, priv->caCodec)) { + GST_ERROR_OBJECT (client, "Failed to set negotiated " + "audio codec to media factory..."); + } + + if (priv->caCodec != GST_WFD_AUDIO_UNKNOWN) { + if (priv->cFreq & GST_WFD_FREQ_48000) + taudiofreq = GST_WFD_FREQ_48000; + else if (priv->cFreq & GST_WFD_FREQ_44100) + taudiofreq = GST_WFD_FREQ_44100; + priv->cFreq = taudiofreq; + + /* TODO-WFD: Currently only 2 channels is present */ + if (priv->cChanels & GST_WFD_CHANNEL_8) + taudiochannels = GST_WFD_CHANNEL_2; + else if (priv->cChanels & GST_WFD_CHANNEL_6) + taudiochannels = GST_WFD_CHANNEL_2; + else if (priv->cChanels & GST_WFD_CHANNEL_4) + taudiochannels = GST_WFD_CHANNEL_2; + else if (priv->cChanels & GST_WFD_CHANNEL_2) + taudiochannels = GST_WFD_CHANNEL_2; + priv->cChanels = taudiochannels; + } + + if(priv->wfd2_mode) + wfd_res = + gst_wfd_message_set_preferred_wfd2_audio_codec (msg, taudiocodec, taudiofreq, + taudiochannels, priv->cBitwidth, priv->caLatency); + else + wfd_res = + gst_wfd_message_set_preferred_audio_format (msg, taudiocodec, taudiofreq, + taudiochannels, priv->cBitwidth, priv->caLatency); + + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (priv, "Failed to set preffered audio formats..."); + goto error; + } + + /* Set the preffered video formats */ + tvideocodec = wfd_get_preferred_video_codec (priv->video_codec, priv->cvCodec); + GST_INFO_OBJECT (priv, "Set the video formats. 
source codec %d, sink codec %d, Negotiated code %d", + priv->video_codec, priv->cvCodec, tvideocodec); + priv->cvCodec = tvideocodec; + + if (priv->cvCodec != GST_WFD_VIDEO_UNKNOWN) { + priv->cvCodec = GST_WFD_VIDEO_H264; + priv->cProfile = tcProfile = GST_WFD_H264_BASE_PROFILE; + priv->cLevel = tcLevel = GST_WFD_H264_LEVEL_3_1; + + resolution_supported = priv->video_resolution_supported; + + /* TODO-WFD: Need to verify this logic + if(priv->edid_supported) { + if (priv->edid_hres < 1920) resolution_supported = resolution_supported & 0x8C7F; + if (priv->edid_hres < 1280) resolution_supported = resolution_supported & 0x1F; + if (priv->edid_hres < 720) resolution_supported = resolution_supported & 0x01; + } + */ + + if (priv->video_native_resolution == GST_WFD_VIDEO_CEA_RESOLUTION) { + tcCEAResolution = + wfd_get_preferred_resolution (resolution_supported, + priv->cCEAResolution, priv->video_native_resolution, &priv->cMaxWidth, + &priv->cMaxHeight, &priv->cFramerate, &priv->cInterleaved); + GST_DEBUG + ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d", + tcCEAResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate, + priv->cInterleaved); + } else if (priv->video_native_resolution == GST_WFD_VIDEO_VESA_RESOLUTION) { + tcVESAResolution = + wfd_get_preferred_resolution (resolution_supported, + priv->cVESAResolution, priv->video_native_resolution, + &priv->cMaxWidth, &priv->cMaxHeight, &priv->cFramerate, + &priv->cInterleaved); + GST_DEBUG + ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d", + tcVESAResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate, + priv->cInterleaved); + } else if (priv->video_native_resolution == GST_WFD_VIDEO_HH_RESOLUTION) { + tcHHResolution = + wfd_get_preferred_resolution (resolution_supported, + priv->cHHResolution, priv->video_native_resolution, &priv->cMaxWidth, + &priv->cMaxHeight, &priv->cFramerate, 
&priv->cInterleaved); + GST_DEBUG + ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d", + tcHHResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate, + priv->cInterleaved); + } + + if (!_set_negotiated_resolution (client, priv->cMaxWidth, priv->cMaxHeight)) { + GST_ERROR_OBJECT (client, "Failed to set negotiated " + "resolution to media factory..."); + } + } + + if (!_set_negotiated_video_codec (client, priv->cvCodec)) { + GST_ERROR_OBJECT (client, "Failed to set negotiated " + "video format to media factory..."); + } + + wfd_res = + gst_wfd_message_set_preferred_video_format (msg, priv->cvCodec, + priv->video_native_resolution, GST_WFD_CEA_UNKNOWN, tcCEAResolution, + tcVESAResolution, tcHHResolution, tcProfile, tcLevel, priv->cvLatency, + priv->cMaxHeight, priv->cMaxWidth, priv->cmin_slice_size, + priv->cslice_enc_params, priv->cframe_rate_control); + + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to set preffered video formats..."); + goto error; + } + + if (priv->direct_streaming_supported) { + wfd_res = + gst_wfd_message_set_preferred_direct_video_format (msg, priv->cvCodec, + priv->video_native_resolution, GST_WFD_CEA_UNKNOWN, tcCEAResolution, + tcVESAResolution, tcHHResolution, tcProfile, tcLevel, priv->cvLatency, + priv->cMaxHeight, priv->cMaxWidth, priv->cmin_slice_size, + priv->cslice_enc_params, priv->cframe_rate_control); + + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to set preffered video formats for direct streaming..."); + goto error; + } + } + + /* set the preffered RTP ports for the WFD server */ + wfd_res = + gst_wfd_messge_set_preferred_rtp_ports (msg, GST_WFD_RTSP_TRANS_RTP, + GST_WFD_RTSP_PROFILE_AVP, GST_WFD_RTSP_LOWER_TRANS_UDP, + priv->crtp_port0, priv->crtp_port1); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set supported video formats on wfd message..."); + goto error; + } + + *data = 
gst_wfd_message_as_text (msg); + if (*data == NULL) { + GST_ERROR_OBJECT (client, "Failed to get wfd message as text..."); + goto error; + } else { + *len = strlen (*data); + } + } else if (msg_type == M4_DS_REQ_MSG) { + GstRTSPUrl *url = NULL; + + GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client); + GstRTSPConnection *connection = + gst_rtsp_client_get_connection (parent_client); + + /* Parameters for the preffered audio formats */ + GstWFDAudioFreq taudiofreq = GST_WFD_FREQ_UNKNOWN; + GstWFDAudioChannels taudiochannels = GST_WFD_CHANNEL_UNKNOWN; + + /* Parameters for the preffered video formats */ + guint64 tcCEAResolution = GST_WFD_CEA_UNKNOWN; + guint64 tcVESAResolution = GST_WFD_VESA_UNKNOWN; + guint64 tcHHResolution = GST_WFD_HH_UNKNOWN; + GstWFDVideoH264Profile tcProfile; + GstWFDVideoH264Level tcLevel; + guint64 resolution_supported = 0; + + url = gst_rtsp_connection_get_url (connection); + if (url == NULL) { + GST_ERROR_OBJECT (client, "Failed to get connection URL"); + return; + } + + /* create M4 for direct streaming request to be sent */ + wfd_res = gst_wfd_message_new (&msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to create wfd message..."); + goto error; + } + + wfd_res = gst_wfd_message_init (msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to init wfd message..."); + goto error; + } + + buf = g_string_new (""); + if (buf == NULL) + goto error; + + g_string_append_printf (buf, "rtsp://"); + + if (priv->host_address) { + g_string_append (buf, priv->host_address); + } else { + GST_ERROR_OBJECT (client, "Failed to get host address"); + if (buf) g_string_free (buf, TRUE); + goto error; + } + + g_string_append_printf (buf, "/wfd1.0/streamid=0"); + wfd_res = + gst_wfd_message_set_presentation_url (msg, g_string_free (buf, FALSE), + NULL); + + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to set presentation url"); + goto error; + } + + if (priv->cFreq & GST_WFD_FREQ_48000) + 
taudiofreq = GST_WFD_FREQ_48000; + else if (priv->cFreq & GST_WFD_FREQ_44100) + taudiofreq = GST_WFD_FREQ_44100; + priv->cFreq = taudiofreq; + + /* TODO-WFD: Currently only 2 channels is present */ + if (priv->cChanels & GST_WFD_CHANNEL_8) + taudiochannels = GST_WFD_CHANNEL_2; + else if (priv->cChanels & GST_WFD_CHANNEL_6) + taudiochannels = GST_WFD_CHANNEL_2; + else if (priv->cChanels & GST_WFD_CHANNEL_4) + taudiochannels = GST_WFD_CHANNEL_2; + else if (priv->cChanels & GST_WFD_CHANNEL_2) + taudiochannels = GST_WFD_CHANNEL_2; + priv->cChanels = taudiochannels; + + wfd_res = + gst_wfd_message_set_preferred_wfd2_audio_codec (msg, + priv->direct_detected_audio_codec, taudiofreq, + taudiochannels, priv->cBitwidth, priv->caLatency); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (priv, "Failed to set preffered audio formats for direct streaming..."); + goto error; + } + + /* Set the preffered video formats */ + priv->cProfile = tcProfile = GST_WFD_H264_BASE_PROFILE; + priv->cLevel = tcLevel = GST_WFD_H264_LEVEL_3_1; + + resolution_supported = priv->video_resolution_supported; + + /* TODO-WFD: Need to verify this logic + if(priv->edid_supported) { + if (priv->edid_hres < 1920) resolution_supported = resolution_supported & 0x8C7F; + if (priv->edid_hres < 1280) resolution_supported = resolution_supported & 0x1F; + if (priv->edid_hres < 720) resolution_supported = resolution_supported & 0x01; + } + */ + + if (priv->video_native_resolution == GST_WFD_VIDEO_CEA_RESOLUTION) { + tcCEAResolution = + wfd_get_preferred_resolution (resolution_supported, + priv->cCEAResolution, priv->video_native_resolution, &priv->cMaxWidth, + &priv->cMaxHeight, &priv->cFramerate, &priv->cInterleaved); + GST_DEBUG + ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d", + tcCEAResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate, + priv->cInterleaved); + } else if (priv->video_native_resolution == GST_WFD_VIDEO_VESA_RESOLUTION) { + 
tcVESAResolution = + wfd_get_preferred_resolution (resolution_supported, + priv->cVESAResolution, priv->video_native_resolution, + &priv->cMaxWidth, &priv->cMaxHeight, &priv->cFramerate, + &priv->cInterleaved); + GST_DEBUG + ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d", + tcVESAResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate, + priv->cInterleaved); + } else if (priv->video_native_resolution == GST_WFD_VIDEO_HH_RESOLUTION) { + tcHHResolution = + wfd_get_preferred_resolution (resolution_supported, + priv->cHHResolution, priv->video_native_resolution, &priv->cMaxWidth, + &priv->cMaxHeight, &priv->cFramerate, &priv->cInterleaved); + GST_DEBUG + ("wfd negotiated resolution: %" G_GUINT64_FORMAT ", width: %d, height: %d, framerate: %d, interleaved: %d", + tcHHResolution, priv->cMaxWidth, priv->cMaxHeight, priv->cFramerate, + priv->cInterleaved); + } + + wfd_res = + gst_wfd_message_set_preferred_direct_video_format (msg, + priv->direct_detected_video_codec, + priv->video_native_resolution, GST_WFD_CEA_UNKNOWN, tcCEAResolution, + tcVESAResolution, tcHHResolution, tcProfile, tcLevel, priv->cvLatency, + priv->cMaxHeight, priv->cMaxWidth, priv->cmin_slice_size, + priv->cslice_enc_params, priv->cframe_rate_control); + + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to set preffered video formats for direct streaming..."); + goto error; + } + + wfd_res = + gst_wfd_message_set_direct_streaming_mode (msg, TRUE); + + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to set preffered video formats for direct streaming..."); + goto error; + } + + *data = gst_wfd_message_as_text (msg); + if (*data == NULL) { + GST_ERROR_OBJECT (client, "Failed to get wfd message as text..."); + goto error; + } else { + *len = strlen (*data); + } + } else if (msg_type == TS_REQ_MSG) { + /* create transport switch request to be sent */ + wfd_res = gst_wfd_message_new (&msg); + if (wfd_res != 
GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to create wfd message..."); + goto error; + } + + wfd_res = gst_wfd_message_init (msg); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, "Failed to init wfd message..."); + goto error; + } + + /* set the preffered TCP ports for the WFD server */ + if (priv->ts_mode == WFD_TS_UDP) { + wfd_res = + gst_wfd_messge_set_preferred_rtp_ports (msg, GST_WFD_RTSP_TRANS_RTP, + GST_WFD_RTSP_PROFILE_AVP, GST_WFD_RTSP_LOWER_TRANS_UDP, priv->crtp_port0, priv->crtp_port1); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set preferred RTP ports on wfd message..."); + goto error; + } + } else { + wfd_res = + gst_wfd_messge_set_preferred_tcp_ports (msg, GST_WFD_RTSP_TRANS_RTP, + GST_WFD_RTSP_PROFILE_AVP, GST_WFD_RTSP_LOWER_TRANS_TCP, priv->crtp_port0_tcp, priv->crtp_port1_tcp); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set preferred TCP ports on wfd message..."); + goto error; + } + } + + wfd_res = + gst_wfd_message_set_buffer_length (msg, 200); + if (wfd_res != GST_WFD_OK) { + GST_ERROR_OBJECT (client, + "Failed to set preferred buffer length on wfd message..."); + goto error; + } + + *data = gst_wfd_message_as_text (msg); + if (*data == NULL) { + GST_ERROR_OBJECT (client, "Failed to get wfd message as text..."); + goto error; + } else { + gchar *append_data = NULL; + + g_signal_emit (client, + gst_rtsp_client_wfd_signals[SIGNAL_WFD_M4_REQ_MSG], 0, *data, + &append_data); + + if (append_data) { + g_free (*data); + *data = append_data; + } + *len = strlen (*data); + } + } else if (msg_type == M5_REQ_MSG) { + buf = g_string_new ("wfd_trigger_method: SETUP\r\n"); + if (buf == NULL) + goto error; + *len = buf->len; + *data = g_string_free (buf, FALSE); + } else if (msg_type == TEARDOWN_TRIGGER) { + buf = g_string_new ("wfd_trigger_method: TEARDOWN\r\n"); + if (buf == NULL) + goto error; + *len = buf->len; + *data = g_string_free (buf, FALSE); + } else if (msg_type == 
TEARDOWN_COUPLING_TRIGGER) { + buf = g_string_new ("wfd_trigger_method: TEARDOWN_COUPLING\r\n"); + if (buf == NULL) + goto error; + *len = buf->len; + *data = g_string_free (buf, FALSE); + } else if (msg_type == PLAY_TRIGGER) { + buf = g_string_new ("wfd_trigger_method: PLAY\r\n"); + if (buf == NULL) + goto error; + *len = buf->len; + *data = g_string_free (buf, FALSE); + } else if (msg_type == PAUSE_TRIGGER) { + buf = g_string_new ("wfd_trigger_method: PAUSE\r\n"); + if (buf == NULL) + goto error; + *len = buf->len; + *data = g_string_free (buf, FALSE); + } + + if (msg != NULL) + gst_wfd_message_free(msg); + + return; + +error: + + if (msg != NULL) + gst_wfd_message_free(msg); + + *data = NULL; + *len = 0; + + return; +} + +/** +* gst_prepare_request: +* @client: client object +* @request : requst message to be prepared +* @method : RTSP method of the request +* @url : url need to be in the request +* @message_type : WFD message type +* @trigger_type : trigger method to be used for M5 mainly +* +* Prepares request based on @method & @message_type +* +* Returns: a #GstRTSPResult. 
+*/ +GstRTSPResult +gst_prepare_request (GstRTSPWFDClient * client, GstRTSPMessage * request, + GstRTSPMethod method, gchar * url) +{ + GstRTSPResult res = GST_RTSP_OK; + gchar *str = NULL; + + GST_DEBUG_OBJECT (client, "Preparing request: %d", method); + + /* initialize the request */ + res = gst_rtsp_message_init_request (request, method, url); + + if (res < 0) { + GST_ERROR ("init request failed"); + return res; + } + + switch (method) { + /* Prepare OPTIONS request to send */ + case GST_RTSP_OPTIONS:{ + /* add wfd specific require filed "org.wfa.wfd1.0" */ + str = g_strdup ("org.wfa.wfd1.0"); + res = gst_rtsp_message_add_header (request, GST_RTSP_HDR_REQUIRE, str); + if (res < 0) { + GST_ERROR ("Failed to add header"); + g_free (str); + return res; + } + + g_free (str); + break; + } + + /* Prepare GET_PARAMETER request */ + case GST_RTSP_GET_PARAMETER:{ + gchar *msg = NULL; + guint msglen = 0; + + /* add content type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE, + "text/parameters"); + if (res < 0) { + GST_ERROR ("Failed to add header"); + return res; + } + + _set_wfd_message_body (client, M3_REQ_MSG, &msg, &msglen); + GST_DEBUG ("M3 server side message body: %s", msg); + + res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to set body data to rtsp message..."); + goto error; + } + + g_free (msg); + break; + } + + /* Prepare SET_PARAMETER request */ + case GST_RTSP_SET_PARAMETER:{ + gchar *msg = NULL; + guint msglen = 0; + + /* add content type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE, + "text/parameters"); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp request..."); + goto error; + } + + _set_wfd_message_body (client, M4_REQ_MSG, &msg, &msglen); + GST_DEBUG ("M4 server side message body: %s", msg); + + res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen); + if 
(res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to set body data to rtsp message..."); + goto error; + } + + g_free (msg); + break; + } + + default:{ + } + } + + return res; + +error: + return GST_RTSP_ERROR; +} + +GstRTSPResult +prepare_trigger_request (GstRTSPWFDClient * client, GstRTSPMessage * request, + GstWFDTriggerType trigger_type, gchar * url) +{ + GstRTSPResult res = GST_RTSP_OK; + + /* initialize the request */ + res = gst_rtsp_message_init_request (request, GST_RTSP_SET_PARAMETER, url); + if (res < 0) { + GST_ERROR ("init request failed"); + return res; + } + + switch (trigger_type) { + case WFD_TRIGGER_SETUP:{ + gchar *msg; + guint msglen = 0; + GString *msglength; + + /* add content type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE, + "text/parameters"); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp request..."); + goto error; + } + + _set_wfd_message_body (client, M5_REQ_MSG, &msg, &msglen); + msglength = g_string_new (""); + g_string_append_printf (msglength, "%d", msglen); + GST_DEBUG ("M5 server side message body: %s", msg); + + /* add content-length type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_LENGTH, + g_string_free (msglength, FALSE)); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + g_free (msg); + break; + } + case WFD_TRIGGER_TEARDOWN:{ + gchar *msg; + guint msglen = 0; + GString *msglength; + + /* add content type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE, + "text/parameters"); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp request..."); + goto error; + } + + _set_wfd_message_body (client, 
TEARDOWN_TRIGGER, &msg, &msglen); + msglength = g_string_new (""); + g_string_append_printf (msglength, "%d", msglen); + GST_DEBUG ("Trigger TEARDOWN server side message body: %s", msg); + + /* add content-length type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_LENGTH, + g_string_free (msglength, FALSE)); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + g_free (msg); + break; + } + case WFD_TRIGGER_TEARDOWN_COUPLING:{ + gchar *msg; + guint msglen = 0; + GString *msglength; + + /* add content type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE, + "text/parameters"); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp request..."); + goto error; + } + + _set_wfd_message_body (client, TEARDOWN_COUPLING_TRIGGER, &msg, &msglen); + msglength = g_string_new (""); + g_string_append_printf (msglength, "%d", msglen); + GST_DEBUG ("Trigger TEARDOWN server side message body: %s", msg); + + /* add content-length type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_LENGTH, + g_string_free (msglength, FALSE)); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + g_free (msg); + break; + } + case WFD_TRIGGER_PLAY:{ + gchar *msg; + guint msglen = 0; + GString *msglength; + + /* add content type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE, + "text/parameters"); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to 
add header to rtsp request..."); + goto error; + } + + _set_wfd_message_body (client, PLAY_TRIGGER, &msg, &msglen); + msglength = g_string_new (""); + g_string_append_printf (msglength, "%d", msglen); + GST_DEBUG ("Trigger PLAY server side message body: %s", msg); + + /* add content-length type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_LENGTH, + g_string_free (msglength, FALSE)); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + g_free (msg); + break; + } + case WFD_TRIGGER_PAUSE:{ + gchar *msg; + guint msglen = 0; + GString *msglength; + + /* add content type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE, + "text/parameters"); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp request..."); + goto error; + } + + _set_wfd_message_body (client, PAUSE_TRIGGER, &msg, &msglen); + msglength = g_string_new (""); + g_string_append_printf (msglength, "%d", msglen); + GST_DEBUG ("Trigger PAUSE server side message body: %s", msg); + + /* add content-length type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_LENGTH, + g_string_free (msglength, FALSE)); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + g_free (msg); + break; + } + /* TODO-WFD: implement to handle other trigger type */ + default:{ + } + } + + return res; + +error: + return res; +} + + +void +gst_send_request (GstRTSPWFDClient * client, GstRTSPSession * session, + GstRTSPMessage * 
request) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client); + + /* remove any previous header */ + gst_rtsp_message_remove_header (request, GST_RTSP_HDR_SESSION, -1); + + /* add the new session header for new session ids */ + if (session) { + guint timeout; + const gchar *sessionid = NULL; + gchar *str; + + sessionid = gst_rtsp_session_get_sessionid (session); + GST_INFO_OBJECT (client, "Session id : %s", sessionid); + + timeout = gst_rtsp_session_get_timeout (session); + if (timeout != DEFAULT_WFD_TIMEOUT) + str = g_strdup_printf ("%s; timeout=%d", sessionid, timeout); + else + str = g_strdup (sessionid); + + gst_rtsp_message_take_header (request, GST_RTSP_HDR_SESSION, str); + } +#if 0 + if (gst_debug_category_get_threshold (rtsp_wfd_client_debug) >= GST_LEVEL_LOG) { + gst_rtsp_message_dump (request); + } +#endif + res = gst_rtsp_client_send_message (parent_client, session, request); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "gst_rtsp_client_send_message failed : %d", res); + } + + gst_rtsp_message_unset (request); +} + +/** +* prepare_response: +* @client: client object +* @request : requst message received +* @response : response to be prepare based on request +* @method : RTSP method +* +* prepare response to the request based on @method & @message_type +* +* Returns: a #GstRTSPResult. 
+*/ +GstRTSPResult +prepare_response (GstRTSPWFDClient * client, GstRTSPMessage * request, + GstRTSPMessage * response, GstRTSPMethod method) +{ + GstRTSPResult res = GST_RTSP_OK; + + switch (method) { + /* prepare OPTIONS response */ + case GST_RTSP_OPTIONS:{ + GstRTSPMethod options; + gchar *tmp = NULL; + gchar *str = NULL; + gchar *user_agent = NULL; + + options = GST_RTSP_OPTIONS | + GST_RTSP_PAUSE | + GST_RTSP_PLAY | + GST_RTSP_SETUP | + GST_RTSP_GET_PARAMETER | GST_RTSP_SET_PARAMETER | GST_RTSP_TEARDOWN; + + str = gst_rtsp_options_as_text (options); + + /*append WFD specific method */ + tmp = g_strdup (", org.wfa.wfd1.0"); + g_strlcat (str, tmp, strlen (tmp) + strlen (str) + 1); + + gst_rtsp_message_init_response (response, GST_RTSP_STS_OK, + gst_rtsp_status_as_text (GST_RTSP_STS_OK), request); + + gst_rtsp_message_add_header (response, GST_RTSP_HDR_PUBLIC, str); + g_free (str); + g_free (tmp); + str = NULL; + res = + gst_rtsp_message_get_header (request, GST_RTSP_HDR_USER_AGENT, + &user_agent, 0); + if (res == GST_RTSP_OK) { + gst_rtsp_message_add_header (response, GST_RTSP_HDR_USER_AGENT, + user_agent); + } else + res = GST_RTSP_OK; + break; + } + default: + GST_ERROR_OBJECT (client, "Unhandled method..."); + return GST_RTSP_EINVAL; + break; + } + + return res; +} + +static void +send_generic_wfd_response (GstRTSPWFDClient * client, GstRTSPStatusCode code, + GstRTSPContext * ctx) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client); + + gst_rtsp_message_init_response (ctx->response, code, + gst_rtsp_status_as_text (code), ctx->request); + + res = gst_rtsp_client_send_message (parent_client, NULL, ctx->response); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "gst_rtsp_client_send_message failed : %d", res); + } +} + + +static GstRTSPResult +handle_M1_message (GstRTSPWFDClient * client) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPMessage request = { 0 }; + + res = gst_prepare_request (client, 
&request, GST_RTSP_OPTIONS, (gchar *) "*"); + if (GST_RTSP_OK != res) { + GST_ERROR_OBJECT (client, "Failed to prepare M1 request....\n"); + return res; + } + + GST_DEBUG_OBJECT (client, "Sending M1 request.. (OPTIONS request)"); + + gst_send_request (client, NULL, &request); + + return res; +} + +/** +* handle_M3_message: +* @client: client object +* +* Handles M3 WFD message. +* This API will send M3 message (GET_PARAMETER) to WFDSink to query supported formats by the WFDSink. +* After getting supported formats info, this API will set those values on WFDConfigMessage obj +* +* Returns: a #GstRTSPResult. +*/ +static GstRTSPResult +handle_M3_message (GstRTSPWFDClient * client) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPMessage request = { 0 }; + + res = gst_prepare_request (client, &request, GST_RTSP_GET_PARAMETER, + (gchar *) "rtsp://localhost/wfd1.0"); + + if (GST_RTSP_OK != res) { + GST_ERROR_OBJECT (client, "Failed to prepare M3 request....\n"); + goto error; + } + + GST_DEBUG_OBJECT (client, "Sending GET_PARAMETER request message (M3)..."); + + gst_send_request (client, NULL, &request); + + return res; + +error: + return res; +} + +static GstRTSPResult +handle_M4_message (GstRTSPWFDClient * client) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPMessage request = { 0 }; + + res = gst_prepare_request (client, &request, GST_RTSP_SET_PARAMETER, + (gchar *) "rtsp://localhost/wfd1.0"); + + if (GST_RTSP_OK != res) { + GST_ERROR_OBJECT (client, "Failed to prepare M4 request....\n"); + goto error; + } + + GST_DEBUG_OBJECT (client, "Sending SET_PARAMETER request message (M4)..."); + + gst_send_request (client, NULL, &request); + + return res; + +error: + return res; +} + +GstRTSPResult +gst_rtsp_wfd_client_trigger_request (GstRTSPWFDClient * client, + GstWFDTriggerType type) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPMessage request = { 0 }; + + res = prepare_trigger_request (client, &request, type, (gchar *) "rtsp://localhost/wfd1.0"); + + if (GST_RTSP_OK != 
res) { + GST_ERROR_OBJECT (client, "Failed to prepare M5 request....\n"); + goto error; + } + + GST_DEBUG_OBJECT (client, "Sending trigger request message...: %d", type); + + gst_send_request (client, NULL, &request); + + return res; + +error: + return res; +} + +GstRTSPResult +gst_rtsp_wfd_client_set_video_supported_resolution (GstRTSPWFDClient * client, + guint64 supported_reso) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_val_if_fail (priv != NULL, GST_RTSP_EINVAL); + + priv->video_resolution_supported = supported_reso; + GST_DEBUG ("Resolution : %" G_GUINT64_FORMAT, supported_reso); + + return res; +} + +GstRTSPResult +gst_rtsp_wfd_client_set_video_native_resolution (GstRTSPWFDClient * client, + guint64 native_reso) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_val_if_fail (priv != NULL, GST_RTSP_EINVAL); + + priv->video_native_resolution = native_reso; + GST_DEBUG ("Native Resolution : %" G_GUINT64_FORMAT, native_reso); + + return res; +} + +GstRTSPResult +gst_rtsp_wfd_client_set_video_codec (GstRTSPWFDClient * client, + guint8 video_codec) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_val_if_fail (priv != NULL, GST_RTSP_EINVAL); + + priv->video_codec = video_codec; + GST_DEBUG ("Video codec : %d", video_codec); + + return res; +} + +GstRTSPResult +gst_rtsp_wfd_client_set_audio_codec (GstRTSPWFDClient * client, + guint8 audio_codec) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ 
GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_val_if_fail (priv != NULL, GST_RTSP_EINVAL); + + priv->audio_codec = audio_codec; + GST_DEBUG ("Audio codec : %d", audio_codec); + + return res; +} + +GstRTSPResult +gst_rtsp_wfd_client_set_coupling_mode (GstRTSPWFDClient * client, + gboolean coupling_mode) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_val_if_fail (priv != NULL, GST_RTSP_EINVAL); + priv->coupling_mode = coupling_mode; + + return res; +} + + +static gboolean +wfd_ckeck_keep_alive_response (gpointer userdata) +{ + GstRTSPWFDClient *client = (GstRTSPWFDClient *) userdata; + GstRTSPWFDClientPrivate *priv = NULL; + if (!client) { + return FALSE; + } + - priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, GST_RTSP_EINVAL); + + if (priv->keep_alive_flag) { + return FALSE; + } + else { + GST_INFO ("%p: source error notification", client); + + g_signal_emit (client, + gst_rtsp_client_wfd_signals[SIGNAL_WFD_KEEP_ALIVE_FAIL], 0, NULL); + return FALSE; + } +} + +/*Sending keep_alive (M16) message. + Without calling gst_prepare_request function.*/ +static GstRTSPResult +handle_M16_message (GstRTSPWFDClient * client) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPMessage request = { 0 }; + + res = gst_rtsp_message_init_request (&request, GST_RTSP_GET_PARAMETER, + (gchar *) "rtsp://localhost/wfd1.0"); + + if (res < 0) { + GST_ERROR ("init request failed"); + return FALSE; + } + + gst_send_request (client, NULL, &request); + return GST_RTSP_OK; +} + +/*CHecking whether source has got response of any request. 
+ * If yes, keep alive message is sent otherwise error message + * will be displayed.*/ +static gboolean +keep_alive_condition (gpointer userdata) +{ + GstRTSPWFDClient *client; + GstRTSPWFDClientPrivate *priv; + GstRTSPResult res; + client = (GstRTSPWFDClient *) userdata; + if (!client) { + return FALSE; + } - priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_val_if_fail (priv != NULL, FALSE); + + g_mutex_lock (&priv->keep_alive_lock); + if (!priv->keep_alive_flag) { + g_timeout_add (5000, wfd_ckeck_keep_alive_response, client); + } + else { + GST_DEBUG_OBJECT (client, "have received last keep alive message response"); + } + + GST_DEBUG ("sending keep alive message"); + res = handle_M16_message (client); + if (res == GST_RTSP_OK) { + priv->keep_alive_flag = FALSE; + } else { + GST_ERROR_OBJECT (client, "Failed to send Keep Alive Message"); + g_mutex_unlock (&priv->keep_alive_lock); + return FALSE; + } + + g_mutex_unlock (&priv->keep_alive_lock); + return TRUE; +} + +static void +wfd_set_keep_alive_condition (GstRTSPWFDClient * client) +{ + g_timeout_add ((DEFAULT_WFD_TIMEOUT - 5) * 1000, keep_alive_condition, + client); +} + +void +gst_rtsp_wfd_client_set_host_address (GstRTSPWFDClient * client, + const gchar * address) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + + g_return_if_fail (priv != NULL); + + if (priv->host_address) { + g_free (priv->host_address); + } + + priv->host_address = g_strdup (address); +} + +guint +gst_rtsp_wfd_client_get_audio_codec (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->caCodec; +} + +guint +gst_rtsp_wfd_client_get_audio_freq 
(GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cFreq; +} + +guint +gst_rtsp_wfd_client_get_audio_channels (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cChanels; +} + +guint +gst_rtsp_wfd_client_get_audio_bit_width (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cBitwidth; +} + +guint +gst_rtsp_wfd_client_get_audio_latency (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->caLatency; +} + +guint +gst_rtsp_wfd_client_get_video_codec (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cvCodec; +} + +guint +gst_rtsp_wfd_client_get_video_native (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cNative; +} + +guint64 +gst_rtsp_wfd_client_get_video_native_resolution (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ 
GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cNativeResolution; +} + +guint64 +gst_rtsp_wfd_client_get_video_cea_resolution (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cCEAResolution; +} + +guint64 +gst_rtsp_wfd_client_get_video_vesa_resolution (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cVESAResolution; +} + +guint64 +gst_rtsp_wfd_client_get_video_hh_resolution (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cHHResolution; +} + +guint +gst_rtsp_wfd_client_get_video_profile (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cProfile; +} + +guint +gst_rtsp_wfd_client_get_video_level (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cLevel; +} + +guint +gst_rtsp_wfd_client_get_video_latency (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = 
gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cvLatency; +} + +guint32 +gst_rtsp_wfd_client_get_video_max_height (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cMaxHeight; +} + +guint32 +gst_rtsp_wfd_client_get_video_max_width (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cMaxWidth; +} + +guint32 +gst_rtsp_wfd_client_get_video_framerate (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cFramerate; +} + +guint32 +gst_rtsp_wfd_client_get_video_min_slice_size (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cmin_slice_size; +} + +guint32 +gst_rtsp_wfd_client_get_video_slice_enc_params (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->cslice_enc_params; +} + +guint +gst_rtsp_wfd_client_get_video_framerate_control (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + 
g_return_val_if_fail (priv != NULL, 0); + + return priv->cframe_rate_control; +} + +guint32 +gst_rtsp_wfd_client_get_rtp_port0 (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->crtp_port0; +} + +guint32 +gst_rtsp_wfd_client_get_rtp_port1 (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->crtp_port1; +} + +gboolean +gst_rtsp_wfd_client_get_edid_supported (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->edid_supported; +} + +guint32 +gst_rtsp_wfd_client_get_edid_hresolution (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->edid_hres; +} + +guint32 +gst_rtsp_wfd_client_get_edid_vresolution (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->edid_vres; +} + +gboolean +gst_rtsp_wfd_client_get_protection_enabled (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->protection_enabled; +} 
+ +gboolean +gst_rtsp_wfd_client_get_coupling_mode (GstRTSPWFDClient * client) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, 0); + + return priv->coupling_mode; +} + +void +gst_rtsp_wfd_client_set_audio_freq (GstRTSPWFDClient * client, guint freq) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cFreq = freq; +} + +void +gst_rtsp_wfd_client_set_edid_supported (GstRTSPWFDClient * client, + gboolean supported) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->edid_supported = supported; +} + +void +gst_rtsp_wfd_client_set_edid_hresolution (GstRTSPWFDClient * client, + guint32 reso) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->edid_hres = reso; +} + +void +gst_rtsp_wfd_client_set_edid_vresolution (GstRTSPWFDClient * client, + guint32 reso) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->edid_vres = reso; +} + +void +gst_rtsp_wfd_client_set_protection_enabled (GstRTSPWFDClient * client, + gboolean enable) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->protection_enabled = enable; +} + +void 
+gst_rtsp_wfd_client_set_hdcp_version (GstRTSPWFDClient * client, + GstWFDHDCPProtection version) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->hdcp_version = version; +} + +void +gst_rtsp_wfd_client_set_hdcp_port (GstRTSPWFDClient * client, guint32 port) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->hdcp_tcpport = port; +} + +void +gst_rtsp_wfd_client_set_keep_alive_flag (GstRTSPWFDClient * client, + gboolean flag) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + g_mutex_lock (&priv->keep_alive_lock); + if (priv->keep_alive_flag == !(flag)) + priv->keep_alive_flag = flag; + g_mutex_unlock (&priv->keep_alive_lock); +} + +void +gst_rtsp_wfd_client_set_aud_codec (GstRTSPWFDClient * client, guint acodec) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->caCodec = acodec; +} + +void +gst_rtsp_wfd_client_set_audio_channels (GstRTSPWFDClient * client, + guint channels) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cChanels = channels; +} + +void +gst_rtsp_wfd_client_set_audio_bit_width (GstRTSPWFDClient * client, + guint bwidth) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = 
gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cBitwidth = bwidth; +} + +void +gst_rtsp_wfd_client_set_audio_latency (GstRTSPWFDClient * client, guint latency) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->caLatency = latency; +} + +void +gst_rtsp_wfd_client_set_vid_codec (GstRTSPWFDClient * client, guint vcodec) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cvCodec = vcodec; +} + +void +gst_rtsp_wfd_client_set_video_native (GstRTSPWFDClient * client, guint native) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cNative = native; +} + +void +gst_rtsp_wfd_client_set_vid_native_resolution (GstRTSPWFDClient * client, + guint64 res) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cNativeResolution = res; +} + +void +gst_rtsp_wfd_client_set_video_cea_resolution (GstRTSPWFDClient * client, + guint64 res) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cCEAResolution = res; +} + +void +gst_rtsp_wfd_client_set_video_vesa_resolution (GstRTSPWFDClient * client, + guint64 res) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private 
(client); + g_return_if_fail (priv != NULL); + + priv->cVESAResolution = res; +} + +void +gst_rtsp_wfd_client_set_video_hh_resolution (GstRTSPWFDClient * client, + guint64 res) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cHHResolution = res; +} + +void +gst_rtsp_wfd_client_set_video_profile (GstRTSPWFDClient * client, guint profile) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cProfile = profile; +} + +void +gst_rtsp_wfd_client_set_video_level (GstRTSPWFDClient * client, guint level) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cLevel = level; +} + +void +gst_rtsp_wfd_client_set_video_latency (GstRTSPWFDClient * client, guint latency) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cvLatency = latency; +} + +void +gst_rtsp_wfd_client_set_video_max_height (GstRTSPWFDClient * client, + guint32 height) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cMaxHeight = height; +} + +void +gst_rtsp_wfd_client_set_video_max_width (GstRTSPWFDClient * client, + guint32 width) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + 
+ priv->cMaxWidth = width; +} + +void +gst_rtsp_wfd_client_set_video_framerate (GstRTSPWFDClient * client, + guint32 framerate) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cFramerate = framerate; +} + +void +gst_rtsp_wfd_client_set_video_min_slice_size (GstRTSPWFDClient * client, + guint32 slice_size) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cmin_slice_size = slice_size; +} + +void +gst_rtsp_wfd_client_set_video_slice_enc_params (GstRTSPWFDClient * client, + guint32 enc_params) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cslice_enc_params = enc_params; +} + +void +gst_rtsp_wfd_client_set_video_framerate_control (GstRTSPWFDClient * client, + guint framerate) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->cframe_rate_control = framerate; +} + +void +gst_rtsp_wfd_client_set_rtp_port0 (GstRTSPWFDClient * client, guint32 port) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->crtp_port0 = port; +} + +void +gst_rtsp_wfd_client_set_rtp_port1 (GstRTSPWFDClient * client, guint32 port) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail 
(priv != NULL); + + priv->crtp_port1 = port; +} + +void +gst_rtsp_wfd_client_set_wfd2_supported (GstRTSPWFDClient *client, + gint flag) +{ - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_if_fail (priv != NULL); + + priv->wfd2_supported = flag; +} + +static void +direct_stream_end_cb (GstRTSPMediaFactoryWFD *factory, void *user_data) +{ + GstRTSPWFDClient *client = GST_RTSP_WFD_CLIENT_CAST (user_data); - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + GstRTSPResult res = GST_RTSP_OK; + + priv->direct_streaming_state = 0; + res = handle_M4_message (client); + + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to send message for direct streaming"); + } +} + +GstRTSPResult +gst_rtsp_wfd_client_set_direct_streaming(GstRTSPWFDClient * client, + gint direct_streaming, gchar *urisrc) +{ + GstRTSPClient *parent_client = GST_RTSP_CLIENT_CAST (client); - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + GstRTSPResult res = GST_RTSP_OK; + + GstRTSPMediaFactory *factory = NULL; + GstRTSPMountPoints *mount_points = NULL; + gchar *path = NULL; + gint matched = 0; + + if (priv->direct_streaming_supported == FALSE) { + GST_ERROR_OBJECT (client, "Direct streaming not supported by client"); + return GST_RTSP_ERROR; + } + + if (priv->direct_streaming_state == direct_streaming) { + GST_DEBUG_OBJECT (client, "Direct streaming state not changed"); + return res; + } + + if (!(mount_points = gst_rtsp_client_get_mount_points (parent_client))) { + res = GST_RTSP_ERROR; + GST_ERROR_OBJECT (client, "Failed to set direct streaing: no mount points..."); + goto no_mount_points; + } + + path = g_strdup(WFD_MOUNT_POINT); + if (!path) { + 
res = GST_RTSP_ERROR; + GST_ERROR_OBJECT (client, "Failed to set direct streaing: no path..."); + goto no_path; + } + + if (!(factory = gst_rtsp_mount_points_match (mount_points, + path, &matched))) { + GST_ERROR_OBJECT (client, "Failed to set direct streaing: no factory..."); + res = GST_RTSP_ERROR; + goto no_factory; + } + + if (direct_streaming) { + res = gst_rtsp_media_factory_wfd_uri_type_find (factory, + urisrc, &priv->direct_detected_video_codec, + &priv->direct_detected_audio_codec); + + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to create direct streaming pipeline"); + goto no_pipe; + } + } + + if (!(priv->direct_detected_video_codec & GST_WFD_VIDEO_H264)) { + GST_ERROR_OBJECT (client, "Detected video codec not supported"); + res = GST_RTSP_ERROR; + goto no_pipe; + } + + if (!(priv->direct_detected_audio_codec & GST_WFD_AUDIO_AAC || + priv->direct_detected_audio_codec & GST_WFD_AUDIO_LPCM || + priv->direct_detected_audio_codec & GST_WFD_AUDIO_AC3)) { + GST_ERROR_OBJECT (client, "Detected audio codec not supported"); + res = GST_RTSP_ERROR; + goto no_pipe; + } + + g_signal_connect_object (GST_RTSP_MEDIA_FACTORY_WFD_CAST (factory), "direct-stream-end", + G_CALLBACK (direct_stream_end_cb), client, 0); + + res = gst_rtsp_media_factory_wfd_set_direct_streaming (factory, + direct_streaming, urisrc); + + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to create direct streaming pipeline"); + goto no_pipe; + } + + if (direct_streaming) { + res = handle_M4_direct_streaming_message (client); + + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to send message for direct streaming"); + goto no_pipe; + } + } + + priv->direct_streaming_state = direct_streaming; + +no_pipe: + g_object_unref(factory); +no_factory: + g_free(path); +no_path: + g_object_unref(mount_points); +no_mount_points: + return res; +} + +/** +* prepare_direct_streaming_request: +* @client: client object +* @request : requst message to be prepared +* @url 
: url need to be in the request +* +* Prepares request based on @method & @message_type +* +* Returns: a #GstRTSPResult. +*/ +static GstRTSPResult +prepare_direct_streaming_request (GstRTSPWFDClient * client, GstRTSPMessage * request) +{ + GstRTSPResult res = GST_RTSP_OK; + gchar *msg = NULL; + guint msglen = 0; + GString *msglength; + + GST_DEBUG_OBJECT (client, "Preparing request for direct streaming"); + + /* initialize the request */ + res = gst_rtsp_message_init_request (request, GST_RTSP_SET_PARAMETER, + (gchar *) "rtsp://localhost/wfd1.0"); + if (res < 0) { + GST_ERROR ("init request failed"); + return res; + } + + /* add content type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE, + "text/parameters"); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp request..."); + goto error; + } + + _set_wfd_message_body (client, M4_DS_REQ_MSG, &msg, &msglen); + msglength = g_string_new (""); + g_string_append_printf (msglength, "%d", msglen); + GST_DEBUG ("M4 for direct streaming server side message body: %s", msg); + + /* add content-length type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_LENGTH, + g_string_free (msglength, FALSE)); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + g_free (msg); + + return res; + +error: + return GST_RTSP_ERROR; +} + +static GstRTSPResult +handle_M4_direct_streaming_message (GstRTSPWFDClient * client) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPMessage request = { 0 }; + + res = prepare_direct_streaming_request (client, &request); + if (GST_RTSP_OK != res) { + GST_ERROR_OBJECT (client, "Failed to prepare M4 request....\n"); + goto error; + } + + GST_DEBUG_OBJECT (client, "Sending 
SET_PARAMETER request message for direct streaming (M4)..."); + + gst_send_request (client, NULL, &request); + + return res; + +error: + return res; +} + +/** +* prepare_transport_switch_request: +* @client: client object +* @request : requst message to be prepared +* @url : url need to be in the request +* +* Prepares request based on @method & @message_type +* +* Returns: a #GstRTSPResult. +*/ +static GstRTSPResult +prepare_transport_switch_request (GstRTSPWFDClient * client, GstRTSPMessage * request) +{ + GstRTSPResult res = GST_RTSP_OK; + gchar *url = NULL; + gchar *msg = NULL; + guint msglen = 0; + GString *msglength; + + GstRTSPMethod method = GST_RTSP_SET_PARAMETER; + + url = g_strdup ("rtsp://localhost/wfd1.0"); + if (!url) + return GST_RTSP_ERROR; + + GST_DEBUG_OBJECT (client, "Preparing request for transport switch"); + + /* initialize the request */ + res = gst_rtsp_message_init_request (request, method, url); + g_free (url); + if (res < 0) { + GST_ERROR ("init request failed"); + return res; + } + + /* add content type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_TYPE, + "text/parameters"); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp request..."); + goto error; + } + + _set_wfd_message_body (client, TS_REQ_MSG, &msg, &msglen); + msglength = g_string_new (""); + g_string_append_printf (msglength, "%d", msglen); + GST_DEBUG ("Transport switch server side message body: %s", msg); + + /* add content-length type */ + res = + gst_rtsp_message_add_header (request, GST_RTSP_HDR_CONTENT_LENGTH, + g_string_free (msglength, FALSE)); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + res = gst_rtsp_message_set_body (request, (guint8 *) msg, msglen); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (client, "Failed to add header to rtsp message..."); + goto error; + } + + g_free (msg); + + return res; + +error: + return 
GST_RTSP_ERROR; +} + +GstRTSPResult +gst_rtsp_wfd_client_switch_to_udp (GstRTSPWFDClient * client) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPMessage request = { 0 }; + GList *tl = NULL; + GPtrArray *ta = NULL; + + if (client->priv->ts_mode == WFD_TS_UDP) { + GST_ERROR_OBJECT (client, "Transport already UDP"); + return res; + } + + ta = g_ptr_array_new(); + + tl = gst_rtsp_stream_transport_filter (client->priv->stats.stream, NULL, NULL); + client->priv->transports = tl; + g_ptr_array_add (ta, tl->data); + + client->priv->ts_mode = WFD_TS_UDP; + res = prepare_transport_switch_request (client, &request); + if (GST_RTSP_OK != res) { + GST_ERROR_OBJECT (client, "Failed to prepare transport switch request....\n"); + goto error; + } + + GST_DEBUG_OBJECT (client, "Sending SET_PARAMETER request message for transport switch..."); + + gst_send_request (client, NULL, &request); + + gst_rtsp_media_set_state (client->priv->media, GST_STATE_PAUSED, ta); + + g_ptr_array_free (ta, FALSE); + + return res; + +error: + g_ptr_array_free (ta, FALSE); + return res; +} + +GstRTSPResult +gst_rtsp_wfd_client_switch_to_tcp (GstRTSPWFDClient * client) +{ + GstRTSPResult res = GST_RTSP_OK; + GstRTSPMessage request = { 0 }; + GList *tl = NULL; + GPtrArray *ta = NULL; + + ta = g_ptr_array_new(); + + tl = gst_rtsp_stream_transport_filter (client->priv->stats.stream, NULL, NULL); + client->priv->transports = tl; + g_ptr_array_add (ta, tl->data); + + if (client->priv->ts_mode == WFD_TS_TCP) { + GST_ERROR_OBJECT (client, "Transport already TCP"); + return res; + } + + client->priv->ts_mode = WFD_TS_TCP; + res = prepare_transport_switch_request (client, &request); + if (GST_RTSP_OK != res) { + GST_ERROR_OBJECT (client, "Failed to prepare transport switch request....\n"); + goto error; + } + + GST_DEBUG_OBJECT (client, "Sending SET_PARAMETER request message for transport switch..."); + + gst_send_request (client, NULL, &request); + + gst_rtsp_media_set_state (client->priv->media, 
GST_STATE_PAUSED, ta); + + g_ptr_array_free (ta, FALSE); + + return res; + +error: + g_ptr_array_free (ta, FALSE); + return res; +} +gchar * gst_rtsp_wfd_client_get_sink_user_agent (GstRTSPWFDClient * client) +{ + char *str = NULL; - GstRTSPWFDClientPrivate *priv = GST_RTSP_WFD_CLIENT_GET_PRIVATE (client); ++ GstRTSPWFDClientPrivate *priv = gst_rtsp_wfd_client_get_instance_private (client); + g_return_val_if_fail (priv != NULL, NULL); + + if (priv->sink_user_agent != NULL) + str = g_strdup (priv->sink_user_agent); + + return str; +} diff --cc subprojects/gst-rtsp-server/gst/rtsp-server/rtsp-media-factory-wfd.c index 277fb6819b,0000000000..663b4c261b mode 100644,000000..100644 --- a/subprojects/gst-rtsp-server/gst/rtsp-server/rtsp-media-factory-wfd.c +++ b/subprojects/gst-rtsp-server/gst/rtsp-server/rtsp-media-factory-wfd.c @@@ -1,2086 -1,0 +1,2081 @@@ +/* GStreamer + * Copyright (C) 2015 Samsung Electronics Hyunjun Ko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ +/* + * SECTION:rtsp-media-factory + * @short_description: A factory for media pipelines + * @see_also: #GstRTSPMountPoints, #GstRTSPMedia + * + * The #GstRTSPMediaFactoryWFD is responsible for creating or recycling + * #GstRTSPMedia objects based on the passed URL. 
+ * + * The default implementation of the object can create #GstRTSPMedia objects + * containing a pipeline created from a launch description set with + * gst_rtsp_media_factory_wfd_set_launch(). + * + * Media from a factory can be shared by setting the shared flag with + * gst_rtsp_media_factory_wfd_set_shared(). When a factory is shared, + * gst_rtsp_media_factory_wfd_construct() will return the same #GstRTSPMedia when + * the url matches. + * + * Last reviewed on 2013-07-11 (1.0.0) + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include + +#include "rtsp-media-factory-wfd.h" +#include "gstwfdmessage.h" + - #define GST_RTSP_MEDIA_FACTORY_WFD_GET_PRIVATE(obj) \ - (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_RTSP_MEDIA_FACTORY_WFD, GstRTSPMediaFactoryWFDPrivate)) - +#define GST_RTSP_MEDIA_FACTORY_WFD_GET_LOCK(f) (&(GST_RTSP_MEDIA_FACTORY_WFD_CAST(f)->priv->lock)) +#define GST_RTSP_MEDIA_FACTORY_WFD_LOCK(f) (g_mutex_lock(GST_RTSP_MEDIA_FACTORY_WFD_GET_LOCK(f))) +#define GST_RTSP_MEDIA_FACTORY_WFD_UNLOCK(f) (g_mutex_unlock(GST_RTSP_MEDIA_FACTORY_WFD_GET_LOCK(f))) + +typedef struct _GstRTPSMediaWFDTypeFindResult GstRTPSMediaWFDTypeFindResult; + +struct _GstRTPSMediaWFDTypeFindResult{ + gint h264_found; + gint aac_found; + gint ac3_found; + GstElementFactory *demux_fact; + GstElementFactory *src_fact; +}; + +typedef struct _GstRTSPMediaWFDDirectPipelineData GstRTSPMediaWFDDirectPipelineData; + +struct _GstRTSPMediaWFDDirectPipelineData { + GstBin *pipeline; + GstElement *ap; + GstElement *vp; + GstElement *aq; + GstElement *vq; + GstElement *tsmux; + GstElement *mux_fs; + gchar *uri; +}; + + +struct _GstRTSPMediaFactoryWFDPrivate +{ + GMutex lock; + GstRTSPPermissions *permissions; + gchar *launch; + gboolean shared; + GstRTSPLowerTrans protocols; + guint buffer_size; + guint mtu_size; + + guint8 videosrc_type; + guint8 video_codec; + gchar *video_encoder; + guint video_bitrate; + guint video_width; + guint video_height; + guint 
video_framerate; + guint video_enc_skip_inbuf_value; + GstElement *video_queue; + GstBin *video_srcbin; + + GstElement *venc; + guint decide_udp_bitrate[21]; + guint min_udp_bitrate; + guint max_udp_bitrate; + gboolean decided_udp_bitrate; + + gchar *audio_device; + gchar *audio_encoder_aac; + gchar *audio_encoder_ac3; + guint8 audio_codec; + guint64 audio_latency_time; + guint64 audio_buffer_time; + gboolean audio_do_timestamp; + guint8 audio_channels; + guint8 audio_freq; + guint8 audio_bitrate; + GstElement *audio_queue; + GstBin *audio_srcbin; + + GMutex direct_lock; + GCond direct_cond; + GType decodebin_type; + GstBin *discover_pipeline; + GstRTPSMediaWFDTypeFindResult res; + GstRTSPMediaWFDDirectPipelineData *direct_pipe; + GstBin *stream_bin; + GstElement *mux; + GstElement *mux_queue; + GstElement *pay; + GstElement *stub_fs; + GMainLoop *discover_loop; + + guint64 video_resolution_supported; + + gboolean dump_ts; +}; + +#define DEFAULT_LAUNCH NULL +#define DEFAULT_SHARED FALSE +#define DEFAULT_PROTOCOLS GST_RTSP_LOWER_TRANS_UDP | GST_RTSP_LOWER_TRANS_UDP_MCAST | \ + GST_RTSP_LOWER_TRANS_TCP +#define DEFAULT_BUFFER_SIZE 0x80000 + +enum +{ + PROP_0, + PROP_LAUNCH, + PROP_SHARED, + PROP_SUSPEND_MODE, + PROP_EOS_SHUTDOWN, + PROP_PROTOCOLS, + PROP_BUFFER_SIZE, + PROP_LAST +}; + +enum +{ + SIGNAL_MEDIA_CONSTRUCTED, + SIGNAL_MEDIA_CONFIGURE, + SIGNAL_DIRECT_STREAMING_END, + SIGNAL_LAST +}; + +GST_DEBUG_CATEGORY_STATIC (rtsp_media_wfd_debug); +#define GST_CAT_DEFAULT rtsp_media_wfd_debug + +static guint gst_rtsp_media_factory_wfd_signals[SIGNAL_LAST] = { 0 }; + +static void gst_rtsp_media_factory_wfd_get_property (GObject * object, + guint propid, GValue * value, GParamSpec * pspec); +static void gst_rtsp_media_factory_wfd_set_property (GObject * object, + guint propid, const GValue * value, GParamSpec * pspec); + +static void gst_rtsp_media_factory_wfd_finalize (GObject * obj); + + +static GstElement *rtsp_media_factory_wfd_create_element (GstRTSPMediaFactory * 
+ factory, const GstRTSPUrl * url); +static GstRTSPMedia *rtsp_media_factory_wfd_construct (GstRTSPMediaFactory * + factory, const GstRTSPUrl * url); + +static void _config_bitrate (GstRTSPMediaFactoryWFD * factory); + - G_DEFINE_TYPE (GstRTSPMediaFactoryWFD, gst_rtsp_media_factory_wfd, ++G_DEFINE_TYPE_WITH_PRIVATE (GstRTSPMediaFactoryWFD, gst_rtsp_media_factory_wfd, + GST_TYPE_RTSP_MEDIA_FACTORY); + +static void +gst_rtsp_media_factory_wfd_class_init (GstRTSPMediaFactoryWFDClass * klass) +{ + GObjectClass *gobject_class; + GstRTSPMediaFactoryClass *factory_class; + - g_type_class_add_private (klass, sizeof (GstRTSPMediaFactoryWFDPrivate)); - + gobject_class = G_OBJECT_CLASS (klass); + factory_class = GST_RTSP_MEDIA_FACTORY_CLASS (klass); + + gobject_class->get_property = gst_rtsp_media_factory_wfd_get_property; + gobject_class->set_property = gst_rtsp_media_factory_wfd_set_property; + gobject_class->finalize = gst_rtsp_media_factory_wfd_finalize; + + gst_rtsp_media_factory_wfd_signals[SIGNAL_DIRECT_STREAMING_END] = + g_signal_new ("direct-stream-end", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTSPMediaFactoryWFDClass, + direct_stream_end), NULL, NULL, g_cclosure_marshal_generic, + G_TYPE_NONE, 0, G_TYPE_NONE); + + factory_class->construct = rtsp_media_factory_wfd_construct; + factory_class->create_element = rtsp_media_factory_wfd_create_element; + + GST_DEBUG_CATEGORY_INIT (rtsp_media_wfd_debug, "rtspmediafactorywfd", 0, + "GstRTSPMediaFactoryWFD"); +} + +void +gst_rtsp_media_factory_wfd_set (GstRTSPMediaFactoryWFD * factory, + guint8 videosrc_type, gchar * audio_device, guint64 audio_latency_time, + guint64 audio_buffer_time, gboolean audio_do_timestamp, guint mtu_size) +{ + GstRTSPMediaFactoryWFDPrivate *priv = - GST_RTSP_MEDIA_FACTORY_WFD_GET_PRIVATE (factory); ++ gst_rtsp_media_factory_wfd_get_instance_private (factory); + factory->priv = priv; + + priv->videosrc_type = videosrc_type; + priv->audio_device = audio_device; + 
priv->audio_latency_time = audio_latency_time; + priv->audio_buffer_time = audio_buffer_time; + priv->audio_do_timestamp = audio_do_timestamp; + priv->mtu_size = mtu_size; +} + +void +gst_rtsp_media_factory_wfd_set_encoders (GstRTSPMediaFactoryWFD * factory, + gchar * video_encoder, gchar * audio_encoder_aac, gchar * audio_encoder_ac3) +{ + GstRTSPMediaFactoryWFDPrivate *priv = - GST_RTSP_MEDIA_FACTORY_WFD_GET_PRIVATE (factory); ++ gst_rtsp_media_factory_wfd_get_instance_private (factory); + factory->priv = priv; + + priv->video_encoder = video_encoder; + priv->audio_encoder_aac = audio_encoder_aac; + priv->audio_encoder_ac3 = audio_encoder_ac3; +} + +void +gst_rtsp_media_factory_wfd_set_dump_ts (GstRTSPMediaFactoryWFD * factory, + gboolean dump_ts) +{ + GstRTSPMediaFactoryWFDPrivate *priv = - GST_RTSP_MEDIA_FACTORY_WFD_GET_PRIVATE (factory); ++ gst_rtsp_media_factory_wfd_get_instance_private (factory); + factory->priv = priv; + + priv->dump_ts = dump_ts; +} + +void +gst_rtsp_media_factory_wfd_set_negotiated_resolution (GstRTSPMediaFactory * + factory, guint32 width, guint32 height) +{ + GstRTSPMediaFactoryWFD *factory_wfd = GST_RTSP_MEDIA_FACTORY_WFD (factory); + GstRTSPMediaFactoryWFDPrivate *priv = factory_wfd->priv; + + priv->video_width = width; + priv->video_height = height; + _config_bitrate (factory_wfd); +} + +void +gst_rtsp_media_factory_wfd_set_audio_codec (GstRTSPMediaFactory * factory, + guint audio_codec) +{ + GstRTSPMediaFactoryWFD *factory_wfd = GST_RTSP_MEDIA_FACTORY_WFD (factory); + GstRTSPMediaFactoryWFDPrivate *priv = factory_wfd->priv; + + priv->audio_codec = audio_codec; +} + +void +gst_rtsp_media_factory_wfd_set_video_codec (GstRTSPMediaFactory * factory, + guint video_codec) +{ + GstRTSPMediaFactoryWFD *factory_wfd = GST_RTSP_MEDIA_FACTORY_WFD (factory); + GstRTSPMediaFactoryWFDPrivate *priv = factory_wfd->priv; + + priv->video_codec = video_codec; +} + +static void +_config_bitrate (GstRTSPMediaFactoryWFD * factory) +{ + 
GstRTSPMediaFactoryWFDPrivate *priv = factory->priv; + + if (priv->decided_udp_bitrate) { + priv->video_bitrate = priv->decide_udp_bitrate[0]; + priv->min_udp_bitrate = priv->decide_udp_bitrate[1]; + priv->max_udp_bitrate = priv->decide_udp_bitrate[2]; + + if ((priv->video_width * priv->video_height) >= (1920 * 1080)) { + priv->video_bitrate = priv->decide_udp_bitrate[3]; + priv->min_udp_bitrate = priv->decide_udp_bitrate[4]; + priv->max_udp_bitrate = priv->decide_udp_bitrate[5]; + } else if ((priv->video_width * priv->video_height) >= (1280 * 720)) { + priv->video_bitrate = priv->decide_udp_bitrate[6]; + priv->min_udp_bitrate = priv->decide_udp_bitrate[7]; + priv->max_udp_bitrate = priv->decide_udp_bitrate[8]; + } else if ((priv->video_width * priv->video_height) >= (960 * 540)) { + priv->video_bitrate = priv->decide_udp_bitrate[9]; + priv->min_udp_bitrate = priv->decide_udp_bitrate[10]; + priv->max_udp_bitrate = priv->decide_udp_bitrate[11]; + } else if ((priv->video_width * priv->video_height) >= (854 * 480)) { + priv->video_bitrate = priv->decide_udp_bitrate[12]; + priv->min_udp_bitrate = priv->decide_udp_bitrate[13]; + priv->max_udp_bitrate = priv->decide_udp_bitrate[14]; + } else if ((priv->video_width * priv->video_height) >= (640 * 480)) { + priv->video_bitrate = priv->decide_udp_bitrate[15]; + priv->min_udp_bitrate = priv->decide_udp_bitrate[16]; + priv->max_udp_bitrate = priv->decide_udp_bitrate[17]; + } + } +} + +void +gst_rtsp_media_factory_wfd_set_venc_bitrate (GstRTSPMediaFactory * factory, + gint bitrate) +{ + GstRTSPMediaFactoryWFD *factory_wfd = GST_RTSP_MEDIA_FACTORY_WFD (factory); + GstRTSPMediaFactoryWFDPrivate *priv = factory_wfd->priv; + + g_object_set (priv->venc, "target-bitrate", bitrate, NULL); + priv->video_bitrate = (guint) bitrate; +} + +void +gst_rtsp_media_factory_wfd_get_venc_bitrate (GstRTSPMediaFactory * factory, + gint * bitrate) +{ + int cur_bitrate = 0; + + GstRTSPMediaFactoryWFD *factory_wfd = GST_RTSP_MEDIA_FACTORY_WFD 
(factory); + GstRTSPMediaFactoryWFDPrivate *priv = factory_wfd->priv; + + g_object_get (priv->venc, "target-bitrate", &cur_bitrate, NULL); + + if (cur_bitrate == 0) { + *bitrate = priv->video_bitrate; + } else { + *bitrate = (gint) cur_bitrate; + } +} + +void +gst_rtsp_media_factory_wfd_get_config_bitrate (GstRTSPMediaFactory * factory, + guint32 * min, guint32 * max) +{ + GstRTSPMediaFactoryWFD *factory_wfd = GST_RTSP_MEDIA_FACTORY_WFD (factory); + GstRTSPMediaFactoryWFDPrivate *priv = factory_wfd->priv; + + *min = priv->min_udp_bitrate; + *max = priv->max_udp_bitrate; +} + +void +gst_rtsp_media_factory_wfd_set_config_bitrate (GstRTSPMediaFactoryWFD * factory, + guint * config_bitrate) +{ + GstRTSPMediaFactoryWFDPrivate *priv = factory->priv; + + gint idx = 0; + for (idx = 0; idx < 21; idx++) { + priv->decide_udp_bitrate[idx] = config_bitrate[idx]; + } + priv->decided_udp_bitrate = TRUE; + + _config_bitrate (factory); +} + +static void +gst_rtsp_media_factory_wfd_init (GstRTSPMediaFactoryWFD * factory) +{ + GstRTSPMediaFactoryWFDPrivate *priv = - GST_RTSP_MEDIA_FACTORY_WFD_GET_PRIVATE (factory); ++ gst_rtsp_media_factory_wfd_get_instance_private (factory); + factory->priv = priv; + + priv->launch = g_strdup (DEFAULT_LAUNCH); + priv->shared = DEFAULT_SHARED; + priv->protocols = DEFAULT_PROTOCOLS; + priv->buffer_size = DEFAULT_BUFFER_SIZE; + + //priv->videosrc_type = GST_WFD_VSRC_XIMAGESRC; + //priv->videosrc_type = GST_WFD_VSRC_XVIMAGESRC; + //priv->videosrc_type = GST_WFD_VSRC_CAMERASRC; + priv->videosrc_type = GST_WFD_VSRC_VIDEOTESTSRC; + priv->video_codec = GST_WFD_VIDEO_H264; + priv->video_encoder = g_strdup ("omxh264enc"); + priv->video_bitrate = 200000; + priv->video_width = 640; + priv->video_height = 480; + priv->video_framerate = 30; + priv->video_enc_skip_inbuf_value = 5; + priv->video_srcbin = NULL; + priv->min_udp_bitrate = 938861; + priv->max_udp_bitrate = 1572864; + priv->decided_udp_bitrate = FALSE; + + priv->audio_device = g_strdup 
("alsa_output.1.analog-stereo.monitor"); + priv->audio_codec = GST_WFD_AUDIO_AAC; + priv->audio_encoder_aac = g_strdup ("avenc_aac"); + priv->audio_encoder_ac3 = g_strdup ("avenc_ac3"); + priv->audio_latency_time = 10000; + priv->audio_buffer_time = 200000; + priv->audio_do_timestamp = FALSE; + priv->audio_channels = GST_WFD_CHANNEL_2; + priv->audio_freq = GST_WFD_FREQ_48000; + priv->audio_srcbin = NULL; + + g_mutex_init (&priv->direct_lock); + g_cond_init (&priv->direct_cond); + + priv->discover_pipeline = NULL; + priv->direct_pipe = NULL; + memset (&priv->res, 0x00, sizeof (GstRTPSMediaWFDTypeFindResult)); + priv->stream_bin = NULL; + priv->mux = NULL; + priv->mux_queue = NULL; + priv->pay = NULL; + + g_mutex_init (&priv->lock); +} + +static void +gst_rtsp_media_factory_wfd_finalize (GObject * obj) +{ + GstRTSPMediaFactoryWFD *factory = GST_RTSP_MEDIA_FACTORY_WFD (obj); + GstRTSPMediaFactoryWFDPrivate *priv = factory->priv; + + if (priv->permissions) + gst_rtsp_permissions_unref (priv->permissions); + g_free (priv->launch); + g_mutex_clear (&priv->lock); + + g_mutex_clear (&priv->direct_lock); + g_cond_clear (&priv->direct_cond); + + if (priv->audio_device) + g_free (priv->audio_device); + if (priv->audio_encoder_aac) + g_free (priv->audio_encoder_aac); + if (priv->audio_encoder_ac3) + g_free (priv->audio_encoder_ac3); + + if (priv->video_encoder) + g_free (priv->video_encoder); + + G_OBJECT_CLASS (gst_rtsp_media_factory_wfd_parent_class)->finalize (obj); +} + +GstRTSPMediaFactoryWFD * +gst_rtsp_media_factory_wfd_new (void) +{ + GstRTSPMediaFactoryWFD *result; + + result = g_object_new (GST_TYPE_RTSP_MEDIA_FACTORY_WFD, NULL); + + return result; +} + +static void +gst_rtsp_media_factory_wfd_get_property (GObject * object, + guint propid, GValue * value, GParamSpec * pspec) +{ + //GstRTSPMediaFactoryWFD *factory = GST_RTSP_MEDIA_FACTORY_WFD (object); + + switch (propid) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, propid, pspec); + } +} + +static void 
+gst_rtsp_media_factory_wfd_set_property (GObject * object, + guint propid, const GValue * value, GParamSpec * pspec) +{ + //GstRTSPMediaFactoryWFD *factory = GST_RTSP_MEDIA_FACTORY_WFD (object); + + switch (propid) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, propid, pspec); + } +} + +static GstPadProbeReturn +rtsp_media_wfd_dump_data (GstPad * pad, GstPadProbeInfo * info, gpointer u_data) +{ + guint8 *data; + gsize size; + FILE *f; + GstMapInfo mapinfo; + + if (info->type == (GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_PUSH)) { + GstBuffer *buffer = gst_pad_probe_info_get_buffer (info); + + gst_buffer_map (buffer, &mapinfo, GST_MAP_READ); + data = mapinfo.data; + size = gst_buffer_get_size (buffer); + + f = fopen ("/root/probe.ts", "a"); + if (f != NULL) { + fwrite (data, size, 1, f); + fclose (f); + } + gst_buffer_unmap (buffer, &mapinfo); + } + + return GST_PAD_PROBE_OK; +} + +static gboolean +_rtsp_media_factory_wfd_create_audio_capture_bin (GstRTSPMediaFactoryWFD * + factory, GstBin * srcbin) +{ + GstElement *audiosrc = NULL; + GstElement *acaps = NULL; + GstElement *acaps2 = NULL; + GstElement *aenc = NULL; + GstElement *audio_convert = NULL; + GstElement *aqueue = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + GstStructure *audio_properties_name = NULL; + + guint channels = 0; + gboolean is_enc_req = TRUE; + guint freq = 0; + g_autofree gchar *acodec = NULL; + + priv = factory->priv; + + if (priv->audio_codec == GST_WFD_AUDIO_UNKNOWN) { + GST_INFO_OBJECT (factory, "Skip create audio source"); + return TRUE; + } + + priv->audio_srcbin = (GstBin *)gst_bin_new ("audio"); + + /* create audio src element */ + audiosrc = gst_element_factory_make ("pulsesrc", "audiosrc"); + if (!audiosrc) { + GST_ERROR_OBJECT (factory, "failed to create audiosrc element"); + goto create_error; + } + + GST_INFO_OBJECT (factory, "audio device : %s", priv->audio_device); + GST_INFO_OBJECT (factory, "audio latency time : %"G_GUINT64_FORMAT, + 
priv->audio_latency_time); + GST_INFO_OBJECT (factory, "audio_buffer_time : %"G_GUINT64_FORMAT, + priv->audio_buffer_time); + GST_INFO_OBJECT (factory, "audio_do_timestamp : %d", + priv->audio_do_timestamp); + + audio_properties_name = gst_structure_new_from_string (priv->audio_device); + + g_object_set (audiosrc, "stream-properties", audio_properties_name, NULL); + g_object_set (audiosrc, "buffer-time", (gint64) priv->audio_buffer_time, + NULL); + g_object_set (audiosrc, "latency-time", (gint64) priv->audio_latency_time, + NULL); + g_object_set (audiosrc, "do-timestamp", (gboolean) priv->audio_do_timestamp, + NULL); + g_object_set (audiosrc, "provide-clock", (gboolean) FALSE, NULL); + g_object_set (audiosrc, "is-live", (gboolean) TRUE, NULL); + + if (priv->audio_codec == GST_WFD_AUDIO_LPCM) { + /* To meet miracast certification */ + gint64 block_size = 1920; + g_object_set (audiosrc, "blocksize", (gint64) block_size, NULL); + + audio_convert = gst_element_factory_make ("capssetter", "audio_convert"); + if (NULL == audio_convert) { + GST_ERROR_OBJECT (factory, "failed to create audio convert element"); + goto create_error; + } + g_object_set (audio_convert, "caps", gst_caps_new_simple ("audio/x-lpcm", + "width", G_TYPE_INT, 16, + "rate", G_TYPE_INT, 48000, + "channels", G_TYPE_INT, 2, + "dynamic_range", G_TYPE_INT, 0, + "emphasis", G_TYPE_BOOLEAN, FALSE, + "mute", G_TYPE_BOOLEAN, FALSE, NULL), NULL); + g_object_set (audio_convert, "join", (gboolean) FALSE, NULL); + g_object_set (audio_convert, "replace", (gboolean) TRUE, NULL); + + acaps2 = gst_element_factory_make ("capsfilter", "audiocaps2"); + if (NULL == acaps2) { + GST_ERROR_OBJECT (factory, "failed to create audio capsilfter element"); + goto create_error; + } + /* In case of LPCM, uses big endian */ + g_object_set (G_OBJECT (acaps2), "caps", + gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, "S16BE", + /* In case of LPCM, uses big endian */ + "rate", G_TYPE_INT, 48000, + "channels", G_TYPE_INT, 
2, NULL), NULL); + } + + /* create audio caps element */ + acaps = gst_element_factory_make ("capsfilter", "audiocaps"); + if (NULL == acaps) { + GST_ERROR_OBJECT (factory, "failed to create audio capsilfter element"); + goto create_error; + } + + if (priv->audio_channels == GST_WFD_CHANNEL_2) + channels = 2; + else if (priv->audio_channels == GST_WFD_CHANNEL_4) + channels = 4; + else if (priv->audio_channels == GST_WFD_CHANNEL_6) + channels = 6; + else if (priv->audio_channels == GST_WFD_CHANNEL_8) + channels = 8; + else + channels = 2; + + if (priv->audio_freq == GST_WFD_FREQ_44100) + freq = 44100; + else if (priv->audio_freq == GST_WFD_FREQ_48000) + freq = 48000; + else + freq = 44100; + + if (priv->audio_codec == GST_WFD_AUDIO_LPCM) { + g_object_set (G_OBJECT (acaps), "caps", + gst_caps_new_simple ("audio/x-lpcm", "width", G_TYPE_INT, 16, + "rate", G_TYPE_INT, 48000, + "channels", G_TYPE_INT, 2, + "dynamic_range", G_TYPE_INT, 0, + "emphasis", G_TYPE_BOOLEAN, FALSE, + "mute", G_TYPE_BOOLEAN, FALSE, NULL), NULL); + } else if ((priv->audio_codec == GST_WFD_AUDIO_AAC) + || (priv->audio_codec == GST_WFD_AUDIO_AC3)) { + g_object_set (G_OBJECT (acaps), "caps", gst_caps_new_simple ("audio/x-raw", + "endianness", G_TYPE_INT, 1234, "signed", G_TYPE_BOOLEAN, TRUE, + "depth", G_TYPE_INT, 16, "rate", G_TYPE_INT, freq, "channels", + G_TYPE_INT, channels, NULL), NULL); + } + + if (priv->audio_codec == GST_WFD_AUDIO_AAC) { + acodec = g_strdup (priv->audio_encoder_aac); + is_enc_req = TRUE; + } else if (priv->audio_codec == GST_WFD_AUDIO_AC3) { + acodec = g_strdup (priv->audio_encoder_ac3); + is_enc_req = TRUE; + } else if (priv->audio_codec == GST_WFD_AUDIO_LPCM) { + GST_DEBUG_OBJECT (factory, "No codec required, raw data will be sent"); + is_enc_req = FALSE; + } else { + GST_ERROR_OBJECT (factory, "Yet to support other than H264 format"); + goto create_error; + } + + if (is_enc_req) { + aenc = gst_element_factory_make (acodec, "audioenc"); + if (NULL == aenc) { + 
GST_ERROR_OBJECT (factory, "failed to create audio encoder element"); + goto create_error; + } + + g_object_set (aenc, "compliance", -2, NULL); + g_object_set (aenc, "tolerance", 400000000, NULL); + g_object_set (aenc, "bitrate", (guint) 128000, NULL); + g_object_set (aenc, "rate-control", 2, NULL); + + aqueue = gst_element_factory_make ("queue", "audio-queue"); + if (!aqueue) { + GST_ERROR_OBJECT (factory, "failed to create audio queue element"); + goto create_error; + } + + gst_bin_add_many (priv->audio_srcbin, audiosrc, acaps, aenc, aqueue, NULL); + gst_bin_add (srcbin, GST_ELEMENT (priv->audio_srcbin)); + + if (!gst_element_link_many (audiosrc, acaps, aenc, aqueue, NULL)) { + GST_ERROR_OBJECT (factory, "Failed to link audio src elements..."); + goto create_error; + } + } else { + aqueue = gst_element_factory_make ("queue", "audio-queue"); + if (!aqueue) { + GST_ERROR_OBJECT (factory, "failed to create audio queue element"); + goto create_error; + } + + gst_bin_add_many (priv->audio_srcbin, audiosrc, acaps2, audio_convert, acaps, aqueue, NULL); + gst_bin_add (srcbin, GST_ELEMENT (priv->audio_srcbin)); + + if (!gst_element_link_many (audiosrc, acaps2, audio_convert, acaps, aqueue, + NULL)) { + GST_ERROR_OBJECT (factory, "Failed to link audio src elements..."); + goto create_error; + } + } + + priv->audio_queue = aqueue; + if (audio_properties_name) + gst_structure_free (audio_properties_name); + return TRUE; + +create_error: + gst_object_unref (acaps); + gst_object_unref (aqueue); + if (audio_properties_name) + gst_structure_free (audio_properties_name); + return FALSE; +} + +static gboolean +_rtsp_media_factory_wfd_create_videotest_bin (GstRTSPMediaFactoryWFD * factory, + GstBin * srcbin) +{ + GstElement *videosrc = NULL; + GstElement *vcaps = NULL; + GstElement *videoconvert = NULL; + GstElement *venc_caps = NULL; + GstElement *venc = NULL; + GstElement *vparse = NULL; + GstElement *vqueue = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + priv = 
factory->priv; + + GST_INFO_OBJECT (factory, "picked videotestsrc as video source"); + priv->video_srcbin = (GstBin *)gst_bin_new ("video"); + + videosrc = gst_element_factory_make ("videotestsrc", "videosrc"); + if (NULL == videosrc) { + GST_ERROR_OBJECT (factory, "failed to create ximagesrc element"); + goto create_error; + } + + /* create video caps element */ + vcaps = gst_element_factory_make ("capsfilter", "videocaps"); + if (NULL == vcaps) { + GST_ERROR_OBJECT (factory, "failed to create video capsilfter element"); + goto create_error; + } + + g_object_set (G_OBJECT (vcaps), "caps", + gst_caps_new_simple ("video/x-raw", + "format", G_TYPE_STRING, "I420", + "width", G_TYPE_INT, priv->video_width, + "height", G_TYPE_INT, priv->video_height, + "framerate", GST_TYPE_FRACTION, priv->video_framerate, 1, NULL), + NULL); + + /* create video convert element */ + videoconvert = gst_element_factory_make ("videoconvert", "videoconvert"); + if (NULL == videoconvert) { + GST_ERROR_OBJECT (factory, "failed to create video videoconvert element"); + goto create_error; + } + + venc_caps = gst_element_factory_make ("capsfilter", "venc_caps"); + if (NULL == venc_caps) { + GST_ERROR_OBJECT (factory, "failed to create video capsilfter element"); + goto create_error; + } + + g_object_set (G_OBJECT (venc_caps), "caps", + gst_caps_new_simple ("video/x-raw", + "format", G_TYPE_STRING, "SN12", + "width", G_TYPE_INT, priv->video_width, + "height", G_TYPE_INT, priv->video_height, + "framerate", GST_TYPE_FRACTION, priv->video_framerate, 1, NULL), + NULL); + + if (priv->video_codec != GST_WFD_VIDEO_H264) { + GST_ERROR_OBJECT (factory, "Yet to support other than H264 format"); + goto create_error; + } + + venc = gst_element_factory_make (priv->video_encoder, "videoenc"); + if (!venc) { + GST_ERROR_OBJECT (factory, "failed to create video encoder element"); + goto create_error; + } + + g_object_set (venc, "aud", 0, NULL); + g_object_set (venc, "byte-stream", 1, NULL); + g_object_set (venc, 
"bitrate", 512, NULL); + + vparse = gst_element_factory_make ("h264parse", "videoparse"); + if (NULL == vparse) { + GST_ERROR_OBJECT (factory, "failed to create h264 parse element"); + goto create_error; + } + g_object_set (vparse, "config-interval", 1, NULL); + + vqueue = gst_element_factory_make ("queue", "video-queue"); + if (!vqueue) { + GST_ERROR_OBJECT (factory, "failed to create video queue element"); + goto create_error; + } + + gst_bin_add_many (priv->video_srcbin, videosrc, vcaps, videoconvert, venc_caps, venc, vparse, vqueue, NULL); + gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin)); + if (!gst_element_link_many (videosrc, vcaps, videoconvert, venc_caps, venc, + vparse, vqueue, NULL)) { + GST_ERROR_OBJECT (factory, "Failed to link video src elements..."); + goto create_error; + } + + priv->video_queue = vqueue; + priv->venc = venc; + + return TRUE; + +create_error: + gst_object_unref(videosrc); + gst_object_unref(vcaps); + gst_object_unref(videoconvert); + gst_object_unref(venc_caps); + gst_object_unref(venc); + gst_object_unref(vparse); + gst_object_unref(vqueue); + return FALSE; +} + +static gboolean +_rtsp_media_factory_wfd_create_waylandsrc_bin (GstRTSPMediaFactoryWFD * factory, + GstBin * srcbin) +{ + GstElement *videosrc = NULL; + GstElement *vcaps = NULL; + GstElement *venc = NULL; + GstElement *vparse = NULL; + GstElement *vqueue = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + priv = factory->priv; + + GST_INFO_OBJECT (factory, "picked waylandsrc as video source"); + + if (priv->video_codec == GST_WFD_VIDEO_UNKNOWN) { + GST_INFO_OBJECT (factory, "Skip create video source."); + return TRUE; + } + + priv->video_srcbin = (GstBin *)gst_bin_new ("video"); + + videosrc = gst_element_factory_make ("waylandsrc", "videosrc"); + if (NULL == videosrc) { + GST_ERROR_OBJECT (factory, "failed to create ximagesrc element"); + goto create_error; + } + + /* create video caps element */ + vcaps = gst_element_factory_make ("capsfilter", 
"videocaps"); + if (NULL == vcaps) { + GST_ERROR_OBJECT (factory, "failed to create video capsilfter element"); + goto create_error; + } + + g_object_set (G_OBJECT (vcaps), "caps", + gst_caps_new_simple ("video/x-raw", + "format", G_TYPE_STRING, "SN12", + "width", G_TYPE_INT, priv->video_width, + "height", G_TYPE_INT, priv->video_height, + "framerate", GST_TYPE_FRACTION, priv->video_framerate, 1, NULL), + NULL); + + if (priv->video_codec != GST_WFD_VIDEO_H264) { + GST_ERROR_OBJECT (factory, "Yet to support other than H264 format"); + goto create_error; + } + + venc = gst_element_factory_make (priv->video_encoder, "videoenc"); + if (!venc) { + GST_ERROR_OBJECT (factory, "failed to create video encoder element"); + goto create_error; + } + + g_object_set (venc, "aud", 0, NULL); + g_object_set (venc, "byte-stream", 1, NULL); + g_object_set (venc, "bitrate", 512, NULL); + g_object_set (venc, "target-bitrate", priv->video_bitrate, NULL); + + vparse = gst_element_factory_make ("h264parse", "videoparse"); + if (NULL == vparse) { + GST_ERROR_OBJECT (factory, "failed to create h264 parse element"); + goto create_error; + } + g_object_set (vparse, "config-interval", 1, NULL); + + vqueue = gst_element_factory_make ("queue", "video-queue"); + if (!vqueue) { + GST_ERROR_OBJECT (factory, "failed to create video queue element"); + goto create_error; + } + + gst_bin_add_many (priv->video_srcbin, videosrc, vcaps, venc, vparse, vqueue, NULL); + gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin)); + if (!gst_element_link_many (videosrc, vcaps, venc, vparse, vqueue, NULL)) { + GST_ERROR_OBJECT (factory, "Failed to link video src elements..."); + goto create_error; + } + + priv->video_queue = vqueue; + priv->venc = venc; + + return TRUE; + +create_error: + gst_object_unref (videosrc); + gst_object_unref (vqueue); + return FALSE; +} + +static gboolean +_rtsp_media_factory_wfd_create_camera_capture_bin (GstRTSPMediaFactoryWFD * + factory, GstBin * srcbin) +{ + GstElement *videosrc = 
NULL; + GstElement *vcaps = NULL; + GstElement *venc = NULL; + GstElement *vparse = NULL; + GstElement *vqueue = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + priv = factory->priv; + priv->video_srcbin = (GstBin *)gst_bin_new ("video"); + + videosrc = gst_element_factory_make ("camerasrc", "videosrc"); + if (NULL == videosrc) { + GST_ERROR_OBJECT (factory, "failed to create camerasrc element"); + goto create_error; + } + + /* create video caps element */ + vcaps = gst_element_factory_make ("capsfilter", "videocaps"); + if (NULL == vcaps) { + GST_ERROR_OBJECT (factory, "failed to create video capsilfter element"); + goto create_error; + } + + GST_INFO_OBJECT (factory, "picked camerasrc as video source"); + g_object_set (G_OBJECT (vcaps), "caps", + gst_caps_new_simple ("video/x-raw", + "width", G_TYPE_INT, priv->video_width, + "height", G_TYPE_INT, priv->video_height, + "format", G_TYPE_STRING, "SN12", + "framerate", GST_TYPE_FRACTION, priv->video_framerate, 1, NULL), + NULL); + + if (priv->video_codec != GST_WFD_VIDEO_H264) { + GST_ERROR_OBJECT (factory, "Yet to support other than H264 format"); + goto create_error; + } + + venc = gst_element_factory_make (priv->video_encoder, "videoenc"); + if (!venc) { + GST_ERROR_OBJECT (factory, "failed to create video encoder element"); + goto create_error; + } + + g_object_set (venc, "bitrate", priv->video_bitrate, NULL); + g_object_set (venc, "byte-stream", 1, NULL); + g_object_set (venc, "append-dci", 1, NULL); + + vparse = gst_element_factory_make ("h264parse", "videoparse"); + if (NULL == vparse) { + GST_ERROR_OBJECT (factory, "failed to create h264 parse element"); + goto create_error; + } + g_object_set (vparse, "config-interval", 1, NULL); + + vqueue = gst_element_factory_make ("queue", "video-queue"); + if (!vqueue) { + GST_ERROR_OBJECT (factory, "failed to create video queue element"); + goto create_error; + } + + gst_bin_add_many (priv->video_srcbin, videosrc, vcaps, venc, vparse, vqueue, NULL); + 
gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin)); + + if (!gst_element_link_many (videosrc, vcaps, venc, vparse, vqueue, NULL)) { + GST_ERROR_OBJECT (factory, "Failed to link video src elements..."); + goto create_error; + } + + priv->video_queue = vqueue; + priv->venc = venc; + + return TRUE; + +create_error: + gst_object_unref (videosrc); + gst_object_unref (vqueue); + return FALSE; +} + +static gboolean +_rtsp_media_factory_wfd_create_xcapture_bin (GstRTSPMediaFactoryWFD * factory, + GstBin * srcbin) +{ + GstElement *videosrc = NULL; + GstElement *vcaps = NULL; + GstElement *venc_caps = NULL; + GstElement *videoconvert = NULL, *videoscale = NULL; + GstElement *venc = NULL; + GstElement *vparse = NULL; + GstElement *vqueue = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + priv = factory->priv; + + GST_INFO_OBJECT (factory, "picked ximagesrc as video source"); + priv->video_srcbin = (GstBin *)gst_bin_new ("video"); + + videosrc = gst_element_factory_make ("ximagesrc", "videosrc"); + if (NULL == videosrc) { + GST_ERROR_OBJECT (factory, "failed to create ximagesrc element"); + goto create_error; + } + + videoscale = gst_element_factory_make ("videoscale", "videoscale"); + if (NULL == videoscale) { + GST_ERROR_OBJECT (factory, "failed to create videoscale element"); + goto create_error; + } + + videoconvert = gst_element_factory_make ("videoconvert", "videoconvert"); + if (NULL == videoconvert) { + GST_ERROR_OBJECT (factory, "failed to create videoconvert element"); + goto create_error; + } + + /* create video caps element */ + vcaps = gst_element_factory_make ("capsfilter", "videocaps"); + if (NULL == vcaps) { + GST_ERROR_OBJECT (factory, "failed to create video capsilfter element"); + goto create_error; + } + + g_object_set (G_OBJECT (vcaps), "caps", + gst_caps_new_simple ("video/x-raw", + "width", G_TYPE_INT, priv->video_width, + "height", G_TYPE_INT, priv->video_height, + "framerate", GST_TYPE_FRACTION, priv->video_framerate, 1, NULL), + NULL); + + 
if (priv->video_codec != GST_WFD_VIDEO_H264) { + GST_ERROR_OBJECT (factory, "Yet to support other than H264 format"); + goto create_error; + } + + venc = gst_element_factory_make (priv->video_encoder, "videoenc"); + if (!venc) { + GST_ERROR_OBJECT (factory, "failed to create video encoder element"); + goto create_error; + } + + g_object_set (venc, "aud", 0, NULL); + g_object_set (venc, "byte-stream", 1, NULL); + g_object_set (venc, "bitrate", 512, NULL); + + venc_caps = gst_element_factory_make ("capsfilter", "venc_caps"); + if (NULL == venc_caps) { + GST_ERROR_OBJECT (factory, "failed to create video capsilfter element"); + goto create_error; + } + + g_object_set (G_OBJECT (venc_caps), "caps", + gst_caps_new_simple ("video/x-h264", + "profile", G_TYPE_STRING, "baseline", NULL), NULL); + + vparse = gst_element_factory_make ("h264parse", "videoparse"); + if (NULL == vparse) { + GST_ERROR_OBJECT (factory, "failed to create h264 parse element"); + goto create_error; + } + g_object_set (vparse, "config-interval", 1, NULL); + + vqueue = gst_element_factory_make ("queue", "video-queue"); + if (!vqueue) { + GST_ERROR_OBJECT (factory, "failed to create video queue element"); + goto create_error; + } + + gst_bin_add_many (priv->video_srcbin, videosrc, videoscale, videoconvert, vcaps, venc, + venc_caps, vparse, vqueue, NULL); + gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin)); + if (!gst_element_link_many (videosrc, videoscale, videoconvert, vcaps, venc, + venc_caps, vparse, vqueue, NULL)) { + GST_ERROR_OBJECT (factory, "Failed to link video src elements..."); + goto create_error; + } + + priv->video_queue = vqueue; + priv->venc = venc; + + return TRUE; + +create_error: + gst_object_unref(videosrc); + gst_object_unref(vcaps); + gst_object_unref(venc_caps); + gst_object_unref(videoconvert); + gst_object_unref(videoscale); + gst_object_unref(venc); + gst_object_unref(vparse); + gst_object_unref(vqueue); + return FALSE; +} + +static gboolean 
+_rtsp_media_factory_wfd_create_xvcapture_bin (GstRTSPMediaFactoryWFD * factory, + GstBin * srcbin) +{ + GstElement *videosrc = NULL; + GstElement *vcaps = NULL; + GstElement *venc = NULL; + GstElement *vparse = NULL; + GstElement *vqueue = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + priv = factory->priv; + + GST_INFO_OBJECT (factory, "picked xvimagesrc as video source"); + priv->video_srcbin = (GstBin *)gst_bin_new ("video"); + + videosrc = gst_element_factory_make ("xvimagesrc", "videosrc"); + if (NULL == videosrc) { + GST_ERROR_OBJECT (factory, "failed to create xvimagesrc element"); + goto create_error; + } + + /* create video caps element */ + vcaps = gst_element_factory_make ("capsfilter", "videocaps"); + if (NULL == vcaps) { + GST_ERROR_OBJECT (factory, "failed to create video capsilfter element"); + goto create_error; + } + + g_object_set (G_OBJECT (vcaps), "caps", + gst_caps_new_simple ("video/x-raw", + "width", G_TYPE_INT, priv->video_width, + "height", G_TYPE_INT, priv->video_height, + "format", G_TYPE_STRING, "SN12", + "framerate", GST_TYPE_FRACTION, priv->video_framerate, 1, NULL), + NULL); + + if (priv->video_codec != GST_WFD_VIDEO_H264) { + GST_ERROR_OBJECT (factory, "Yet to support other than H264 format"); + goto create_error; + } + + venc = gst_element_factory_make (priv->video_encoder, "videoenc"); + if (!venc) { + GST_ERROR_OBJECT (factory, "failed to create video encoder element"); + goto create_error; + } + g_object_set (venc, "bitrate", priv->video_bitrate, NULL); + g_object_set (venc, "byte-stream", 1, NULL); + g_object_set (venc, "append-dci", 1, NULL); + g_object_set (venc, "idr-period", 120, NULL); + g_object_set (venc, "skip-inbuf", priv->video_enc_skip_inbuf_value, NULL); + + vparse = gst_element_factory_make ("h264parse", "videoparse"); + if (NULL == vparse) { + GST_ERROR_OBJECT (factory, "failed to create h264 parse element"); + goto create_error; + } + g_object_set (vparse, "config-interval", 1, NULL); + + vqueue = 
gst_element_factory_make ("queue", "video-queue"); + if (!vqueue) { + GST_ERROR_OBJECT (factory, "failed to create video queue element"); + goto create_error; + } + + gst_bin_add_many (priv->video_srcbin, videosrc, vcaps, venc, vparse, vqueue, NULL); + gst_bin_add (srcbin, GST_ELEMENT (priv->video_srcbin)); + if (!gst_element_link_many (videosrc, vcaps, venc, vparse, vqueue, NULL)) { + GST_ERROR_OBJECT (factory, "Failed to link video src elements..."); + goto create_error; + } + + priv->video_queue = vqueue; + priv->venc = venc; + + return TRUE; + +create_error: + gst_object_unref (videosrc); + gst_object_unref (vqueue); + + return FALSE; +} + +static GstElement * +_rtsp_media_factory_wfd_create_srcbin (GstRTSPMediaFactoryWFD * factory) +{ + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + GstBin *srcbin = NULL; + GstElement *mux = NULL; + GstElement *mux_queue = NULL; + GstElement *payload = NULL; + GstPad *srcpad = NULL; + GstPad *mux_vsinkpad = NULL; + GstPad *mux_asinkpad = NULL; + GstPad *ghost_pad = NULL; + + priv = factory->priv; + + /* create source bin */ + srcbin = GST_BIN (gst_bin_new ("srcbin")); + if (!srcbin) { + GST_ERROR_OBJECT (factory, "failed to create source bin..."); + goto create_error; + } + + GST_INFO_OBJECT (factory, "Check video codec... 
%d", priv->video_codec); + /* create video src element */ + switch (priv->videosrc_type) { + case GST_WFD_VSRC_XIMAGESRC: + if (!_rtsp_media_factory_wfd_create_xcapture_bin (factory, srcbin)) { + GST_ERROR_OBJECT (factory, "failed to create xcapture bin..."); + goto create_error; + } + break; + case GST_WFD_VSRC_XVIMAGESRC: + if (!_rtsp_media_factory_wfd_create_xvcapture_bin (factory, srcbin)) { + GST_ERROR_OBJECT (factory, "failed to create xvcapture bin..."); + goto create_error; + } + break; + case GST_WFD_VSRC_CAMERASRC: + if (!_rtsp_media_factory_wfd_create_camera_capture_bin (factory, srcbin)) { + GST_ERROR_OBJECT (factory, "failed to create camera capture bin..."); + goto create_error; + } + break; + case GST_WFD_VSRC_VIDEOTESTSRC: + if (!_rtsp_media_factory_wfd_create_videotest_bin (factory, srcbin)) { + GST_ERROR_OBJECT (factory, "failed to create videotestsrc bin..."); + goto create_error; + } + break; + case GST_WFD_VSRC_WAYLANDSRC: + if (!_rtsp_media_factory_wfd_create_waylandsrc_bin (factory, srcbin)) { + GST_ERROR_OBJECT (factory, "failed to create videotestsrc bin..."); + goto create_error; + } + break; + default: + GST_ERROR_OBJECT (factory, "unknow mode selected..."); + goto create_error; + } + + mux = gst_element_factory_make ("mpegtsmux", "tsmux"); + if (!mux) { + GST_ERROR_OBJECT (factory, "failed to create muxer element"); + goto create_error; + } + + g_object_set (mux, "wfd-mode", TRUE, NULL); + + mux_queue = gst_element_factory_make ("queue", "muxer-queue"); + if (!mux_queue) { + GST_ERROR_OBJECT (factory, "failed to create muxer-queue element"); + goto create_error; + } + + g_object_set (mux_queue, "max-size-buffers", 20000, NULL); + + payload = gst_element_factory_make ("rtpmp2tpay", "pay0"); + if (!payload) { + GST_ERROR_OBJECT (factory, "failed to create payload element"); + goto create_error; + } + + g_object_set (payload, "pt", 33, NULL); + g_object_set (payload, "mtu", priv->mtu_size, NULL); + g_object_set (payload, "rtp-flush", 
(gboolean) TRUE, NULL); + + gst_bin_add_many (srcbin, mux, mux_queue, payload, NULL); + + if (!gst_element_link_many (mux, mux_queue, payload, NULL)) { + GST_ERROR_OBJECT (factory, "Failed to link muxer & payload..."); + goto create_error; + } + + if (priv->video_codec > GST_WFD_VIDEO_UNKNOWN) { + /* request video sink pad from muxer, which has elementary pid 0x1011 */ - mux_vsinkpad = gst_element_get_request_pad (mux, "sink_4113"); ++ mux_vsinkpad = gst_element_request_pad_simple (mux, "sink_4113"); + if (!mux_vsinkpad) { + GST_ERROR_OBJECT (factory, "Failed to get sink pad from muxer..."); + goto create_error; + } + + /* request srcpad from video queue */ + srcpad = gst_element_get_static_pad (priv->video_queue, "src"); + if (!srcpad) { + GST_ERROR_OBJECT (factory, "Failed to get srcpad from video queue..."); + goto create_error; + } + ghost_pad = gst_ghost_pad_new ("video_src", srcpad); + gst_element_add_pad (GST_ELEMENT (priv->video_srcbin), ghost_pad); + + if (gst_pad_link (ghost_pad, mux_vsinkpad) != GST_PAD_LINK_OK) { + GST_ERROR_OBJECT (factory, + "Failed to link video queue src pad & muxer video sink pad..."); + goto create_error; + } + + gst_object_unref (mux_vsinkpad); + gst_object_unref (srcpad); + srcpad = NULL; + ghost_pad = NULL; + } + + GST_INFO_OBJECT (factory, "Check audio codec... 
%d", priv->audio_codec); + + /* create audio source elements & add to pipeline */ + if (!_rtsp_media_factory_wfd_create_audio_capture_bin (factory, srcbin)) + goto create_error; + + if (priv->audio_codec > GST_WFD_AUDIO_UNKNOWN) { + /* request audio sink pad from muxer, which has elementary pid 0x1100 */ - mux_asinkpad = gst_element_get_request_pad (mux, "sink_4352"); ++ mux_asinkpad = gst_element_request_pad_simple (mux, "sink_4352"); + if (!mux_asinkpad) { + GST_ERROR_OBJECT (factory, "Failed to get sinkpad from muxer..."); + goto create_error; + } + + /* request srcpad from audio queue */ + srcpad = gst_element_get_static_pad (priv->audio_queue, "src"); + if (!srcpad) { + GST_ERROR_OBJECT (factory, "Failed to get srcpad from audio queue..."); + goto create_error; + } + ghost_pad = gst_ghost_pad_new ("audio_src", srcpad); + gst_element_add_pad (GST_ELEMENT (priv->audio_srcbin), ghost_pad); + + /* link audio queue's srcpad & muxer sink pad */ + if (gst_pad_link (ghost_pad, mux_asinkpad) != GST_PAD_LINK_OK) { + GST_ERROR_OBJECT (factory, + "Failed to link audio queue src pad & muxer audio sink pad..."); + goto create_error; + } + gst_object_unref (mux_asinkpad); + gst_object_unref (srcpad); + } + + if (priv->dump_ts) + { + GstPad *pad_probe = NULL; + pad_probe = gst_element_get_static_pad (mux, "src"); + + if (NULL == pad_probe) { + GST_INFO_OBJECT (factory, "pad for probe not created"); + } else { + GST_INFO_OBJECT (factory, "pad for probe SUCCESSFUL"); + } + gst_pad_add_probe (pad_probe, GST_PAD_PROBE_TYPE_BUFFER, + rtsp_media_wfd_dump_data, factory, NULL); + if (pad_probe) + gst_object_unref (pad_probe); + } + + GST_DEBUG_OBJECT (factory, "successfully created source bin..."); + + priv->stream_bin = srcbin; + priv->mux = gst_object_ref (mux); + priv->mux_queue = gst_object_ref (mux_queue); + priv->pay = gst_object_ref (payload); + + return GST_ELEMENT_CAST (srcbin); + +create_error: + GST_ERROR_OBJECT (factory, "Failed to create pipeline"); + if (mux_vsinkpad) + 
gst_object_unref (mux_vsinkpad); + if (mux_asinkpad) + gst_object_unref (mux_asinkpad); + if (srcpad) + gst_object_unref (srcpad); + if (srcbin) + gst_object_unref (srcbin); + return NULL; +} + +static GstElement * +rtsp_media_factory_wfd_create_element (GstRTSPMediaFactory * factory, + const GstRTSPUrl * url) +{ + GstRTSPMediaFactoryWFD *_factory = GST_RTSP_MEDIA_FACTORY_WFD_CAST (factory); + GstElement *element = NULL; + + GST_RTSP_MEDIA_FACTORY_WFD_LOCK (factory); + + element = _rtsp_media_factory_wfd_create_srcbin (_factory); + + GST_RTSP_MEDIA_FACTORY_WFD_UNLOCK (factory); + + return element; +} + +static GstRTSPMedia * +rtsp_media_factory_wfd_construct (GstRTSPMediaFactory * factory, + const GstRTSPUrl * url) +{ + GstRTSPMedia *media; + GstElement *element, *pipeline; + GstRTSPMediaFactoryClass *klass; + + klass = GST_RTSP_MEDIA_FACTORY_GET_CLASS (factory); + + if (!klass->create_pipeline) + goto no_create; + + element = gst_rtsp_media_factory_create_element (factory, url); + if (element == NULL) + goto no_element; + + /* create a new empty media */ + media = gst_rtsp_media_new (element); + //media = g_object_new (GST_TYPE_RTSP_MEDIA_EXT, "element", element, NULL); + + gst_rtsp_media_collect_streams (media); + + pipeline = klass->create_pipeline (factory, media); + if (pipeline == NULL) + goto no_pipeline; + + return media; + + /* ERRORS */ +no_create: + { + g_critical ("no create_pipeline function"); + return NULL; + } +no_element: + { + g_critical ("could not create element"); + return NULL; + } +no_pipeline: + { + g_critical ("can't create pipeline"); + g_object_unref (media); + return NULL; + } +} + +gint type_detected = FALSE; +gint linked = FALSE; +static gint in_pad_probe; + +static GstPadProbeReturn +_rtsp_media_factory_wfd_restore_pipe_probe_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data) +{ + GstPad *old_src = NULL; + GstPad *sink = NULL; + GstPad *old_sink = NULL; + GstPad *new_src = NULL; + GstRTSPMediaFactoryWFD *factory = NULL; + 
GstRTSPMediaFactoryWFDPrivate *priv = NULL; + GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL; + + if (!g_atomic_int_compare_and_exchange (&in_pad_probe, FALSE, TRUE)) + return GST_PAD_PROBE_OK; + + factory = (GstRTSPMediaFactoryWFD *) user_data; + priv = factory->priv; + pipe_data = priv->direct_pipe; + + gst_element_sync_state_with_parent (GST_ELEMENT(priv->audio_srcbin)); + gst_element_sync_state_with_parent (GST_ELEMENT(priv->video_srcbin)); + gst_element_sync_state_with_parent (GST_ELEMENT(priv->mux)); + gst_element_sync_state_with_parent (GST_ELEMENT(priv->mux_queue)); + + sink = gst_element_get_static_pad (priv->pay, "sink"); + old_src = gst_pad_get_peer (sink); + gst_pad_unlink (old_src, sink); + + new_src = gst_element_get_static_pad (priv->mux_queue, "src"); + old_sink = gst_pad_get_peer (new_src); + gst_pad_unlink (new_src, old_sink); + gst_element_set_state (priv->stub_fs, GST_STATE_NULL); + gst_bin_remove ((GstBin *)priv->stream_bin, priv->stub_fs); + + gst_pad_link (new_src, sink); + gst_object_unref (new_src); + gst_object_unref (old_sink); + + gst_element_set_state (GST_ELEMENT(pipe_data->pipeline), GST_STATE_PAUSED); + + /* signal that new pipeline linked */ + g_mutex_lock (&priv->direct_lock); + g_cond_signal (&priv->direct_cond); + linked = TRUE; + g_mutex_unlock (&priv->direct_lock); + + return GST_PAD_PROBE_REMOVE; +} + +static gboolean +_rtsp_media_factory_wfd_destroy_direct_pipe(void *user_data) +{ + GstRTSPMediaFactoryWFD *factory = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL; + + factory = (GstRTSPMediaFactoryWFD *) user_data; + priv = factory->priv; + pipe_data = priv->direct_pipe; + + GST_DEBUG_OBJECT (factory, "Deleting pipeline"); + gst_element_set_state (GST_ELEMENT(pipe_data->pipeline), GST_STATE_NULL); + gst_bin_remove ((GstBin *)priv->stream_bin, GST_ELEMENT(pipe_data->pipeline)); + g_free (pipe_data); + g_signal_emit (factory, + 
gst_rtsp_media_factory_wfd_signals[SIGNAL_DIRECT_STREAMING_END], 0, NULL); + return FALSE; +} + +static void +_rtsp_media_factory_wfd_demux_pad_added_cb (GstElement *element, + GstPad *pad, + gpointer data) +{ + GstPad *sinkpad = NULL; + GstRTSPMediaFactoryWFD *factory = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL; + + GstCaps *caps = gst_pad_get_current_caps (pad); + g_autofree gchar *pad_name = gst_pad_get_name (pad); + g_autofree gchar *caps_string = gst_caps_to_string (caps); + g_autofree gchar *temp_caps = NULL; + + gst_caps_unref (caps); + + factory = (GstRTSPMediaFactoryWFD *) data; + priv = factory->priv; + pipe_data = priv->direct_pipe; + temp_caps = g_ascii_strdown(caps_string, -1); + + if (g_strrstr (temp_caps, "audio")) { + sinkpad = gst_element_get_static_pad (pipe_data->ap, "sink"); + if (gst_pad_is_linked (sinkpad)) { + gst_object_unref (sinkpad); + GST_DEBUG_OBJECT (factory, "pad linked"); + return; + } + if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK) + GST_DEBUG_OBJECT (factory, "can't link demux %s pad", pad_name); + + gst_object_unref (sinkpad); + sinkpad = NULL; + } + + if (g_strrstr (temp_caps, "video")) { + if (g_strrstr (temp_caps, "h264")) { + sinkpad = gst_element_get_static_pad (pipe_data->vp, "sink"); + if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK) + GST_DEBUG_OBJECT (factory, "can't link demux %s pad", pad_name); + + gst_object_unref (sinkpad); + sinkpad = NULL; + } + } +} + +static GstPadProbeReturn +_rtsp_media_factory_wfd_pay_pad_probe_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data) +{ + GstPad *old_src = NULL; + GstPad *sink = NULL; + GstPad *old_sink = NULL; + GstPad *new_src = NULL; + GstPad *fas_sink = NULL; + GstPad *gp = NULL; + GstRTSPMediaFactoryWFD *factory = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL; + + if (!g_atomic_int_compare_and_exchange (&in_pad_probe, FALSE, TRUE)) + 
return GST_PAD_PROBE_OK; + + factory = (GstRTSPMediaFactoryWFD *) user_data; + priv = factory->priv; + pipe_data = priv->direct_pipe; + + sink = gst_element_get_static_pad (priv->pay, "sink"); + old_src = gst_pad_get_peer (sink); + gst_pad_unlink (old_src, sink); + + new_src = gst_element_get_static_pad (pipe_data->tsmux, "src"); + old_sink = gst_pad_get_peer (new_src); + gst_pad_unlink (new_src, old_sink); + gst_element_set_state (pipe_data->mux_fs, GST_STATE_NULL); + gst_bin_remove ((GstBin *)pipe_data->pipeline, pipe_data->mux_fs); + + gp = gst_ghost_pad_new ("audio_file", new_src); + gst_pad_set_active(gp,TRUE); + gst_element_add_pad (GST_ELEMENT (pipe_data->pipeline), gp); + gst_pad_link (gp, sink); + gst_object_unref (new_src); + gst_object_unref (old_sink); + + priv->stub_fs = gst_element_factory_make ("fakesink", NULL); + gst_bin_add (priv->stream_bin, priv->stub_fs); + gst_element_sync_state_with_parent (priv->stub_fs); + fas_sink = gst_element_get_static_pad (priv->stub_fs, "sink"); + gst_pad_link (old_src, fas_sink); + gst_object_unref (old_src); + gst_object_unref (fas_sink); + gst_element_set_state (GST_ELEMENT(priv->audio_srcbin), GST_STATE_PAUSED); + gst_element_set_state (GST_ELEMENT(priv->video_srcbin), GST_STATE_PAUSED); + gst_element_set_state (GST_ELEMENT(priv->mux), GST_STATE_PAUSED); + gst_element_set_state (GST_ELEMENT(priv->mux_queue), GST_STATE_PAUSED); + + /* signal that new pipeline linked */ + g_mutex_lock (&priv->direct_lock); + linked = TRUE; + g_cond_signal (&priv->direct_cond); + g_mutex_unlock (&priv->direct_lock); + + return GST_PAD_PROBE_REMOVE; +} + +static gboolean +_rtsp_media_factory_wfd_relink_pipeline(GstRTSPMediaFactoryWFD * factory) +{ + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + GstPad *probe_pad = NULL; + gint64 end_time = 0; + + priv = factory->priv; + + probe_pad = gst_element_get_static_pad (priv->pay, "sink"); + if (probe_pad == NULL) + return FALSE; + + in_pad_probe = FALSE; + linked = FALSE; + gst_pad_add_probe 
(probe_pad, GST_PAD_PROBE_TYPE_IDLE, _rtsp_media_factory_wfd_restore_pipe_probe_cb, factory, NULL); + + g_mutex_lock (&factory->priv->direct_lock); + end_time = g_get_monotonic_time () + 5 * G_TIME_SPAN_SECOND; + if (!g_cond_wait_until (&factory->priv->direct_cond, &factory->priv->direct_lock, end_time)) { + g_mutex_unlock (&factory->priv->direct_lock); + GST_ERROR_OBJECT (factory, "Failed to relink pipeline"); + return linked; + } + g_mutex_unlock (&factory->priv->direct_lock); + return linked; +} + + +static GstPadProbeReturn +_rtsp_media_factory_wfd_src_pad_probe_cb(GstPad * pad, GstPadProbeInfo * info, gpointer user_data) +{ + GstRTSPMediaFactoryWFD *factory = NULL; + GstEvent *event = GST_PAD_PROBE_INFO_EVENT(info); + + factory = (GstRTSPMediaFactoryWFD *) user_data; + + if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) { + GST_INFO_OBJECT (factory, "Got event: %s in direct streaming", GST_EVENT_TYPE_NAME (event)); + info->data = NULL; + info->data = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, gst_structure_new_empty ("fillEOS")); + + if (!_rtsp_media_factory_wfd_relink_pipeline(factory)) { + GST_ERROR_OBJECT (factory, "Failed to relink pipeline"); + return GST_PAD_PROBE_REMOVE; + } + + g_idle_add((GSourceFunc)_rtsp_media_factory_wfd_destroy_direct_pipe, factory); + return GST_PAD_PROBE_REMOVE; + } + + return GST_PAD_PROBE_OK; +} + +static gboolean +_rtsp_media_factory_wfd_create_direct_pipeline(GstRTSPMediaFactoryWFD * factory) +{ + GstElement *src = NULL; + GstElement *demux = NULL; + g_autofree gchar *path = NULL; + g_autofree gchar *elem_name = NULL; + GstPad *srcpad = NULL; + GstPad *mux_vsinkpad = NULL; + GstPad *mux_asinkpad = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + GstRTSPMediaWFDDirectPipelineData *pipe_data = NULL; + + priv = factory->priv; + pipe_data = priv->direct_pipe; + + pipe_data->pipeline = (GstBin *) gst_bin_new ("direct"); + + src = gst_element_factory_create(priv->res.src_fact, NULL); + demux = 
gst_element_factory_create(priv->res.demux_fact, NULL); + pipe_data->ap = gst_element_factory_make ("aacparse", NULL); + pipe_data->vp = gst_element_factory_make ("h264parse", NULL); + pipe_data->aq = gst_element_factory_make ("queue", NULL); + pipe_data->vq = gst_element_factory_make ("queue", NULL); + pipe_data->tsmux = gst_element_factory_make ("mpegtsmux", NULL); + pipe_data->mux_fs = gst_element_factory_make ("fakesink", NULL); + + if (src == NULL || demux == NULL || pipe_data->tsmux == NULL || + pipe_data->ap == NULL || pipe_data->vp == NULL || + pipe_data->aq == NULL || pipe_data->vq == NULL || + pipe_data->mux_fs == NULL) { + GST_ERROR_OBJECT (factory, "Not all element created"); + return FALSE; + } + + elem_name = g_ascii_strdown(g_type_name(G_OBJECT_TYPE(src)), -1); + + if (g_strrstr (elem_name, "file")) { + path = g_filename_from_uri (pipe_data->uri, NULL, NULL); + + if (path == NULL) { + GST_ERROR_OBJECT(factory, "No file path"); + return FALSE; + } + g_object_set (src, "location", path, NULL); + } else + g_object_set (src, "uri", pipe_data->uri, NULL); + + gst_bin_add_many (pipe_data->pipeline, src, demux, pipe_data->ap, + pipe_data->vp, pipe_data->aq, pipe_data->vq, + pipe_data->tsmux, pipe_data->mux_fs, NULL); + + if (!gst_element_link (src, demux)) { + GST_ERROR_OBJECT (factory, "Can't link src with demux"); + return FALSE; + } + + if (!gst_element_link (pipe_data->ap, pipe_data->aq)) { + GST_ERROR_OBJECT (factory, "Can't link audio parse and queue"); + return FALSE; + } + + if (!gst_element_link (pipe_data->vp, pipe_data->vq)) { + GST_ERROR_OBJECT (factory, "Can't link video parse and queue"); + return FALSE; + } + + if (!gst_element_link (pipe_data->tsmux, pipe_data->mux_fs)) { + GST_DEBUG_OBJECT (factory, "Can't link muxer and fakesink"); + return FALSE; + } + + g_signal_connect_object (demux, "pad-added", G_CALLBACK (_rtsp_media_factory_wfd_demux_pad_added_cb), factory, 0); + + gst_bin_add (priv->stream_bin, GST_ELEMENT (pipe_data->pipeline)); + 
+ + /* request video sink pad from muxer, which has elementary pid 0x1011 */ - mux_vsinkpad = gst_element_get_request_pad (pipe_data->tsmux, "sink_4113"); ++ mux_vsinkpad = gst_element_request_pad_simple (pipe_data->tsmux, "sink_4113"); + if (!mux_vsinkpad) { + GST_ERROR_OBJECT (factory, "Failed to get sink pad from muxer..."); + return FALSE; + } + + /* request srcpad from video queue */ + srcpad = gst_element_get_static_pad (pipe_data->vq, "src"); + if (!srcpad) { + GST_ERROR_OBJECT (factory, "Failed to get srcpad from video queue..."); + } + + if (gst_pad_link (srcpad, mux_vsinkpad) != GST_PAD_LINK_OK) { + GST_ERROR_OBJECT (factory, "Failed to link video queue src pad & muxer video sink pad..."); + return FALSE; + } + + gst_object_unref (mux_vsinkpad); + gst_object_unref (srcpad); + srcpad = NULL; + + /* request audio sink pad from muxer, which has elementary pid 0x1100 */ - mux_asinkpad = gst_element_get_request_pad (pipe_data->tsmux, "sink_4352"); ++ mux_asinkpad = gst_element_request_pad_simple (pipe_data->tsmux, "sink_4352"); + if (!mux_asinkpad) { + GST_ERROR_OBJECT (factory, "Failed to get sinkpad from muxer..."); + return FALSE; + } + + /* request srcpad from audio queue */ + srcpad = gst_element_get_static_pad (pipe_data->aq, "src"); + if (!srcpad) { + GST_ERROR_OBJECT (factory, "Failed to get srcpad from audio queue..."); + return FALSE; + } + + /* link audio queue's srcpad & muxer sink pad */ + if (gst_pad_link (srcpad, mux_asinkpad) != GST_PAD_LINK_OK) { + GST_ERROR_OBJECT (factory, "Failed to link audio queue src pad & muxer audio sink pad..."); + return FALSE; + } + gst_object_unref (mux_asinkpad); + gst_object_unref (srcpad); + srcpad = NULL; + + gst_element_sync_state_with_parent (GST_ELEMENT (pipe_data->pipeline)); + + srcpad = gst_element_get_static_pad (priv->pay, "sink"); + + in_pad_probe = FALSE; + gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_IDLE, _rtsp_media_factory_wfd_pay_pad_probe_cb, factory, NULL); + gst_pad_add_probe (srcpad, 
GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, _rtsp_media_factory_wfd_src_pad_probe_cb, factory, NULL); + + return TRUE; +} + +static void +_rtsp_media_factory_wfd_decodebin_element_added_cb (GstElement *decodebin, + GstElement *child, void *user_data) +{ + g_autofree gchar *elem_name = g_ascii_strdown(g_type_name(G_OBJECT_TYPE(child)), -1); + GstRTSPMediaFactoryWFD *factory = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + factory = (GstRTSPMediaFactoryWFD *) user_data; + priv = factory->priv; + + if (g_strrstr (elem_name, "h264")) + priv->res.h264_found++; + if (g_strrstr (elem_name, "aac")) + priv->res.aac_found++; + if (g_strrstr (elem_name, "ac3")) + priv->res.ac3_found++; + if (g_strrstr (elem_name, "demux")) + priv->res.demux_fact = gst_element_get_factory(child); +} + +static void +_rtsp_media_factory_wfd_uridecodebin_element_added_cb (GstElement *uridecodebin, + GstElement *child, void *user_data) +{ + g_autofree gchar *elem_name = g_ascii_strdown(g_type_name(G_OBJECT_TYPE(child)), -1); + GstRTSPMediaFactoryWFD *factory = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + factory = (GstRTSPMediaFactoryWFD *) user_data; + priv = factory->priv; + + if (g_strrstr (elem_name, "src")) + priv->res.src_fact = gst_element_get_factory(child); + + if (G_OBJECT_TYPE(child) == priv->decodebin_type) + g_signal_connect_object (child, "element-added", + G_CALLBACK (_rtsp_media_factory_wfd_decodebin_element_added_cb), factory, 0); +} + +static void +_rtsp_media_factory_wfd_discover_pad_added_cb (GstElement *uridecodebin, GstPad *pad, + GstBin *pipeline) +{ + GstPad *sinkpad = NULL; + GstCaps *caps; + + GstElement *queue = gst_element_factory_make ("queue", NULL); + GstElement *sink = gst_element_factory_make ("fakesink", NULL); + + if (G_UNLIKELY (queue == NULL || sink == NULL)) + goto error; + + g_object_set (sink, "silent", TRUE, NULL); + g_object_set (queue, "max-size-buffers", 1, "silent", TRUE, NULL); + + caps = gst_pad_query_caps (pad, NULL); + + sinkpad = 
gst_element_get_static_pad (queue, "sink"); + if (sinkpad == NULL) + goto error; + + gst_caps_unref (caps); + + gst_bin_add_many (pipeline, queue, sink, NULL); + + if (!gst_element_link_pads_full (queue, "src", sink, "sink", + GST_PAD_LINK_CHECK_NOTHING)) + goto error; + if (!gst_element_sync_state_with_parent (sink)) + goto error; + if (!gst_element_sync_state_with_parent (queue)) + goto error; + + if (gst_pad_link_full (pad, sinkpad, + GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK) + goto error; + gst_object_unref (sinkpad); + + return; + +error: + if (sinkpad) + gst_object_unref (sinkpad); + if (queue) + gst_object_unref (queue); + if (sink) + gst_object_unref (sink); + return; +} + +static void +_rtsp_media_factory_wfd_uridecode_no_pad_cb (GstElement * uridecodebin, void * user_data) +{ + GstRTSPMediaFactoryWFD *factory = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + factory = (GstRTSPMediaFactoryWFD *) user_data; + priv = factory->priv; + type_detected = TRUE; + g_main_loop_quit (priv->discover_loop); +} + +static void +_rtsp_media_factory_wfd_discover_pipe_bus_call (GstBus *bus, + GstMessage *msg, + gpointer data) +{ + GstRTSPMediaFactoryWFD *factory = NULL; + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + + factory = (GstRTSPMediaFactoryWFD *) data; + priv = factory->priv; + + switch (GST_MESSAGE_TYPE (msg)) { + case GST_MESSAGE_ERROR: { + GError *error = NULL; + + gst_message_parse_error (msg, &error, NULL); + + GST_ERROR_OBJECT (factory, "Error: %s", error->message); + g_error_free (error); + + type_detected = FALSE; + g_main_loop_quit (priv->discover_loop); + break; + } + default: + break; + } +} + +static gboolean +_rtsp_media_factory_wfd_find_media_type (GstRTSPMediaFactoryWFD * factory, gchar *uri) +{ + GstRTSPMediaFactoryWFDPrivate *priv = NULL; + GstElement *uridecode = NULL; + GstElement *tmp = NULL; + GstBus *bus; + GMainContext *context; + GSource *source; + + priv = factory->priv; + + context = g_main_context_new(); + 
priv->discover_loop = g_main_loop_new(context, FALSE); + + tmp = gst_element_factory_make ("decodebin", NULL); + priv->decodebin_type = G_OBJECT_TYPE (tmp); + gst_object_unref (tmp); + + /* if a URI was provided, use it instead of the default one */ + priv->discover_pipeline = (GstBin *) gst_pipeline_new ("Discover"); + uridecode = gst_element_factory_make("uridecodebin", "uri"); + g_object_set (G_OBJECT (uridecode), "uri", uri, NULL); + gst_bin_add (priv->discover_pipeline, uridecode); - if (priv->decodebin_type == NULL || priv->discover_pipeline == NULL || uridecode == NULL) { ++ if (priv->decodebin_type == 0 || priv->discover_pipeline == NULL || uridecode == NULL) { + GST_INFO_OBJECT (factory, "Failed to create type find pipeline"); + type_detected = FALSE; + return FALSE; + } + + /* we add a message handler */ + bus = gst_pipeline_get_bus (GST_PIPELINE (priv->discover_pipeline)); + source = gst_bus_create_watch (bus); + gst_bus_add_signal_watch (bus); + + g_source_set_callback (source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL); + g_source_attach (source, context); + g_signal_connect_object (bus, "message", + G_CALLBACK (_rtsp_media_factory_wfd_discover_pipe_bus_call), factory, 0); + + g_signal_connect_object (uridecode, "pad-added", + G_CALLBACK (_rtsp_media_factory_wfd_discover_pad_added_cb), priv->discover_pipeline, 0); + g_signal_connect_object (uridecode, "element-added", + G_CALLBACK (_rtsp_media_factory_wfd_uridecodebin_element_added_cb), + factory, 0); + g_signal_connect_object (uridecode, "no-more-pads", + G_CALLBACK (_rtsp_media_factory_wfd_uridecode_no_pad_cb), factory, 0); + gst_element_set_state (GST_ELEMENT (priv->discover_pipeline), GST_STATE_PLAYING); + + g_main_loop_run(priv->discover_loop); + + gst_element_set_state (GST_ELEMENT (priv->discover_pipeline), GST_STATE_NULL); + g_source_destroy(source); + g_source_unref (source); + g_main_loop_unref(priv->discover_loop); + g_main_context_unref(context); + gst_object_unref(bus); + 
gst_object_unref (GST_OBJECT (priv->discover_pipeline)); + + return TRUE; +} + +gint +gst_rtsp_media_factory_wfd_uri_type_find(GstRTSPMediaFactory *factory, + gchar *filesrc, guint8 *detected_video_codec, guint8 *detected_audio_codec) +{ + GstRTSPMediaFactoryWFD *_factory = GST_RTSP_MEDIA_FACTORY_WFD_CAST (factory); + GstRTSPMediaFactoryWFDPrivate *priv = _factory->priv; + + type_detected = FALSE; + + _rtsp_media_factory_wfd_find_media_type (_factory, filesrc); + + if (type_detected == FALSE) { + GST_ERROR_OBJECT (_factory, "Media type cannot be detected"); + return GST_RTSP_ERROR; + } + GST_INFO_OBJECT (_factory, "Media type detected"); + + if (priv->res.h264_found) + *detected_video_codec = GST_WFD_VIDEO_H264; + + if (priv->res.aac_found) + *detected_audio_codec = GST_WFD_AUDIO_AAC; + + if (priv->res.ac3_found) + *detected_audio_codec = GST_WFD_AUDIO_AC3; + + return GST_RTSP_OK; +} + +gint +gst_rtsp_media_factory_wfd_set_direct_streaming(GstRTSPMediaFactory * factory, + gint direct_streaming, gchar *filesrc) +{ + GstRTSPMediaFactoryWFD *_factory = GST_RTSP_MEDIA_FACTORY_WFD_CAST (factory); + linked = FALSE; + + if (direct_streaming == 0) { + if (!_rtsp_media_factory_wfd_relink_pipeline(_factory)) { + GST_ERROR_OBJECT (factory, "Failed to relink pipeline"); + return GST_RTSP_ERROR; + } + + _rtsp_media_factory_wfd_destroy_direct_pipe ((void *)_factory); + + GST_INFO_OBJECT (_factory, "Direct streaming bin removed"); + + return GST_RTSP_OK; + } + + _factory->priv->direct_pipe = g_new0 (GstRTSPMediaWFDDirectPipelineData, 1); + _factory->priv->direct_pipe->uri = g_strdup(filesrc); + + if (!_rtsp_media_factory_wfd_create_direct_pipeline(_factory)) { + GST_ERROR_OBJECT (_factory, "Direct pipeline creation failed"); + return GST_RTSP_ERROR; + } + + g_mutex_lock (&_factory->priv->direct_lock); + while (linked != TRUE) { + gint64 end_time = g_get_monotonic_time () + 5 * G_TIME_SPAN_SECOND; + if (!g_cond_wait_until (&_factory->priv->direct_cond, 
&_factory->priv->direct_lock, end_time)) { + g_mutex_unlock (&_factory->priv->direct_lock); + GST_ERROR_OBJECT (_factory, "Direct pipeline linking failed"); + return GST_RTSP_ERROR; + } + } + g_mutex_unlock (&_factory->priv->direct_lock); + + GST_INFO_OBJECT (_factory, "Direct streaming bin created"); + + return GST_RTSP_OK; +} diff --cc subprojects/gst-rtsp-server/gst/rtsp-server/rtsp-server-wfd.c index d29061f0bb,0000000000..379a096852 mode 100644,000000..100644 --- a/subprojects/gst-rtsp-server/gst/rtsp-server/rtsp-server-wfd.c +++ b/subprojects/gst-rtsp-server/gst/rtsp-server/rtsp-server-wfd.c @@@ -1,495 -1,0 +1,490 @@@ +/* GStreamer + * Copyright (C) 2015 Samsung Electronics Hyunjun Ko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ +/* + * SECTION:rtsp-server + * @short_description: The main server object + * @see_also: #GstRTSPClient, #GstRTSPThreadPool + * + * The server object is the object listening for connections on a port and + * creating #GstRTSPClient objects to handle those connections. + * + * The server will listen on the address set with gst_rtsp_server_set_address() + * and the port or service configured with gst_rtsp_server_set_service(). 
+ * Use gst_rtsp_server_set_backlog() to configure the amount of pending requests + * that the server will keep. By default the server listens on the current + * network (0.0.0.0) and port 8554. + * + * The server will require an SSL connection when a TLS certificate has been + * set in the auth object with gst_rtsp_auth_set_tls_certificate(). + * + * To start the server, use gst_rtsp_server_attach() to attach it to a + * #GMainContext. For more control, gst_rtsp_server_create_source() and + * gst_rtsp_server_create_socket() can be used to get a #GSource and #GSocket + * respectively. + * + * gst_rtsp_server_transfer_connection() can be used to transfer an existing + * socket to the RTSP server, for example from an HTTP server. + * + * Once the server socket is attached to a mainloop, it will start accepting + * connections. When a new connection is received, a new #GstRTSPClient object + * is created to handle the connection. The new client will be configured with + * the server #GstRTSPAuth, #GstRTSPMountPoints, #GstRTSPSessionPool and + * #GstRTSPThreadPool. + * + * The server uses the configured #GstRTSPThreadPool object to handle the + * remainder of the communication with this client. 
+ * + * Last reviewed on 2013-07-11 (1.0.0) + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include + +#include "rtsp-server-wfd.h" +#include "rtsp-client-wfd.h" + - #define GST_RTSP_WFD_SERVER_GET_PRIVATE(obj) \ - (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_RTSP_WFD_SERVER, GstRTSPWFDServerPrivate)) - +#define GST_RTSP_WFD_SERVER_GET_LOCK(server) (&(GST_RTSP_WFD_SERVER_CAST(server)->priv->lock)) +#define GST_RTSP_WFD_SERVER_LOCK(server) (g_mutex_lock(GST_RTSP_WFD_SERVER_GET_LOCK(server))) +#define GST_RTSP_WFD_SERVER_UNLOCK(server) (g_mutex_unlock(GST_RTSP_WFD_SERVER_GET_LOCK(server))) + +struct _GstRTSPWFDServerPrivate +{ + GMutex lock; /* protects everything in this struct */ + + /* the clients that are connected */ + GList *clients; + guint64 native_resolution; + guint64 supported_resolution; + guint8 audio_codec; + guint8 video_codec; + gint wfd2_supported; + gboolean coupling_mode; +}; + - G_DEFINE_TYPE (GstRTSPWFDServer, gst_rtsp_wfd_server, GST_TYPE_RTSP_SERVER); ++G_DEFINE_TYPE_WITH_PRIVATE (GstRTSPWFDServer, gst_rtsp_wfd_server, GST_TYPE_RTSP_SERVER); + +GST_DEBUG_CATEGORY_STATIC (rtsp_wfd_server_debug); +#define GST_CAT_DEFAULT rtsp_wfd_server_debug + +static void gst_rtsp_wfd_server_get_property (GObject * object, guint propid, + GValue * value, GParamSpec * pspec); +static void gst_rtsp_wfd_server_set_property (GObject * object, guint propid, + const GValue * value, GParamSpec * pspec); +static void gst_rtsp_wfd_server_finalize (GObject * object); + +static GstRTSPClient *create_client_wfd (GstRTSPServer * server); +static void client_connected_wfd (GstRTSPServer * server, + GstRTSPClient * client); + +static void +gst_rtsp_wfd_server_class_init (GstRTSPWFDServerClass * klass) +{ + GObjectClass *gobject_class; + GstRTSPServerClass *rtsp_server_class; + - g_type_class_add_private (klass, sizeof (GstRTSPWFDServerPrivate)); - + gobject_class = G_OBJECT_CLASS (klass); + rtsp_server_class = GST_RTSP_SERVER_CLASS (klass); + + 
gobject_class->get_property = gst_rtsp_wfd_server_get_property; + gobject_class->set_property = gst_rtsp_wfd_server_set_property; + gobject_class->finalize = gst_rtsp_wfd_server_finalize; + + rtsp_server_class->create_client = create_client_wfd; + rtsp_server_class->client_connected = client_connected_wfd; + + + GST_DEBUG_CATEGORY_INIT (rtsp_wfd_server_debug, "rtspwfdserver", 0, + "GstRTSPWFDServer"); +} + +static void +gst_rtsp_wfd_server_init (GstRTSPWFDServer * server) +{ - GstRTSPWFDServerPrivate *priv = GST_RTSP_WFD_SERVER_GET_PRIVATE (server); ++ GstRTSPWFDServerPrivate *priv = gst_rtsp_wfd_server_get_instance_private (server); + + g_return_if_fail (priv != NULL); + + server->priv = priv; + server->priv->native_resolution = 0; + server->priv->supported_resolution = 1; + server->priv->audio_codec = 2; + server->priv->coupling_mode = FALSE; + GST_INFO_OBJECT (server, "New server is initialized"); +} + +static void +gst_rtsp_wfd_server_finalize (GObject * object) +{ + GstRTSPWFDServer *server = GST_RTSP_WFD_SERVER (object); + //GstRTSPWFDServerPrivate *priv = server->priv; + + GST_DEBUG_OBJECT (server, "finalize server"); + + G_OBJECT_CLASS (gst_rtsp_wfd_server_parent_class)->finalize (object); +} + +/* + * gst_rtsp_server_new: + * + * Create a new #GstRTSPWFDServer instance. 
+ */ +GstRTSPWFDServer * +gst_rtsp_wfd_server_new (void) +{ + GstRTSPWFDServer *result; + + result = g_object_new (GST_TYPE_RTSP_WFD_SERVER, NULL); + + return result; +} + +static void +gst_rtsp_wfd_server_get_property (GObject * object, guint propid, + GValue * value, GParamSpec * pspec) +{ + //GstRTSPWFDServer *server = GST_RTSP_WFD_SERVER (object); + + switch (propid) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, propid, pspec); + } +} + +static void +gst_rtsp_wfd_server_set_property (GObject * object, guint propid, + const GValue * value, GParamSpec * pspec) +{ + //GstRTSPWFDServer *server = GST_RTSP_WFD_SERVER (object); + + switch (propid) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, propid, pspec); + } +} + +static gboolean +_start_wfd (gpointer data) +{ + GstRTSPWFDClient *client = (GstRTSPWFDClient *) data; + + GST_INFO_OBJECT (client, "WFD client is STARTing"); + + gst_rtsp_wfd_client_start_wfd (client); + return FALSE; +} + +static void +client_connected_wfd (GstRTSPServer * server, GstRTSPClient * client) +{ + gchar *server_addr = NULL; + GST_INFO_OBJECT (server, "Client is connected"); + + server_addr = gst_rtsp_server_get_address (server); + gst_rtsp_wfd_client_set_host_address (GST_RTSP_WFD_CLIENT_CAST (client), + server_addr); + g_free (server_addr); + g_idle_add (_start_wfd, client); + return; +} + +static GstRTSPClient * +create_client_wfd (GstRTSPServer * server) +{ + GstRTSPWFDClient *client; + GstRTSPThreadPool *thread_pool = NULL; + GstRTSPSessionPool *session_pool = NULL; + GstRTSPMountPoints *mount_points = NULL; + GstRTSPAuth *auth = NULL; - GstRTSPWFDServerPrivate *priv = GST_RTSP_WFD_SERVER_GET_PRIVATE (server); ++ GstRTSPWFDServerPrivate *priv = gst_rtsp_wfd_server_get_instance_private (server); + + g_return_val_if_fail (priv != NULL, NULL); + + GST_INFO_OBJECT (server, "New Client is being created"); + + /* a new client connected, create a session to handle the client. 
*/ + client = gst_rtsp_wfd_client_new (); + + thread_pool = gst_rtsp_server_get_thread_pool (server); + session_pool = gst_rtsp_server_get_session_pool (server); + mount_points = gst_rtsp_server_get_mount_points (server); + auth = gst_rtsp_server_get_auth (server); + + /* set the session pool that this client should use */ + GST_RTSP_WFD_SERVER_LOCK (server); + gst_rtsp_client_set_session_pool (GST_RTSP_CLIENT_CAST (client), + session_pool); + /* set the mount points that this client should use */ + gst_rtsp_client_set_mount_points (GST_RTSP_CLIENT_CAST (client), + mount_points); + /* set authentication manager */ + gst_rtsp_client_set_auth (GST_RTSP_CLIENT_CAST (client), auth); + /* set threadpool */ + gst_rtsp_client_set_thread_pool (GST_RTSP_CLIENT_CAST (client), thread_pool); + + gst_rtsp_wfd_client_set_video_supported_resolution (client, + priv->supported_resolution); + + gst_rtsp_wfd_client_set_video_native_resolution (client, + priv->native_resolution); + + gst_rtsp_wfd_client_set_audio_codec (client, priv->audio_codec); + + gst_rtsp_wfd_client_set_video_codec (client, priv->video_codec); + + gst_rtsp_wfd_client_set_coupling_mode (client, priv->coupling_mode); + + /* enable or disable R2 features following ini */ + gst_rtsp_wfd_client_set_wfd2_supported (client, priv->wfd2_supported); + + GST_RTSP_WFD_SERVER_UNLOCK (server); + + return GST_RTSP_CLIENT (client); +} + +GstRTSPResult +gst_rtsp_wfd_server_trigger_request (GstRTSPServer * server, + GstWFDTriggerType type) +{ + GstRTSPResult res = GST_RTSP_OK; + GList *clients, *walk, *next; + + g_return_val_if_fail (GST_IS_RTSP_SERVER (server), GST_RTSP_ERROR); + + clients = gst_rtsp_server_client_filter (server, NULL, NULL); + if (clients == NULL) { + GST_ERROR_OBJECT (server, "There is no client in this server"); + } + + for (walk = clients; walk; walk = next) { + GstRTSPClient *client = walk->data; + + next = g_list_next (walk); + + res = + gst_rtsp_wfd_client_trigger_request (GST_RTSP_WFD_CLIENT (client), + 
type); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (server, "Failed to send trigger request %d", type); + } + g_object_unref (client); + } + + return res; + +} + +GstRTSPResult +gst_rtsp_wfd_server_set_supported_reso (GstRTSPWFDServer * server, + guint64 supported_reso) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDServerPrivate *priv = GST_RTSP_WFD_SERVER_GET_PRIVATE (server); ++ GstRTSPWFDServerPrivate *priv = gst_rtsp_wfd_server_get_instance_private (server); + + g_return_val_if_fail (GST_IS_RTSP_WFD_SERVER (server), GST_RTSP_ERROR); + g_return_val_if_fail (priv != NULL, GST_RTSP_ERROR); + + GST_RTSP_WFD_SERVER_LOCK (server); + + priv->supported_resolution = supported_reso; + + GST_RTSP_WFD_SERVER_UNLOCK (server); + return res; +} + +GstRTSPResult +gst_rtsp_wfd_server_set_video_native_reso (GstRTSPWFDServer * server, + guint64 native_reso) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDServerPrivate *priv = GST_RTSP_WFD_SERVER_GET_PRIVATE (server); ++ GstRTSPWFDServerPrivate *priv = gst_rtsp_wfd_server_get_instance_private (server); + + g_return_val_if_fail (GST_IS_RTSP_WFD_SERVER (server), GST_RTSP_ERROR); + g_return_val_if_fail (priv != NULL, GST_RTSP_ERROR); + + GST_RTSP_WFD_SERVER_LOCK (server); + + priv->native_resolution = native_reso; + + GST_RTSP_WFD_SERVER_UNLOCK (server); + return res; +} + +GstRTSPResult +gst_rtsp_wfd_server_set_video_codec (GstRTSPWFDServer * server, + guint8 video_codec) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDServerPrivate *priv = GST_RTSP_WFD_SERVER_GET_PRIVATE (server); ++ GstRTSPWFDServerPrivate *priv = gst_rtsp_wfd_server_get_instance_private (server); + + g_return_val_if_fail (GST_IS_RTSP_WFD_SERVER (server), GST_RTSP_ERROR); + g_return_val_if_fail (priv != NULL, GST_RTSP_ERROR); + + GST_RTSP_WFD_SERVER_LOCK (server); + + priv->video_codec = video_codec; + + GST_RTSP_WFD_SERVER_UNLOCK (server); + return res; +} + +GstRTSPResult +gst_rtsp_wfd_server_set_audio_codec (GstRTSPWFDServer * server, + guint8 
audio_codec) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDServerPrivate *priv = GST_RTSP_WFD_SERVER_GET_PRIVATE (server); ++ GstRTSPWFDServerPrivate *priv = gst_rtsp_wfd_server_get_instance_private (server); + + g_return_val_if_fail (GST_IS_RTSP_WFD_SERVER (server), GST_RTSP_ERROR); + g_return_val_if_fail (priv != NULL, GST_RTSP_ERROR); + + GST_RTSP_WFD_SERVER_LOCK (server); + + priv->audio_codec = audio_codec; + + GST_RTSP_WFD_SERVER_UNLOCK (server); + return res; +} + +GstRTSPResult +gst_rtsp_wfd_server_set_direct_streaming (GstRTSPWFDServer *server, + gint direct_streaming, gchar *urisrc) +{ + GstRTSPResult res = GST_RTSP_OK; + GList *clients, *walk, *next; + + g_return_val_if_fail (GST_IS_RTSP_SERVER (server), GST_RTSP_ERROR); + + clients = gst_rtsp_server_client_filter (GST_RTSP_SERVER(server), NULL, NULL); + if (clients == NULL) { + GST_ERROR_OBJECT (server, "There is no client in this server"); + } + + for (walk = clients; walk; walk = next) { + GstRTSPClient *client = walk->data; + + next = g_list_next (walk); + + res = + gst_rtsp_wfd_client_set_direct_streaming (GST_RTSP_WFD_CLIENT (client), + direct_streaming, urisrc); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (server, "Failed to set direct streaming to %d", direct_streaming); + } + g_object_unref (client); + } + + return res; +} + + +GstRTSPResult +gst_rtsp_wfd_server_set_coupling_mode (GstRTSPWFDServer * server, + gboolean coupling_mode) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDServerPrivate *priv = GST_RTSP_WFD_SERVER_GET_PRIVATE (server); ++ GstRTSPWFDServerPrivate *priv = gst_rtsp_wfd_server_get_instance_private (server); + + g_return_val_if_fail (GST_IS_RTSP_WFD_SERVER (server), GST_RTSP_ERROR); + g_return_val_if_fail (priv != NULL, GST_RTSP_ERROR); + + GST_RTSP_WFD_SERVER_LOCK (server); + priv->coupling_mode = coupling_mode; + GST_RTSP_WFD_SERVER_UNLOCK (server); + return res; +} + +GstRTSPResult +gst_rtsp_wfd_server_switch_to_udp (GstRTSPWFDServer *server) +{ + GstRTSPResult res 
= GST_RTSP_OK; + GList *clients, *walk, *next; + + g_return_val_if_fail (GST_IS_RTSP_SERVER (server), GST_RTSP_ERROR); + + clients = gst_rtsp_server_client_filter (GST_RTSP_SERVER(server), NULL, NULL); + if (clients == NULL) { + GST_ERROR_OBJECT (server, "There is no client in this server"); + } + + for (walk = clients; walk; walk = next) { + GstRTSPClient *client = walk->data; + + next = g_list_next (walk); + + res = + gst_rtsp_wfd_client_switch_to_udp (GST_RTSP_WFD_CLIENT (client)); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (server, "Failed to switch transport to UDP"); + } + g_object_unref (client); + } + + return res; +} + +GstRTSPResult +gst_rtsp_wfd_server_switch_to_tcp (GstRTSPWFDServer *server) +{ + GstRTSPResult res = GST_RTSP_OK; + GList *clients, *walk, *next; + + g_return_val_if_fail (GST_IS_RTSP_SERVER (server), GST_RTSP_ERROR); + + clients = gst_rtsp_server_client_filter (GST_RTSP_SERVER(server), NULL, NULL); + if (clients == NULL) { + GST_ERROR_OBJECT (server, "There is no client in this server"); + } + + for (walk = clients; walk; walk = next) { + GstRTSPClient *client = walk->data; + + next = g_list_next (walk); + + res = + gst_rtsp_wfd_client_switch_to_tcp (GST_RTSP_WFD_CLIENT (client)); + if (res != GST_RTSP_OK) { + GST_ERROR_OBJECT (server, "Failed to switch transport to TCP"); + } + g_object_unref (client); + } + + return res; +} + +GstRTSPResult +gst_rtsp_wfd_server_set_wfd2_supported (GstRTSPWFDServer *server, + guint flag) +{ + GstRTSPResult res = GST_RTSP_OK; - GstRTSPWFDServerPrivate *priv = GST_RTSP_WFD_SERVER_GET_PRIVATE (server); ++ GstRTSPWFDServerPrivate *priv = gst_rtsp_wfd_server_get_instance_private (server); + + g_return_val_if_fail (GST_IS_RTSP_WFD_SERVER (server), GST_RTSP_ERROR); + g_return_val_if_fail (priv != NULL, GST_RTSP_ERROR); + + GST_RTSP_WFD_SERVER_LOCK (server); + + priv->wfd2_supported = flag; + + GST_RTSP_WFD_SERVER_UNLOCK (server); + return res; +} diff --cc subprojects/gstreamer/gst/gstquery.c index 
f5e3f2f335,b96fe2b407..577c2d2e50 --- a/subprojects/gstreamer/gst/gstquery.c +++ b/subprojects/gstreamer/gst/gstquery.c @@@ -109,9 -106,8 +109,10 @@@ static GstQueryQuarks query_quarks[] = {GST_QUERY_DRAIN, "drain", 0}, {GST_QUERY_CONTEXT, "context", 0}, {GST_QUERY_BITRATE, "bitrate", 0}, +#ifdef TIZEN_PROFILE_TV + {GST_QUERY_RESOURCE, "resource", 0}, +#endif + {GST_QUERY_SELECTABLE, "selectable", 0}, - {0, NULL, 0} };