--- /dev/null
- upstream_branch = upstream/1.6
+[general]
++upstream_branch = upstream/1.16
+upstream_tag = ${upstreamversion}
-DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
+DISTCHECK_CONFIGURE_FLAGS=--disable-gtk-doc
ALWAYS_SUBDIRS = \
gst sys ext \
tests \
- docs \
po \
common \
m4 \
AUTHORS COPYING NEWS README RELEASE REQUIREMENTS \
ChangeLog gst-plugins-good.doap autogen.sh \
$(shell find "$(top_srcdir)" -type f -name meson.build ! -path "$(top_srcdir)/$(PACKAGE_TARNAME)-*" ) \
- meson_options.txt config.h.meson
+ meson_options.txt
DISTCLEANFILES = _stdint.h
include $(top_srcdir)/common/po.mak
check-valgrind:
- cd tests/check && make check-valgrind
+ $(MAKE) -C tests/check check-valgrind
if HAVE_GST_CHECK
check-torture:
- cd tests/check && make torture
+ $(MAKE) -C tests/check torture
+ build-checks:
+ $(MAKE) -C tests/check build-checks
else
check-torture:
true
+ build-checks:
+ true
endif
include $(top_srcdir)/common/coverage/lcov.mak
$(top_builddir)/gst/videofilter/.libs/*videoflip.{so,dll,DLL,dylib} \
$(top_builddir)/gst/videofilter/.libs/*videobalance.{so,dll,DLL,dylib} \
$(top_builddir)/gst/videofilter/.libs/*gamma.{so,dll,DLL,dylib} \
- $(top_builddir)/sys/oss4/.libs/libgstoss4audio.so
+ $(top_builddir)/sys/directsound/.libs/libgstdirectsoundsink.{dll,DLL} \
+ $(top_builddir)/sys/oss4/.libs/libgstoss4audio.so \
+ $(top_builddir)/sys/waveform/.libs/libgstwaveformsink.{dll,DLL}
CRUFT_DIRS = \
$(top_srcdir)/docs/plugins/tmpl \
dnl initialize autoconf
dnl releases only do -Wall, git and prerelease does -Werror too
dnl use a three digit version number for releases, and four for git/pre
- AC_INIT([GStreamer Good Plug-ins],[1.12.2],[http://bugzilla.gnome.org/enter_bug.cgi?product=GStreamer],[gst-plugins-good])
+ AC_INIT([GStreamer Good Plug-ins],[1.16.2],[http://bugzilla.gnome.org/enter_bug.cgi?product=GStreamer],[gst-plugins-good])
AG_GST_INIT
dnl sets host_* variables
AC_CANONICAL_HOST
+ dnl PKG_CONFIG_SYSROOT_DIR is a valid environment variable
+ m4_pattern_allow(PKG_CONFIG_SYSROOT_DIR)
+
dnl use pretty build output with automake >= 1.11
m4_ifdef([AM_SILENT_RULES],[AM_SILENT_RULES([yes])],
[AM_DEFAULT_VERBOSITY=1
[GStreamer API Version])
AG_GST_LIBTOOL_PREPARE
- AS_LIBTOOL(GST, 1202, 0, 1202)
+ AS_LIBTOOL(GST, 1602, 0, 1602)
dnl *** required versions of GStreamer stuff ***
- GST_REQ=1.12.0
- GSTPB_REQ=1.12.0
+ GST_REQ=1.16.2
+ GSTPB_REQ=1.16.2
dnl *** autotools stuff ****
dnl Determine endianness
AC_C_BIGENDIAN
+ dnl *** software ***
+
+ dnl check for large file support
+ dnl affected plugins must include config.h
+ AC_SYS_LARGEFILE
+
dnl *** checks for programs ***
dnl find a compiler
dnl Check for mmap (needed by electricfence plugin)
AC_FUNC_MMAP
+ AC_CHECK_SIZEOF([off_t])
AM_CONDITIONAL(GST_HAVE_MMAP, test "x$ac_cv_func_mmap_fixed_mapped" = "xyes")
dnl Check for mmap (needed by electricfence plugin)
AC_MSG_NOTICE(Using GStreamer Core Plugins in $GST_PLUGINS_DIR)
AC_MSG_NOTICE(Using GStreamer Base Plugins in $GSTPB_PLUGINS_DIR)
+ AG_GST_PKG_CHECK_MODULES(GST_GL, gstreamer-gl-[$GST_API_VERSION], [$GST_REQ], no)
+
+ dnl FIXME: if uninstalled setup we might not want to pick up an installed gst-gl?
+ if test "x$HAVE_GST_GL" = "xyes"; then
+
+ AC_MSG_CHECKING([GStreamer OpenGL window systems ...])
+ GST_GL_WINSYS=`$PKG_CONFIG --variable=gl_winsys gstreamer-gl-1.0`
+ AC_MSG_RESULT([$GST_GL_WINSYS])
+ GST_GL_HAVE_WINDOW_X11=""
+ GST_GL_HAVE_WINDOW_WAYLAND=""
+ GST_GL_HAVE_WINDOW_ANDROID=""
+ GST_GL_HAVE_WINDOW_COCOA=""
+ GST_GL_HAVE_WINDOW_EAGL=""
+ case "$GST_GL_WINSYS" in *x11*) GST_GL_HAVE_WINDOW_X11="1" ;; esac
+ case "$GST_GL_WINSYS" in *wayland*) GST_GL_HAVE_WINDOW_WAYLAND="1" ;; esac
+ case "$GST_GL_WINSYS" in *android*) GST_GL_HAVE_WINDOW_ANDROID="1" ;; esac
+ case "$GST_GL_WINSYS" in *cocoa*) GST_GL_HAVE_WINDOW_COCOA="1" ;; esac
+ case "$GST_GL_WINSYS" in *eagl*) GST_GL_HAVE_WINDOW_EAGL="1" ;; esac
+
+ AC_MSG_CHECKING([GStreamer OpenGL platforms ...])
+ GST_GL_PLATFORMS=`$PKG_CONFIG --variable=gl_platforms gstreamer-gl-1.0`
+ AC_MSG_RESULT([$GST_GL_PLATFORMS])
+ GST_GL_HAVE_PLATFORM_GLX=""
+ GST_GL_HAVE_PLATFORM_EGL=""
+ GST_GL_HAVE_PLATFORM_CGL=""
+ GST_GL_HAVE_PLATFORM_EAGL=""
+ case "$GST_GL_PLATFORMS" in *glx*) GST_GL_HAVE_PLATFORM_GLX="1" ;; esac
+ case "$GST_GL_PLATFORMS" in *egl*) GST_GL_HAVE_PLATFORM_EGL="1" ;; esac
+ case "$GST_GL_PLATFORMS" in *cgl*) GST_GL_HAVE_PLATFORM_CGL="1" ;; esac
+ case "$GST_GL_PLATFORMS" in *eagl*) GST_GL_HAVE_PLATFORM_EAGL="1" ;; esac
+
+ AC_MSG_CHECKING([GStreamer OpenGL apis ...])
+ GST_GL_APIS=`$PKG_CONFIG --variable=gl_apis gstreamer-gl-1.0`
+ AC_MSG_RESULT([$GST_GL_APIS])
+ GST_GL_HAVE_API_GLES2=""
+ GST_GL_HAVE_API_GL=""
+ case "$GST_GL_APIS" in *gles2*) GST_GL_HAVE_API_GLES2="1" ;; esac
+ case "$GST_GL_APIS" in "gl"|"gl "*|*" gl"|*" gl "*) GST_GL_HAVE_API_GL="1" ;; esac
+ fi
+
AM_CONDITIONAL(HAVE_GST_CHECK, test "x$HAVE_GST_CHECK" = "xyes")
dnl Check for documentation xrefs
AM_CONDITIONAL(HAVE_GTK, test "x$HAVE_GTK" = "xyes")
AM_CONDITIONAL(HAVE_GTK_X11, test "x$HAVE_GTK_X11" = "xyes")
+AC_ARG_ENABLE(pcmdump, AC_HELP_STRING([--enable-pcmdump], [pcm dump]),
+ [
+ case "${enableval}" in
+ yes) PCM_DUMP_ENABLE=yes ;;
+ no) PCM_DUMP_ENABLE=no ;;
+ *) AC_MSG_ERROR(bad value ${enableval} for --enable-pcmdump) ;;
+ esac
+ ],
+ [PCM_DUMP_ENABLE=no])
+AM_CONDITIONAL([PCM_DUMP_ENABLE], [test "x$PCM_DUMP_ENABLE" = "xyes"])
+
+if test "x$PCM_DUMP_ENABLE" = "xyes"; then
+PKG_CHECK_MODULES(VCONF, vconf)
+AC_SUBST(VCONF_CFLAGS)
+AC_SUBST(VCONF_LIBS)
+fi
+
+PKG_CHECK_MODULES(GIO, gio-2.0)
+AC_SUBST(GIO_CFLAGS)
+AC_SUBST(GIO_LIBS)
+
dnl Check for -Bsymbolic-functions linker flag used to avoid
dnl intra-library PLT jumps, if available.
AC_ARG_ENABLE(Bsymbolic,
["${srcdir}/gst-plugins-good.doap"],
[$PACKAGE_VERSION_MAJOR.$PACKAGE_VERSION_MINOR.$PACKAGE_VERSION_MICRO])
- dnl build static plugins or not
- AC_MSG_CHECKING([whether to build static plugins or not])
- AC_ARG_ENABLE(
- static-plugins,
- AC_HELP_STRING(
- [--enable-static-plugins],
- [build static plugins @<:@default=no@:>@]),
- [AS_CASE(
- [$enableval], [no], [], [yes], [],
- [AC_MSG_ERROR([bad value "$enableval" for --enable-static-plugins])])],
- [enable_static_plugins=no])
- AC_MSG_RESULT([$enable_static_plugins])
- if test "x$enable_static_plugins" = xyes; then
- AC_DEFINE(GST_PLUGIN_BUILD_STATIC, 1,
- [Define if static plugins should be built])
- GST_PLUGIN_LIBTOOLFLAGS=""
- else
- GST_PLUGIN_LIBTOOLFLAGS="--tag=disable-static"
- fi
- AC_SUBST(GST_PLUGIN_LIBTOOLFLAGS)
- AM_CONDITIONAL(GST_PLUGIN_BUILD_STATIC, test "x$enable_static_plugins" = "xyes")
-
# set by AG_GST_PARSE_SUBSYSTEM_DISABLES above
dnl make sure it doesn't complain about unused variables if debugging is disabled
NO_WARNINGS=""
[OSS4_MISSING_HEADER="yes";HAVE_OSS4="no"])
])
- dnl *** Sun Audio ***
- translit(dnm, m, l) AM_CONDITIONAL(USE_SUNAUDIO, true)
- AG_GST_CHECK_FEATURE(SUNAUDIO, [Sun Audio], sunaudio, [
- AC_CHECK_HEADER(sys/audioio.h, HAVE_SUNAUDIO="yes", HAVE_SUNAUDIO="no")
- ])
-
dnl *** OSX Audio ***
translit(dnm, m, l) AM_CONDITIONAL(USE_OSX_AUDIO, true)
HAVE_IOS="no"
fi
dnl Allow enabling v4l2 device probing
+ default_v4l2_probe=no
AS_CASE([$host],
[*-*linux*],
[AS_CASE([$host_cpu],
[arm*], [
- enable_v4l2_probe="yes"],
+ default_v4l2_probe=yes],
[aarch64*], [
- enable_v4l2_probe="yes"])])
+ default_v4l2_probe=yes])])
AC_ARG_ENABLE(
v4l2-probe,
AC_HELP_STRING(
[--enable-v4l2-probe],
- [enable V4L2 plugin to probe devices @<:@default=no@:>@]))
+ [enable V4L2 plugin to probe devices]),
+ [], [enable_v4l2_probe=$default_v4l2_probe])
if test "x$enable_v4l2_probe" = "xyes"; then
AC_DEFINE(GST_V4L2_ENABLE_PROBE, 1,
[Define if Video4Linux probe shall be run at plugin load])
translit(dnm, m, l) AM_CONDITIONAL(USE_X, true)
AG_GST_CHECK_FEATURE(X, [X libraries and plugins],
[ximagesrc], [
- PKG_CHECK_MODULES([X], [x11], [
- HAVE_X="yes"
+ AG_GST_CHECK_X
+
+ if test "x$HAVE_X" = "xyes"
+ then
dnl check for Xfixes
- PKG_CHECK_MODULES([XFIXES], [ xfixes ], [
- AC_DEFINE(HAVE_XFIXES, 1, [Defined if Xfixes is available])
- ], [ HAVE_XFIXES="no" ])
+ PKG_CHECK_MODULES(XFIXES, xfixes, HAVE_XFIXES="yes", HAVE_XFIXES="no")
+ if test "x$HAVE_XFIXES" = "xyes"
+ then
+ XFIXES_CFLAGS="-DHAVE_XFIXES $XFIXES_CFLAGS"
+ fi
+ AC_SUBST(XFIXES_LIBS)
+ AC_SUBST(XFIXES_CFLAGS)
dnl check for Xdamage
- PKG_CHECK_MODULES([XDAMAGE], [ xdamage ], [
- AC_DEFINE(HAVE_XDAMAGE, 1, [Defined if Xdamage is available])
- ], [ HAVE_XDAMAGE="no" ])
-
- dnl check for X Shm
- PKG_CHECK_MODULES([XEXT], [ xext ], [
- AC_CHECK_LIB([Xext], [ XShmAttach ], [
- AC_DEFINE(HAVE_XSHM, 1, [Defined if XShm is available])
- XSHM_LIBS="$XEXT_LIBS"
- XSHM_CFLAGS="$XEXT_CFLAGS"
- ], [ HAVE_XEXT="no" ] , [ $X_LIBS ])
- ])
- AC_SUBST(XSHM_LIBS)
- AC_SUBST(XSHM_CFLAGS)
- ], [ HAVE_X="no" ])
+ PKG_CHECK_MODULES(XDAMAGE, xdamage, HAVE_XDAMAGE="yes", HAVE_XDAMAGE="no")
+ if test "x$HAVE_XDAMAGE" = "xyes"
+ then
+ XDAMAGE_CFLAGS="-DHAVE_XDAMAGE $XDAMAGE_CFLAGS"
+ fi
+ AC_SUBST(XDAMAGE_LIBS)
+ AC_SUBST(XDAMAGE_CFLAGS)
+ fi
+])
+
+dnl FIXME: this should be rolled into the test above, it's just an additional
+dnl feature of the ximagesrc plug-in
+dnl This is the same as in gst-plugins-base
+dnl check for X Shm
+translit(dnm, m, l) AM_CONDITIONAL(USE_XSHM, true)
+AG_GST_CHECK_FEATURE(XSHM, [X Shared Memory extension], , [
+ if test x$HAVE_X = xyes; then
+ AC_CHECK_LIB(Xext, XShmAttach,
+ HAVE_XSHM="yes", HAVE_XSHM="no",
+ $X_LIBS)
+ if test "x$HAVE_XSHM" = "xyes"; then
+ XSHM_LIBS="-lXext"
+ else
+ dnl On AIX, it is in XextSam instead, but we still need -lXext
+ AC_CHECK_LIB(XextSam, XShmAttach,
+ HAVE_XSHM="yes", HAVE_XSHM="no",
+ $X_LIBS)
+ if test "x$HAVE_XSHM" = "xyes"; then
+ XSHM_LIBS="-lXext -lXextSam"
+ fi
+ fi
+ fi
+], ,[
+ AC_SUBST(HAVE_XSHM)
+ AC_SUBST(XSHM_LIBS)
])
dnl *** ext plug-ins ***
AG_GST_PKG_CHECK_MODULES(GDK_PIXBUF, gdk-pixbuf-2.0 >= 2.8.0)
])
+ dnl *** gtk+ ***
+ HAVE_GTK3_GL="no"
+ translit(dnm, m, l) AM_CONDITIONAL(USE_GTK3, true)
+ AG_GST_CHECK_FEATURE(GTK3, [Gtk+ elements], gtk, [
+ PKG_CHECK_MODULES(GTK3, gtk+-3.0, [
+ AC_DEFINE([HAVE_GTK3], 1, [Define if Gtk+ 3.0 is installed])
+ HAVE_GTK3="yes"
+ ], [
+ HAVE_GTK3="no"
+ ])
+ PKG_CHECK_MODULES(GTK3_GL, gtk+-3.0 >= 3.15.0, [
+ GDK_WINDOWING="no"
+ if test "x$GST_GL_HAVE_WINDOW_X11" = "x1" -a "x$GST_GL_HAVE_PLATFORM_GLX" = "x1"; then
+ PKG_CHECK_MODULES(GTK3_X11, gtk+-x11-3.0, [
+ GTK3_CFLAGS="$GTK3_CFLAGS $GTK3_X11_CFLAGS"
+ GTK3_LIBS="$GTK3_LIBS $GTK3_X11_LIBS"
+ GDK_WINDOWING="yes"
+ ], [AC_MSG_NOTICE([Could not find Gtk X11 integration])])
+ fi
+ if test "x$GST_GL_HAVE_WINDOW_WAYLAND" = "x1" -a "x$GST_GL_HAVE_PLATFORM_EGL" = "x1"; then
+ PKG_CHECK_MODULES(GTK3_WAYLAND, gtk+-wayland-3.0, [
+ GTK3_CFLAGS="$GTK3_CFLAGS $GTK3_WAYLAND_CFLAGS"
+ GTK3_LIBS="$GTK3_LIBS $GTK3_WAYLAND_LIBS"
+ GDK_WINDOWING="yes"
+ ], [AC_MSG_NOTICE([Could not find Gtk Wayland integration])])
+ fi
+ if test "x$GDK_WINDOWING" = "xyes" -a "x$HAVE_GST_GL" = "xyes"; then
+ AC_DEFINE([HAVE_GTK3_GL], 1, [Define if Gtk+ 3.0 GL is installed])
+ HAVE_GTK3_GL="yes"
+ fi
+ ], [
+ HAVE_GTK3_GL="no"
+ ])
+ ])
+ AM_CONDITIONAL(USE_GTK3_GL, test "x$HAVE_GTK3_GL" = "xyes")
+
dnl *** Jack ***
translit(dnm, m, l) AM_CONDITIONAL(USE_JACK, true)
AG_GST_CHECK_FEATURE(JACK, Jack, jack, [
AC_SUBST(JPEG_LIBS)
])
+ dnl *** lame ***
+ translit(dnm, m, l) AM_CONDITIONAL(USE_LAME, true)
+ AG_GST_CHECK_FEATURE(LAME, [lame mp3 encoder library], lame, [
+ AG_GST_CHECK_LIBHEADER(LAME, mp3lame, lame_init, $LIBM, lame/lame.h,
+ [
+ HAVE_LAME="yes"
+ LAME_LIBS="-lmp3lame $LIBM"
+ dnl is lame presets available
+ LAME_CFLAGS=""
+ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <lame/lame.h>]], [[ int preset = MEDIUM ]])],[LAME_CFLAGS="-DGSTLAME_PRESET"],[LAME_CFLAGS=""
+ ])
+ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <lame/lame.h>]], [[
+ void *ptr = &lame_set_VBR_quality
+ ]])],[LAME_CFLAGS="$LAME_CFLAGS -DHAVE_LAME_SET_VBR_QUALITY"],[LAME_CFLAGS="$LAME_CFLAGS"
+ ])
+ AC_SUBST(LAME_CFLAGS)
+ AC_SUBST(LAME_LIBS)
+ ])
+ ])
+
dnl *** libcaca ***
translit(dnm, m, l) AM_CONDITIONAL(USE_LIBCACA, true)
AG_GST_CHECK_FEATURE(LIBCACA, [libcaca coloured ASCII art], cacasink, [
translit(dnm, m, l) AM_CONDITIONAL(USE_LIBPNG, true)
AG_GST_CHECK_FEATURE(LIBPNG, [Portable Network Graphics library], png, [
AG_GST_PKG_CHECK_MODULES(LIBPNG, libpng >= 1.2)
+ if test $HAVE_LIBPNG = "yes"; then
+ PKG_CHECK_MODULES(LIBPNG_1_5, libpng >= 1.5.1, [
+ AC_DEFINE(HAVE_LIBPNG_1_5, 1, [Defined if libpng version is 1.5.1 or newer])
+ ], [true])
+ fi
+ ])
+
+ dnl *** mpg123 ***
+ translit(dnm, m, l) AM_CONDITIONAL(USE_MPG123, true)
+ AG_GST_CHECK_FEATURE(MPG123, [mpg123 audio decoder], mpg123, [
+ PKG_CHECK_MODULES(MPG123, libmpg123 >= 1.13, HAVE_MPG123="yes", HAVE_MPG123="no")
+ AC_SUBST(MPG123_CFLAGS)
+ AC_SUBST(MPG123_LIBS)
])
dnl *** pulseaudio ***
])
AM_CONDITIONAL(USE_LIBIEC61883, [ test "x${HAVE_LIBIEC61883}" = xyes ] )
+ dnl *** Qt ***
+ translit(dnm, m, l) AM_CONDITIONAL(USE_QT, true)
+ AG_GST_CHECK_FEATURE(QT, [Qt elements], qt, [
+ PKG_CHECK_MODULES(QT, Qt5Core Qt5Gui Qt5Qml Qt5Quick >= 5.4.0, [
+ QT_PATH=`$PKG_CONFIG --variable=exec_prefix Qt5Core`
+ QT_HOST_PATH=`$PKG_CONFIG --variable=host_bins Qt5Core`
+ AC_ARG_WITH([moc],
+ AS_HELP_STRING([--with-moc], [Set location of qt moc tool]),
+ [MOC=$withval])
+ AC_PATH_PROGS(MOC, [moc-qt5 moc], [moc], ["${QT_HOST_PATH}" "${QT_PATH}/bin"])
+ AC_ARG_WITH([rcc],
+ AS_HELP_STRING([--with-rcc], [Set location of qt rcc tool]),
+ [RCC=$withval])
+ AC_PATH_PROGS(RCC, [rcc-qt5 rcc], [rcc], ["${QT_HOST_PATH}" "${QT_PATH}/bin"])
+ AC_ARG_WITH([uic],
+ AS_HELP_STRING([--with-uic], [Set location of qt uic tool]),
+ [UIC=$withval])
+ AC_PATH_PROGS(UIC, [uic-qt5 uic], [uic], ["${QT_HOST_PATH}" "${QT_PATH}/bin"])
+ if test "x$MOC" = "x" || test "x$UIC" = "x" || test "x$RCC" = "x"; then
+ AC_MSG_WARN([One of the required qt build programs was not found])
+ HAVE_QT="no"
+ else
+ HAVE_QT="yes"
+ HAVE_QT_WINDOWING="no"
+ QT_VERSION="`$PKG_CONFIG --define-prefix --modversion Qt5Core`"
+ QPA_INCLUDE_PATH=`$PKG_CONFIG --variable=includedir Qt5Core`/QtGui/${QT_VERSION}/QtGui
+ AS_IF([test -f "$PKG_CONFIG_SYSROOT_DIR/$QPA_INCLUDE_PATH/qpa/qplatformnativeinterface.h"], [
+ QT_CFLAGS="$QT_CFLAGS -I$PKG_CONFIG_SYSROOT_DIR/$QPA_INCLUDE_PATH"
+ HAVE_QT_QPA_HEADER="yes"
+ AC_DEFINE([HAVE_QT_QPA_HEADER], [], [Define if the Qt QPA header is installed])
+ ], [AC_MSG_NOTICE([Cannot find QPA])])
+ if test "x$GST_GL_HAVE_WINDOW_X11" = "x1" -a "x$GST_GL_HAVE_PLATFORM_GLX" = "x1" || test "x$GST_GL_HAVE_WINDOW_X11" = "x1" -a "x$GST_GL_HAVE_PLATFORM_EGL" = "x1"; then
+ PKG_CHECK_MODULES(QT_X11, Qt5X11Extras, [
+ AC_DEFINE([HAVE_QT_X11], [], [Define if Qt X11 integration is installed])
+ QT_CFLAGS="$QT_CFLAGS $QT_X11_CFLAGS"
+ QT_LIBS="$QT_LIBS $QT_X11_LIBS"
+ HAVE_QT_WINDOWING="yes"
+ ], [AC_MSG_NOTICE([Could not find Qt X11 integration])])
+ fi
+ if test "x$GST_GL_HAVE_WINDOW_WAYLAND" = "x1" -a "x$GST_GL_HAVE_PLATFORM_EGL" = "x1" -a "x$HAVE_QT_QPA_HEADER" = "xyes"; then
+ PKG_CHECK_MODULES(QT_WAYLAND, Qt5WaylandClient, [
+ AC_DEFINE([HAVE_QT_WAYLAND], [],
+ [Define if Qt Wayland integration is installed])
+ QT_CFLAGS="$QT_CFLAGS $QT_WAYLAND_CFLAGS"
+ QT_LIBS="$QT_LIBS $QT_WAYLAND_LIBS"
+ HAVE_QT_WINDOWING="yes"
+ ], [AC_MSG_NOTICE([Could not find Qt Wayland integration])])
+ fi
+ if test "x$GST_GL_HAVE_PLATFORM_EGL" = "x1"; then
+ if test "x$GST_GL_HAVE_WINDOW_ANDROID" = "x1"; then
+ PKG_CHECK_MODULES(QT_ANDROID, Qt5AndroidExtras, [
+ # c++ on android requires a standard library and there are multiple
+ # choices. cerbero provides a pkg-config file the describes a choice
+ # so try to use that. Outside cerbero one has to pass these flags
+ # manually for the library one is using
+ PKG_CHECK_MODULES(GNUSTL, gnustl, [
+ QT_CFLAGS="$QT_CFLAGS $GNUSTL_CFLAGS"
+ QT_LIBS="$QT_LIBS $GNUSTL_LIBS"
+ ], [
+ AC_MSG_NOTICE([Could not find Standard C++ library])])
+ AC_DEFINE([HAVE_QT_ANDROID], [],
+ [Define if Qt Android integration is installed])
+ QT_CFLAGS="$QT_CFLAGS $QT_ANDROID_CFLAGS"
+ QT_LIBS="$QT_LIBS $QT_ANDROID_LIBS"
+ HAVE_QT_WINDOWING="yes"
+ ], [AC_MSG_NOTICE([Could not find Qt Android integration])])
+ else
+ AC_DEFINE([HAVE_QT_EGLFS], [],
+ [Define if Qt eglfs integration is installed])
+ HAVE_QT_WINDOWING="yes"
+ fi
+ fi
+ if test "x$GST_GL_HAVE_WINDOW_COCOA" = "x1" -a "x$GST_GL_HAVE_PLATFORM_CGL" = "x1"; then
+ PKG_CHECK_MODULES(QT_MAC, Qt5MacExtras, [
+ AC_DEFINE([HAVE_QT_MAC], [],
+ [Define if Qt Mac integration is installed])
+ QT_LIBDIR=`$PKG_CONFIG --variable=libdir Qt5Core`
+ QT_CFLAGS="$QT_CFLAGS $QT_MAC_CFLAGS -F$QT_LIBDIR -framework QtCore -framework QtGui -framework QtQuick -framework QtQml -framework QtMacExtras"
+ QT_LIBS="$QT_LIBS $QT_MAC_LIBS"
+ HAVE_QT_WINDOWING="yes"
+ ], [AC_MSG_NOTICE([Could not find Qt Mac integration])])
+ fi
+ if test "x$GST_GL_HAVE_WINDOW_EAGL" = "x1" -a "x$GST_GL_HAVE_PLATFORM_EAGL" = "x1"; then
+ if test "x$HAVE_IOS" = "xyes"; then
+ # iOS doesn't have its own extras package so if we have the core
+ # modules we are good to go
+ AC_DEFINE([HAVE_QT_IOS], [],
+ [Define if Qt iOS integration is installed])
+ HAVE_QT_WINDOWING="yes"
+ fi
+ fi
+ if test "x$HAVE_QT_WINDOWING" = "xno"; then
+ AC_MSG_WARN([Could not find any Qt Windowing integration])
+ HAVE_QT="no"
+ fi
+ AC_SUBST([QT_CFLAGS])
+ AC_SUBST([QT_LIBS])
+ fi
+ ], [
+ HAVE_QT="no"
+ ])
+ ])
+
dnl *** shout2 ***
translit(dnm, m, l) AM_CONDITIONAL(USE_SHOUT2, true)
AG_GST_CHECK_FEATURE(SHOUT2, [Shoutcast/Icecast client library], shout2, [
dnl *** soup ***
translit(dnm, m, l) AM_CONDITIONAL(USE_SOUP, true)
AG_GST_CHECK_FEATURE(SOUP, [soup http client plugin (2.4)], souphttpsrc, [
- PKG_CHECK_MODULES(SOUP, libsoup-2.4 >= 2.46, [HAVE_SOUP="yes"], [HAVE_SOUP="no"])
+ PKG_CHECK_MODULES(SOUP, libsoup-2.4 >= 2.48, [HAVE_SOUP="yes"], [HAVE_SOUP="no"])
AC_SUBST(SOUP_CFLAGS)
AC_SUBST(SOUP_LIBS)
])
fi
])
+ dnl *** twolame ***
+ translit(dnm, m, l) AM_CONDITIONAL(USE_TWOLAME, true)
+ AG_GST_CHECK_FEATURE(TWOLAME, [twolame], twolame, [
+ PKG_CHECK_MODULES(TWOLAME, twolame >= 0.3.10, [
+ HAVE_TWOLAME="yes"], [
+ HAVE_TWOLAME="no"
+ ])
+ AC_SUBST(TWOLAME_CFLAGS)
+ AC_SUBST(TWOLAME_LIBS)
+ ])
+
+
dnl *** vpx ***
translit(dnm, m, l) AM_CONDITIONAL(USE_VPX, vpx)
AG_GST_CHECK_FEATURE(VPX, [VPX decoder], vpx, [
AC_DEFINE(HAVE_VPX_1_4, 1, [Defined if the VPX library version is 1.4 or bigger])
], [true])
+ PKG_CHECK_MODULES(VPX_180, vpx >= 1.8.0, [
+ AC_DEFINE(HAVE_VPX_1_8, 1, [Defined if the VPX library version is 1.8 or bigger])
+ ], [true])
+
LIBS="$OLD_LIBS"
CFLAGS="$OLD_CFLAGS"
fi
AM_CONDITIONAL(USE_DV1394, false)
AM_CONDITIONAL(USE_FLAC, false)
AM_CONDITIONAL(USE_GDK_PIXBUF, false)
+ AM_CONDITIONAL(USE_GTK3, false)
+ AM_CONDITIONAL(USE_GTK3_GL, false)
AM_CONDITIONAL(USE_JACK, false)
AM_CONDITIONAL(USE_JPEG, false)
+ AM_CONDITIONAL(USE_LAME, false)
AM_CONDITIONAL(USE_LIBCACA, false)
AM_CONDITIONAL(USE_LIBDV, false)
AM_CONDITIONAL(USE_LIBIEC61883, false)
AM_CONDITIONAL(USE_LIBPNG, false)
+ AM_CONDITIONAL(USE_MPG123, false)
AM_CONDITIONAL(USE_OSS, false)
AM_CONDITIONAL(USE_OSS4, false)
AM_CONDITIONAL(USE_OSX_AUDIO, false)
AM_CONDITIONAL(USE_OSX_VIDEO, false)
AM_CONDITIONAL(USE_PULSE, false)
+ AM_CONDITIONAL(USE_QT, false)
AM_CONDITIONAL(USE_SHOUT2, false)
AM_CONDITIONAL(USE_SOUP, false)
AM_CONDITIONAL(USE_SPEEX, false)
- AM_CONDITIONAL(USE_SUNAUDIO, false)
AM_CONDITIONAL(USE_TAGLIB, false)
+ AM_CONDITIONAL(USE_TWOLAME, false)
AM_CONDITIONAL(USE_VPX, false)
AM_CONDITIONAL(USE_WAVEFORM, false)
AM_CONDITIONAL(USE_WAVPACK, false)
AM_CONDITIONAL(USE_X, false)
+AM_CONDITIONAL(USE_XSHM, false)
AM_CONDITIONAL(USE_ZLIB, false)
fi dnl of EXT plugins
fi
AC_SUBST(DEPRECATED_CFLAGS)
+ VISIBILITY_CFLAGS=""
+ AS_COMPILER_FLAG([-fvisibility=hidden], [VISIBILITY_CFLAGS="-fvisibility=hidden"])
+ AC_SUBST(VISIBILITY_CFLAGS)
+
+ VISIBILITY_CXXFLAGS=""
+ if test "x$HAVE_CXX" = "xyes"; then
+ AS_CXX_COMPILER_FLAG([-fvisibility=hidden], [VISIBILITY_CXXFLAGS="-fvisibility=hidden"])
+ fi
+ AC_SUBST(VISIBILITY_CXXFLAGS)
+
+ dnl disable strict aliasing
+ AS_COMPILER_FLAG([-fno-strict-aliasing], [EXTRA_CFLAGS="-fno-strict-aliasing"])
+ AC_SUBST(EXTRA_CFLAGS)
+
dnl every flag in GST_OPTION_CFLAGS, GST_OPTION_CXXFLAGS and GST_OPTION_OBJCFLAGS can be overridden
dnl at make time with e.g. make ERROR_CFLAGS=""
GST_OPTION_CFLAGS="\$(WARNING_CFLAGS) \$(ERROR_CFLAGS) \$(DEBUG_CFLAGS) \$(PROFILE_CFLAGS) \$(GCOV_CFLAGS) \$(OPT_CFLAGS) \$(DEPRECATED_CFLAGS)"
dnl also add builddir include for enumtypes and marshal
dnl add ERROR_CFLAGS, but overridable
GST_CFLAGS="$GST_CFLAGS -DGST_USE_UNSTABLE_API"
- GST_CXXFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_CXXFLAGS)"
- GST_OBJCFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_OBJCFLAGS)"
- GST_CFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_CFLAGS)"
+ GST_CXXFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $EXTRA_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_CXXFLAGS) \$(VISIBILITY_CXXFLAGS)"
+ GST_OBJCFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $EXTRA_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_OBJCFLAGS)"
+ GST_CFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $EXTRA_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_CFLAGS) \$(VISIBILITY_CFLAGS)"
AC_SUBST(GST_CFLAGS)
AC_SUBST(GST_CXXFLAGS)
AC_SUBST(GST_OBJCFLAGS)
dnl this really should only contain flags, not libs - they get added before
dnl whatevertarget_LIBS and -L flags here affect the rest of the linking
- GST_PLUGIN_LDFLAGS="-module -avoid-version -export-symbols-regex '^[_]*gst_plugin_.*' $GST_ALL_LDFLAGS"
+ GST_PLUGIN_LDFLAGS="-module -avoid-version $GST_ALL_LDFLAGS"
AC_SUBST(GST_PLUGIN_LDFLAGS)
dnl *** output files ***
ext/dv/Makefile
ext/flac/Makefile
ext/gdk_pixbuf/Makefile
+ ext/gtk/Makefile
ext/jack/Makefile
ext/jpeg/Makefile
+ ext/lame/Makefile
ext/libcaca/Makefile
ext/libpng/Makefile
+ ext/mpg123/Makefile
ext/pulse/Makefile
+ ext/qt/Makefile
ext/raw1394/Makefile
ext/shout2/Makefile
ext/soup/Makefile
ext/speex/Makefile
ext/taglib/Makefile
+ ext/twolame/Makefile
ext/vpx/Makefile
ext/wavpack/Makefile
sys/Makefile
sys/oss4/Makefile
sys/osxaudio/Makefile
sys/osxvideo/Makefile
- sys/sunaudio/Makefile
sys/v4l2/Makefile
sys/waveform/Makefile
sys/ximage/Makefile
tests/examples/audiofx/Makefile
tests/examples/cairo/Makefile
tests/examples/equalizer/Makefile
+ tests/examples/gtk/Makefile
tests/examples/jack/Makefile
tests/examples/level/Makefile
tests/examples/rtp/Makefile
+ tests/examples/rtsp/Makefile
tests/examples/shapewipe/Makefile
tests/examples/spectrum/Makefile
tests/examples/v4l2/Makefile
pulsedeviceprovider.c \
pulseutil.c
-libgstpulseaudio_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(PULSE_CFLAGS)
+libgstpulseaudio_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(PULSE_CFLAGS) $(GIO_CFLAGS)
libgstpulseaudio_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_API_VERSION) \
-lgstpbutils-$(GST_API_VERSION) \
- $(GST_BASE_LIBS) $(GST_LIBS) $(PULSE_LIBS)
+ $(GST_BASE_LIBS) $(GST_LIBS) $(PULSE_LIBS) $(GIO_LIBS)
libgstpulseaudio_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
- libgstpulseaudio_la_LIBTOOLFLAGS = $(GST_PLUGIN_LIBTOOLFLAGS)
+if PCM_DUMP_ENABLE
+libgstpulseaudio_la_CFLAGS += $(VCONF_CFLAGS) -DPCM_DUMP_ENABLE
+libgstpulseaudio_la_LIBADD += $(VCONF_LIBS)
+endif
+
noinst_HEADERS = \
pulsesink.h \
pulsesrc.h \
#include <gst/pbutils/pbutils.h> /* only used for GST_PLUGINS_BASE_VERSION_* */
#include <gst/glib-compat-private.h>
-
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <vconf.h>
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
#include "pulsesink.h"
#include "pulseutil.h"
#define DEFAULT_VOLUME 1.0
#define DEFAULT_MUTE FALSE
#define MAX_VOLUME 10.0
+#ifdef __TIZEN__
+#define DEFAULT_AUDIO_LATENCY "mid"
+#endif /* __TIZEN__ */
enum
{
PROP_MUTE,
PROP_CLIENT_NAME,
PROP_STREAM_PROPERTIES,
+#ifdef __TIZEN__
+ PROP_AUDIO_LATENCY,
+#endif /* __TIZEN__ */
PROP_LAST
};
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#define GST_PULSESINK_DUMP_VCONF_KEY "memory/private/sound/pcm_dump"
+#define GST_PULSESINK_DUMP_INPUT_PATH_PREFIX "/tmp/dump_pulsesink_in_"
+#define GST_PULSESINK_DUMP_OUTPUT_PATH_PREFIX "/tmp/dump_pulsesink_out_"
+#define GST_PULSESINK_DUMP_INPUT_FLAG 0x00000400
+#define GST_PULSESINK_DUMP_OUTPUT_FLAG 0x00000800
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
#define GST_TYPE_PULSERING_BUFFER \
(gst_pulseringbuffer_get_type())
#define GST_PULSERING_BUFFER(obj) \
static guint gst_pulseringbuffer_commit (GstAudioRingBuffer * buf,
guint64 * sample, guchar * data, gint in_samples, gint out_samples,
gint * accum);
+#ifdef __TIZEN__
+static gboolean gst_pulsering_set_corked (GstPulseRingBuffer * pbuf, gboolean corked,
+ gboolean wait);
+#endif
G_DEFINE_TYPE (GstPulseRingBuffer, gst_pulseringbuffer,
GST_TYPE_AUDIO_RING_BUFFER);
gst_pulsering_destroy_context (pbuf);
pa_threaded_mainloop_unlock (mainloop);
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (psink->dump_fd_input) {
+ fclose(psink->dump_fd_input);
+ psink->dump_fd_input = NULL;
+ }
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
GST_LOG_OBJECT (psink, "closed device");
return TRUE;
GST_ELEMENT_ERROR (psink, STREAM, FORMAT, ("Sink format changed"),
("Sink format changed"));
}
+#ifdef __TIZEN__
+ } else if (!strcmp (name, PA_STREAM_EVENT_POP_TIMEOUT)) {
+ GST_WARNING_OBJECT (psink, "got event [%s], cork stream now!!!!", name);
+ gst_pulsering_set_corked (pbuf, TRUE, FALSE);
+#endif
} else {
GST_DEBUG_OBJECT (psink, "got unknown event %s", name);
}
const pa_buffer_attr *actual;
pa_channel_map channel_map;
pa_operation *o = NULL;
+#ifndef __TIZEN__
pa_cvolume v;
+#endif
pa_cvolume *pv = NULL;
pa_stream_flags_t flags;
const gchar *name;
else
name = "Playback Stream";
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (psink->need_dump_input == TRUE && psink->dump_fd_input == NULL) {
+ char *suffix , *dump_path;
+ GDateTime *time = g_date_time_new_now_local();
+
+ suffix = g_date_time_format(time, "%m%d_%H%M%S");
+ dump_path = g_strdup_printf("%s%dch_%dhz_%s.pcm", GST_PULSESINK_DUMP_INPUT_PATH_PREFIX, pbuf->channels, spec->info.rate, suffix);
+ GST_WARNING_OBJECT(psink, "pulse-sink dumping enabled: dump path [%s]", dump_path);
+ psink->dump_fd_input = fopen(dump_path, "w+");
+
+ g_free(suffix);
+ g_free(dump_path);
+ g_date_time_unref(time);
+ }
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
/* create a stream */
formats[0] = pbuf->format;
if (!(pbuf->stream = pa_stream_new_extended (pbuf->context, name, formats, 1,
GST_INFO_OBJECT (psink, "prebuf: %d", wanted.prebuf);
GST_INFO_OBJECT (psink, "minreq: %d", wanted.minreq);
+#ifndef __TIZEN__
/* configure volume when we changed it, else we leave the default */
if (psink->volume_set) {
GST_LOG_OBJECT (psink, "have volume of %f", psink->volume);
} else {
pv = NULL;
}
+#endif
/* construct the flags */
flags = PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE |
PA_STREAM_ADJUST_LATENCY | PA_STREAM_START_CORKED;
+#ifndef __TIZEN__
if (psink->mute_set) {
if (psink->mute)
flags |= PA_STREAM_START_MUTED;
else
flags |= PA_STREAM_START_UNMUTED;
}
+#endif
/* we always start corked (see flags above) */
pbuf->corked = TRUE;
GST_INFO_OBJECT (psink, "negotiated to: %s", print_buf);
#endif
+#ifdef __TIZEN__
+ {
+ uint32_t idx;
+ if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
+ goto no_index;
+ if (psink->volume_set)
+ gst_pulse_set_volume_ratio (idx, "out", psink->volume);
+ if (psink->mute_set)
+ if (psink->mute)
+ gst_pulse_set_volume_ratio (idx, "out", 0);
+ }
+#endif
/* After we passed the volume off of to PA we never want to set it
again, since it is PA's job to save/restore volumes. */
psink->volume_set = psink->mute_set = FALSE;
pa_strerror (pa_context_errno (pbuf->context))), (NULL));
goto unlock_and_fail;
}
+#ifdef __TIZEN__
+no_index:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("Failed to get stream index: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock_and_fail;
+ }
+#endif
}
/* free the stream that we acquired before */
if (pbuf->paused)
goto was_paused;
+#ifdef __TIZEN__
+ /* ensure running clock for whatever out there */
+ if (pbuf->corked) {
+ if (!gst_pulsering_set_corked (pbuf, FALSE, FALSE))
+ goto uncork_failed;
+ }
+#endif
/* offset is in bytes */
offset = *sample * bpf;
static GstStateChangeReturn gst_pulsesink_change_state (GstElement * element,
GstStateChange transition);
- static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS (PULSE_SINK_TEMPLATE_CAPS));
-
#define gst_pulsesink_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstPulseSink, gst_pulsesink, GST_TYPE_AUDIO_BASE_SINK,
gst_pulsesink_init_contexts ();
}
}
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+static GstPadProbeReturn
+gst_pulsesink_pad_dump_probe (GstPad * pad, GstPadProbeInfo * info, gpointer data)
+{
+ GstPulseSink *psink = GST_PULSESINK_CAST (data);
+ size_t written = 0;
+ GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
+ GstMapInfo in_map;
+ if (psink->dump_fd_input) {
+ gst_buffer_map(buffer, &in_map, GST_MAP_READ);
+ written = fwrite(in_map.data, 1, in_map.size, psink->dump_fd_input);
+ if (written != in_map.size)
+ GST_WARNING("failed to write!!! ferror=%d", ferror(psink->dump_fd_input));
+ gst_buffer_unmap(buffer, &in_map);
+ }
+ return GST_PAD_PROBE_OK;
+}
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
static void
gst_pulsesink_class_init (GstPulseSinkClass * klass)
{
GstBaseSinkClass *bc;
GstAudioBaseSinkClass *gstaudiosink_class = GST_AUDIO_BASE_SINK_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GstCaps *caps;
gchar *clientname;
gobject_class->finalize = gst_pulsesink_finalize;
"list of pulseaudio stream properties",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#ifdef __TIZEN__
+ g_object_class_install_property (gobject_class,
+ PROP_AUDIO_LATENCY,
+ g_param_spec_string ("latency", "Audio Backend Latency",
+ "Audio Backend Latency (\"low\": Low Latency, \"mid\": Mid Latency, \"high\": High Latency)",
+ DEFAULT_AUDIO_LATENCY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* __TIZEN__ */
+
gst_element_class_set_static_metadata (gstelement_class,
"PulseAudio Audio Sink",
"Sink/Audio", "Plays audio to a PulseAudio server", "Lennart Poettering");
- gst_element_class_add_static_pad_template (gstelement_class, &pad_template);
+
+ caps =
+ gst_pulse_fix_pcm_caps (gst_caps_from_string (PULSE_SINK_TEMPLATE_CAPS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps));
+ gst_caps_unref (caps);
}
static void
}
for (i = g_list_first (device_info.formats); i; i = g_list_next (i)) {
- gst_caps_append (ret,
- gst_pulse_format_info_to_caps ((pa_format_info *) i->data));
+ GstCaps *caps = gst_pulse_format_info_to_caps ((pa_format_info *) i->data);
+ if (caps)
+ gst_caps_append (ret, caps);
}
unlock:
static void
gst_pulsesink_init (GstPulseSink * pulsesink)
{
+#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ GstPad *sinkpad = NULL;
+ int vconf_dump = 0;
+#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+
pulsesink->server = NULL;
pulsesink->device = NULL;
pulsesink->device_info.description = NULL;
pulsesink->properties = NULL;
pulsesink->proplist = NULL;
+#ifdef __TIZEN__
+ pulsesink->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
+ pulsesink->proplist = pa_proplist_new();
+ pa_proplist_sets(pulsesink->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesink->latency);
+#ifdef PCM_DUMP_ENABLE
+ if (vconf_get_int(GST_PULSESINK_DUMP_VCONF_KEY, &vconf_dump)) {
+ GST_WARNING("vconf_get_int %s failed", GST_PULSESINK_DUMP_VCONF_KEY);
+ }
+ pulsesink->need_dump_input = vconf_dump & GST_PULSESINK_DUMP_INPUT_FLAG ? TRUE : FALSE;
+ pulsesink->dump_fd_input = NULL;
+ if (pulsesink->need_dump_input) {
+ sinkpad = gst_element_get_static_pad((GstElement *)pulsesink, "sink");
+ if (sinkpad) {
+ gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, gst_pulsesink_pad_dump_probe, pulsesink, NULL);
+ gst_object_unref (GST_OBJECT(sinkpad));
+ }
+ }
+#endif
+#endif /* __TIZEN__ */
/* override with a custom clock */
if (GST_AUDIO_BASE_SINK (pulsesink)->provided_clock)
if (pulsesink->proplist)
pa_proplist_free (pulsesink->proplist);
+#ifdef __TIZEN__
+ g_free (pulsesink->latency);
+#endif /* __TIZEN__ */
+
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_pulsesink_set_volume (GstPulseSink * psink, gdouble volume)
{
+#ifndef __TIZEN__
pa_cvolume v;
pa_operation *o = NULL;
+#endif
GstPulseRingBuffer *pbuf;
uint32_t idx;
+#ifndef __TIZEN__
if (!mainloop)
goto no_mainloop;
pa_threaded_mainloop_lock (mainloop);
+#endif
GST_DEBUG_OBJECT (psink, "setting volume to %f", volume);
if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
goto no_index;
+#ifndef __TIZEN__
if (pbuf->is_pcm)
gst_pulse_cvolume_from_linear (&v, pbuf->channels, volume);
else
&v, NULL, NULL)))
goto volume_failed;
+#else
+ if (!psink->mute)
+ gst_pulse_set_volume_ratio (idx, "out", volume);
+ psink->volume = volume;
+#endif
+
/* We don't really care about the result of this call */
unlock:
+#ifndef __TIZEN__
if (o)
pa_operation_unref (o);
pa_threaded_mainloop_unlock (mainloop);
+#endif
return;
/* ERRORS */
+#ifndef __TIZEN__
no_mainloop:
{
psink->volume = volume;
GST_DEBUG_OBJECT (psink, "we have no mainloop");
return;
}
+#endif
no_buffer:
{
psink->volume = volume;
GST_DEBUG_OBJECT (psink, "we don't have a stream index");
goto unlock;
}
+#ifndef __TIZEN__
volume_failed:
{
GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
pa_strerror (pa_context_errno (pbuf->context))), (NULL));
goto unlock;
}
+#endif
}
static void
gst_pulsesink_set_mute (GstPulseSink * psink, gboolean mute)
{
+#ifndef __TIZEN__
pa_operation *o = NULL;
+#endif
GstPulseRingBuffer *pbuf;
uint32_t idx;
+#ifndef __TIZEN__
if (!mainloop)
goto no_mainloop;
pa_threaded_mainloop_lock (mainloop);
+#endif
GST_DEBUG_OBJECT (psink, "setting mute state to %d", mute);
if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
goto no_index;
+#ifndef __TIZEN__
if (!(o = pa_context_set_sink_input_mute (pbuf->context, idx,
mute, NULL, NULL)))
goto mute_failed;
+#else
+ gst_pulse_set_volume_ratio (idx, "out", mute ? 0 : psink->volume);
+ psink->mute = mute;
+#endif
/* We don't really care about the result of this call */
unlock:
+#ifndef __TIZEN__
if (o)
pa_operation_unref (o);
pa_threaded_mainloop_unlock (mainloop);
+#endif
return;
/* ERRORS */
+#ifndef __TIZEN__
no_mainloop:
{
psink->mute = mute;
GST_DEBUG_OBJECT (psink, "we have no mainloop");
return;
}
+#endif
no_buffer:
{
psink->mute = mute;
GST_DEBUG_OBJECT (psink, "we don't have a stream index");
goto unlock;
}
+#ifndef __TIZEN__
mute_failed:
{
GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
pa_strerror (pa_context_errno (pbuf->context))), (NULL));
goto unlock;
}
+#endif
}
static void
pa_proplist_free (pulsesink->proplist);
pulsesink->proplist = gst_pulse_make_proplist (pulsesink->properties);
break;
+#ifdef __TIZEN__
+ case PROP_AUDIO_LATENCY:
+ g_free (pulsesink->latency);
+ pulsesink->latency = g_value_dup_string (value);
+ /* setting NULL restores the default latency */
+ if (pulsesink->latency == NULL) {
+ pulsesink->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
+ }
+ if (!pulsesink->proplist) {
+ pulsesink->proplist = pa_proplist_new();
+ }
+ pa_proplist_sets(pulsesink->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesink->latency);
+ GST_DEBUG_OBJECT(pulsesink, "latency(%s)", pulsesink->latency);
+ break;
+#endif /* __TIZEN__ */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
break;
case PROP_VOLUME:
{
+#ifndef __TIZEN__
gdouble volume;
gst_pulsesink_get_sink_input_info (pulsesink, &volume, NULL);
g_value_set_double (value, volume);
+#else
+ g_value_set_double (value, pulsesink->volume);
+#endif
break;
}
case PROP_MUTE:
{
+#ifndef __TIZEN__
gboolean mute;
gst_pulsesink_get_sink_input_info (pulsesink, NULL, &mute);
g_value_set_boolean (value, mute);
+#else
+ g_value_set_boolean (value, pulsesink->mute);
+#endif
break;
}
case PROP_CLIENT_NAME:
case PROP_STREAM_PROPERTIES:
gst_value_set_structure (value, pulsesink->properties);
break;
+#ifdef __TIZEN__
+ case PROP_AUDIO_LATENCY:
+ g_value_set_string (value, pulsesink->latency);
+ break;
+#endif /* __TIZEN__ */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
#include "config.h"
#endif
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <stdio.h>
+#endif
- #endif
+
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiosink.h>
gint notify; /* atomic */
+#ifdef __TIZEN__
+ gchar *latency;
+#ifdef PCM_DUMP_ENABLE
+ gint need_dump_input;
+ FILE *dump_fd_input;
+#endif
+#endif /* __TIZEN__ */
+
const gchar *pa_version;
GstStructure *properties;
#include "pulsesrc.h"
#include "pulseutil.h"
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <vconf.h>
+#endif
- #endif
+
GST_DEBUG_CATEGORY_EXTERN (pulse_debug);
#define GST_CAT_DEFAULT pulse_debug
#define DEFAULT_VOLUME 1.0
#define DEFAULT_MUTE FALSE
#define MAX_VOLUME 10.0
+#ifdef __TIZEN__
+#define DEFAULT_AUDIO_LATENCY "mid"
+#endif /* __TIZEN__ */
+/* See the pulsesink code for notes on how we interact with the PA mainloop
+ * thread. */
/* See the pulsesink code for notes on how we interact with the PA mainloop
* thread. */
PROP_SOURCE_OUTPUT_INDEX,
PROP_VOLUME,
PROP_MUTE,
+#ifdef __TIZEN__
+ PROP_AUDIO_LATENCY,
+#endif /* __TIZEN__ */
PROP_LAST
};
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#define GST_PULSESRC_DUMP_VCONF_KEY "memory/private/sound/pcm_dump"
+#define GST_PULSESRC_DUMP_OUTPUT_PATH_PREFIX "/tmp/dump_pulsesrc_out"
+#define GST_PULSESRC_DUMP_OUTPUT_FLAG 0x00200000U
+#endif
- #endif
+
static void gst_pulsesrc_destroy_stream (GstPulseSrc * pulsesrc);
static void gst_pulsesrc_destroy_context (GstPulseSrc * pulsesrc);
static GstClockTime gst_pulsesrc_get_time (GstClock * clock, GstPulseSrc * src);
- static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS (_PULSE_CAPS_PCM)
- );
-
#define gst_pulsesrc_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstPulseSrc, gst_pulsesrc, GST_TYPE_AUDIO_SRC,
G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
GstAudioSrcClass *gstaudiosrc_class = GST_AUDIO_SRC_CLASS (klass);
GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GstCaps *caps;
gchar *clientname;
gobject_class->finalize = gst_pulsesrc_finalize;
"PulseAudio Audio Source",
"Source/Audio",
"Captures audio from a PulseAudio server", "Lennart Poettering");
- gst_element_class_add_static_pad_template (gstelement_class, &pad_template);
+
+ caps = gst_pulse_fix_pcm_caps (gst_caps_from_string (_PULSE_CAPS_PCM));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps));
+ gst_caps_unref (caps);
/**
* GstPulseSrc:volume:
PROP_MUTE, g_param_spec_boolean ("mute", "Mute",
"Mute state of this stream",
DEFAULT_MUTE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+#ifdef __TIZEN__
+ g_object_class_install_property (gobject_class,
+ PROP_AUDIO_LATENCY,
+ g_param_spec_string ("latency", "Audio Backend Latency",
+ "Audio Backend Latency (\"low\": Low Latency, \"mid\": Mid Latency, \"high\": High Latency)",
+ DEFAULT_AUDIO_LATENCY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* __TIZEN__ */
}
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+static GstPadProbeReturn
+gst_pulsesrc_pad_dump_probe (GstPad *pad, GstPadProbeInfo * info, gpointer data)
+{
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (data);
+ size_t written = 0;
+ GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
+ GstMapInfo in_map;
+ if (pulsesrc->dump_fd_output) {
+ gst_buffer_map(buffer, &in_map, GST_MAP_READ);
+ written = fwrite(in_map.data, 1, in_map.size, pulsesrc->dump_fd_output);
+ if (written != in_map.size)
+ GST_WARNING("failed to write!!! ferror=%d", ferror(pulsesrc->dump_fd_output));
+ gst_buffer_unmap(buffer, &in_map);
+ }
+ return GST_PAD_PROBE_OK;
+}
+#endif
- #endif
+
static void
gst_pulsesrc_init (GstPulseSrc * pulsesrc)
{
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ GstPad *srcpad = NULL;
+ int vconf_dump = 0;
+#endif
- #endif
pulsesrc->server = NULL;
pulsesrc->device = NULL;
pulsesrc->client_name = gst_pulse_client_name ();
pulsesrc->properties = NULL;
pulsesrc->proplist = NULL;
-
+#ifdef __TIZEN__
+ pulsesrc->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
+ pulsesrc->proplist = pa_proplist_new();
+ pa_proplist_sets(pulsesrc->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesrc->latency);
+
+#ifdef PCM_DUMP_ENABLE
+ if (vconf_get_int(GST_PULSESRC_DUMP_VCONF_KEY, &vconf_dump)) {
+ GST_WARNING("vconf_get_int %s failed", GST_PULSESRC_DUMP_VCONF_KEY);
+ }
+ pulsesrc->need_dump_output = vconf_dump & GST_PULSESRC_DUMP_OUTPUT_FLAG ? TRUE : FALSE;
+ pulsesrc->dump_fd_output = NULL;
+ if (pulsesrc->need_dump_output) {
+ srcpad = gst_element_get_static_pad((GstElement *)pulsesrc, "src");
+ if (srcpad) {
+ gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BUFFER, gst_pulsesrc_pad_dump_probe, pulsesrc, NULL);
+ gst_object_unref (GST_OBJECT(srcpad));
+ }
+ }
+#endif /* PCM_DUMP_ENABLE */
+#endif /* __TIZEN__ */
/* this should be the default but it isn't yet */
gst_audio_base_src_set_slave_method (GST_AUDIO_BASE_SRC (pulsesrc),
GST_AUDIO_BASE_SRC_SLAVE_SKEW);
if (pulsesrc->proplist)
pa_proplist_free (pulsesrc->proplist);
+#ifdef __TIZEN__
+ g_free (pulsesrc->latency);
+#endif /* __TIZEN__ */
+
G_OBJECT_CLASS (parent_class)->finalize (object);
}
if (!pulsesrc->mainloop)
goto no_mainloop;
- if (!pulsesrc->source_output_idx)
+ if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
goto no_index;
pa_threaded_mainloop_lock (pulsesrc->mainloop);
if (!pulsesrc->mainloop)
goto no_mainloop;
- if (!pulsesrc->source_output_idx)
+ if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
goto no_index;
pa_threaded_mainloop_lock (pulsesrc->mainloop);
if (!pulsesrc->mainloop)
goto no_mainloop;
- if (!pulsesrc->source_output_idx)
+ if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
goto no_index;
pa_threaded_mainloop_lock (pulsesrc->mainloop);
case PROP_MUTE:
gst_pulsesrc_set_stream_mute (pulsesrc, g_value_get_boolean (value));
break;
+#ifdef __TIZEN__
+ case PROP_AUDIO_LATENCY:
+ g_free (pulsesrc->latency);
+ pulsesrc->latency = g_value_dup_string (value);
+ /* setting NULL restores the default latency */
+ if (pulsesrc->latency == NULL) {
+ pulsesrc->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
+ }
+ if (!pulsesrc->proplist) {
+ pulsesrc->proplist = pa_proplist_new();
+ }
+ pa_proplist_sets(pulsesrc->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesrc->latency);
+ GST_DEBUG_OBJECT(pulsesrc, "latency(%s)", pulsesrc->latency);
+ break;
+#endif /* __TIZEN__ */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
g_value_set_boolean (value, mute);
break;
}
+#ifdef __TIZEN__
+ case PROP_AUDIO_LATENCY:
+ g_value_set_string (value, pulsesrc->latency);
+ break;
+#endif /* __TIZEN__ */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
pa_threaded_mainloop_lock (pulsesrc->mainloop);
gst_pulsesrc_destroy_context (pulsesrc);
pa_threaded_mainloop_unlock (pulsesrc->mainloop);
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
-
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (pulsesrc->dump_fd_output) {
+ fclose(pulsesrc->dump_fd_output);
+ pulsesrc->dump_fd_output = NULL;
+ }
+#endif
- #endif
return TRUE;
}
gst_pulsesrc_set_stream_volume (pulsesrc, pulsesrc->volume);
pulsesrc->volume_set = FALSE;
}
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ if (pulsesrc->need_dump_output) {
+ char *suffix , *dump_path;
+ GDateTime *time = NULL;
+ if (pulsesrc->dump_fd_output) {
+ fclose(pulsesrc->dump_fd_output);
+ pulsesrc->dump_fd_output = NULL;
+ }
+ time = g_date_time_new_now_local();
+ suffix = g_date_time_format(time, "%m%d_%H%M%S");
+ dump_path = g_strdup_printf("%s_%dch_%dhz_%s.pcm", GST_PULSESRC_DUMP_OUTPUT_PATH_PREFIX, pulsesrc->sample_spec.channels, pulsesrc->sample_spec.rate, suffix);
+ GST_WARNING_OBJECT(asrc,"pulse-source dumping enabled: dump path [%s]", dump_path);
+ pulsesrc->dump_fd_output = fopen(dump_path, "w+");
+
+ g_free(suffix);
+ g_free(dump_path);
+ g_date_time_unref(time);
+ }
- #endif /* PCM_DUMP_ENABLE */
+#endif
/* get the actual buffering properties now */
actual = pa_stream_get_buffer_attr (pulsesrc->stream);
#include <pulse/pulseaudio.h>
#include <pulse/thread-mainloop.h>
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+#include <stdio.h>
+#endif
- #endif
+
G_BEGIN_DECLS
#define GST_TYPE_PULSESRC \
gboolean paused:1;
gboolean in_read:1;
+#ifdef __TIZEN__
+ gchar *latency;
+#endif /* __TIZEN__ */
+
GstStructure *properties;
pa_proplist *proplist;
- #ifdef __TIZEN__
- #ifdef PCM_DUMP_ENABLE
+
- #endif
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
+ gint need_dump_output;
+ FILE *dump_fd_output;
+#endif
};
struct _GstPulseSrcClass
case G_TYPE_STRING:
pa_proplist_sets (p, prop_id, g_value_get_string (value));
break;
+#ifdef __TIZEN__
+ case G_TYPE_INT:
+ pa_proplist_setf (p, prop_id, "%d", g_value_get_int (value));
+ break;
+#endif
default:
GST_WARNING ("unmapped property type %s", G_VALUE_TYPE_NAME (value));
break;
if (pa_format_info_get_prop_string (format,
PA_PROP_FORMAT_SAMPLE_FORMAT, &tmp)) {
/* No specific sample format means any sample format */
- ret = gst_caps_from_string (_PULSE_CAPS_PCM);
+ ret = gst_pulse_fix_pcm_caps (gst_caps_from_string (_PULSE_CAPS_PCM));
goto out;
} else if (ss.format == PA_SAMPLE_ALAW) {
ret = gst_caps_from_string (_PULSE_CAPS_LINEAR);
if (sformat)
- gst_caps_set_simple (ret, "format", G_TYPE_STRING, NULL);
+ gst_caps_set_simple (ret, "format", G_TYPE_STRING, sformat, NULL);
}
pa_xfree (tmp);
return ret;
}
- #endif
+#ifdef __TIZEN__
+#include <gio/gio.h>
+#define PA_BUS_NAME "org.pulseaudio.Server"
+#define PA_STREAM_MANAGER_OBJECT_PATH "/org/pulseaudio/StreamManager"
+#define PA_STREAM_MANAGER_INTERFACE "org.pulseaudio.StreamManager"
+#define PA_STREAM_MANAGER_METHOD_NAME_SET_VOLUME_RATIO "SetVolumeRatio"
+void
+gst_pulse_set_volume_ratio (uint32_t stream_index, const char *direction, double ratio)
+{
+ GDBusConnection *conn = NULL;
+ GError *err = NULL;
+ GVariant *result = NULL;
+ const gchar *dbus_ret = NULL;
+
+ conn = g_bus_get_sync (G_BUS_TYPE_SYSTEM, NULL, &err);
+ if (!conn || err) {
+ GST_ERROR ("g_bus_get_sync() error (%s)", err ? err->message : NULL);
+ if (err)
+ g_error_free (err);
+ return;
+ }
+
+ result = g_dbus_connection_call_sync (conn,
+ PA_BUS_NAME,
+ PA_STREAM_MANAGER_OBJECT_PATH,
+ PA_STREAM_MANAGER_INTERFACE,
+ PA_STREAM_MANAGER_METHOD_NAME_SET_VOLUME_RATIO,
+ g_variant_new("(sud)", direction, stream_index, ratio),
+ G_VARIANT_TYPE("(s)"),
+ G_DBUS_CALL_FLAGS_NONE,
+ 1000,
+ NULL,
+ &err);
+ if (!result || err) {
+ GST_ERROR ("g_dbus_connection_call_sync() for SET_VOLUME_RATIO error (%s)", err ? err->message : NULL);
+ if (err)
+ g_error_free (err);
+ goto finish;
+ }
+ g_variant_get (result, "(&s)", &dbus_ret);
+ GST_DEBUG ("SET_VOLUME_RATIO returns value(%s) for stream index(%u), ratio(%f)", dbus_ret, stream_index, ratio);
+
+finish:
+  if (result)
+    g_variant_unref(result);
+ g_object_unref(conn);
+
+ return;
+}
++#endif
++
+ GstCaps *
+ gst_pulse_fix_pcm_caps (GstCaps * incaps)
+ {
+ GstCaps *outcaps;
+ int i;
+
+ outcaps = gst_caps_make_writable (incaps);
+
+ for (i = 0; i < gst_caps_get_size (outcaps); i++) {
+ GstStructure *st = gst_caps_get_structure (outcaps, i);
+ const gchar *format = gst_structure_get_name (st);
+ const GValue *value;
+ GValue new_value = G_VALUE_INIT;
+ gint min, max, step;
+
+ if (!(g_str_equal (format, "audio/x-raw") ||
+ g_str_equal (format, "audio/x-alaw") ||
+ g_str_equal (format, "audio/x-mulaw")))
+ continue;
+
+ value = gst_structure_get_value (st, "rate");
+
+ if (!GST_VALUE_HOLDS_INT_RANGE (value))
+ continue;
+
+ min = gst_value_get_int_range_min (value);
+ max = gst_value_get_int_range_max (value);
+ step = gst_value_get_int_range_step (value);
+
+ if (min > PA_RATE_MAX)
+ min = PA_RATE_MAX;
+ if (max > PA_RATE_MAX)
+ max = PA_RATE_MAX;
+
+ g_value_init (&new_value, GST_TYPE_INT_RANGE);
+ gst_value_set_int_range_step (&new_value, min, max, step);
+
+ gst_structure_take_value (st, "rate", &new_value);
+ }
+
+ return outcaps;
+ }
"S24BE, S24LE, S24_32BE, S24_32LE, U8 }"
#endif
+ /* NOTE! that we do NOT actually support rate=MAX. This must be fixed up using
+ * gst_pulse_fix_pcm_caps() before being used. */
#define _PULSE_CAPS_LINEAR \
"audio/x-raw, " \
"format = (string) " _PULSE_FORMATS ", " \
#define _PULSE_CAPS_DTS "audio/x-dts, framed = (boolean) true, " \
"block-size = (int) { 512, 1024, 2048 }; "
#define _PULSE_CAPS_MP3 "audio/mpeg, mpegversion = (int) 1, " \
- "mpegaudioversion = (int) [ 1, 2 ], parsed = (boolean) true;"
+ "mpegaudioversion = (int) [ 1, 3 ], parsed = (boolean) true;"
#define _PULSE_CAPS_AAC "audio/mpeg, mpegversion = (int) { 2, 4 }, " \
"framed = (boolean) true, stream-format = (string) adts;"
GstStructure *gst_pulse_make_structure (pa_proplist *properties);
GstCaps * gst_pulse_format_info_to_caps (pa_format_info * format);
+
+#ifdef __TIZEN__
+void gst_pulse_set_volume_ratio (uint32_t stream_index, const char *direction, double ratio);
+#endif
+ GstCaps * gst_pulse_fix_pcm_caps (GstCaps * incaps);
+
#endif
GST_DEBUG_CATEGORY_STATIC (souphttpsrc_debug);
#define GST_CAT_DEFAULT souphttpsrc_debug
+ #define GST_SOUP_SESSION_CONTEXT "gst.soup.session"
+
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
#define DEFAULT_IRADIO_MODE TRUE
#define DEFAULT_SOUP_LOG_LEVEL SOUP_LOGGER_LOG_HEADERS
#define DEFAULT_COMPRESS FALSE
- #define DEFAULT_KEEP_ALIVE FALSE
+ #define DEFAULT_KEEP_ALIVE TRUE
#define DEFAULT_SSL_STRICT TRUE
#define DEFAULT_SSL_CA_FILE NULL
#define DEFAULT_SSL_USE_SYSTEM_CA_FILE TRUE
#define REDUCE_BLOCKSIZE_LIMIT 0.20
#define REDUCE_BLOCKSIZE_COUNT 2
#define REDUCE_BLOCKSIZE_FACTOR 0.5
+ #define GROW_TIME_LIMIT (1 * GST_SECOND)
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+#define DLNA_OP_TIMED_SEEK 0x02
+#define DLNA_OP_BYTE_SEEK 0x01
+#endif
+
static void gst_soup_http_src_uri_handler_init (gpointer g_iface,
gpointer iface_data);
static void gst_soup_http_src_finalize (GObject * gobject);
static GstStateChangeReturn gst_soup_http_src_change_state (GstElement *
element, GstStateChange transition);
+ static void gst_soup_http_src_set_context (GstElement * element,
+ GstContext * context);
static GstFlowReturn gst_soup_http_src_create (GstPushSrc * psrc,
GstBuffer ** outbuf);
static gboolean gst_soup_http_src_start (GstBaseSrc * bsrc);
"Wouter Cloetens <wouter@mind.be>");
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_soup_http_src_change_state);
+ gstelement_class->set_context =
+ GST_DEBUG_FUNCPTR (gst_soup_http_src_set_context);
gstbasesrc_class->start = GST_DEBUG_FUNCPTR (gst_soup_http_src_start);
gstbasesrc_class->stop = GST_DEBUG_FUNCPTR (gst_soup_http_src_stop);
src->reduce_blocksize_count = 0;
src->increase_blocksize_count = 0;
+ src->last_socket_read_time = 0;
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (src->dash_oldest_segment) {
+ g_free (src->dash_oldest_segment);
+ src->dash_oldest_segment = NULL;
+ }
+ if (src->dash_newest_segment) {
+ g_free (src->dash_newest_segment);
+ src->dash_newest_segment = NULL;
+ }
+ src->dlna_opt = 0;
+#endif
+
g_cancellable_reset (src->cancellable);
+ g_mutex_lock (&src->mutex);
if (src->input_stream) {
g_object_unref (src->input_stream);
src->input_stream = NULL;
}
+ g_mutex_unlock (&src->mutex);
gst_caps_replace (&src->src_caps, NULL);
g_free (src->iradio_name);
src->cookies = NULL;
src->iradio_mode = DEFAULT_IRADIO_MODE;
src->session = NULL;
+ src->external_session = NULL;
+ src->forced_external_session = FALSE;
src->msg = NULL;
src->timeout = DEFAULT_TIMEOUT;
src->log_level = DEFAULT_SOUP_LOG_LEVEL;
+ src->compress = DEFAULT_COMPRESS;
+ src->keep_alive = DEFAULT_KEEP_ALIVE;
src->ssl_strict = DEFAULT_SSL_STRICT;
src->ssl_use_system_ca_file = DEFAULT_SSL_USE_SYSTEM_CA_FILE;
src->tls_database = DEFAULT_TLS_DATABASE;
src->max_retries = DEFAULT_RETRIES;
src->method = DEFAULT_SOUP_METHOD;
src->minimum_blocksize = gst_base_src_get_blocksize (GST_BASE_SRC_CAST (src));
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ src->dash_oldest_segment = NULL;
+ src->dash_newest_segment = NULL;
+ src->received_total = 0;
+ src->dlna_opt = 0;
+#endif
proxy = g_getenv ("http_proxy");
if (!gst_soup_http_src_set_proxy (src, proxy)) {
GST_WARNING_OBJECT (src,
"The proxy in the http_proxy env var (\"%s\") cannot be parsed.",
proxy);
}
-
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ src->cookie_jar = NULL;
+#endif
gst_base_src_set_automatic_eos (GST_BASE_SRC (src), FALSE);
gst_soup_http_src_reset (src);
gst_soup_http_src_session_close (src);
+ if (src->external_session) {
+ g_object_unref (src->external_session);
+ src->external_session = NULL;
+ }
+
G_OBJECT_CLASS (parent_class)->dispose (gobject);
}
break;
}
case PROP_COOKIES:
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ {
+ char **array;
+ SoupURI *base_uri;
+ g_strfreev (src->cookies);
+ src->cookies = g_strdupv (g_value_get_boxed (value));
+
+ if (src->cookie_jar && ((array = src->cookies) != NULL)) {
+ base_uri = soup_uri_new (src->location);
+ GST_INFO_OBJECT (src, "request to set cookies...");
+ while (*array != NULL) {
+ soup_cookie_jar_add_cookie (src->cookie_jar,
+ soup_cookie_parse (*array++, base_uri));
+ }
+ soup_uri_free (base_uri);
+ } else {
+ GST_INFO_OBJECT (src, "set cookies after session creation");
+ }
+ }
+#else
g_strfreev (src->cookies);
src->cookies = g_strdupv (g_value_get_boxed (value));
+#endif
break;
case PROP_IS_LIVE:
gst_base_src_set_live (GST_BASE_SRC (src), g_value_get_boolean (value));
}
break;
case PROP_COOKIES:
- g_value_set_boxed (value, g_strdupv (src->cookies));
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ {
+ GSList *cookie_list, *c;
+ gchar **cookies, **array;
+
+ cookies = NULL;
+ if ((src->cookie_jar) &&
+ ((cookie_list = soup_cookie_jar_all_cookies (src->cookie_jar)) != NULL)) {
+ cookies = g_new0 (gchar *, g_slist_length(cookie_list) + 1);
+ array = cookies;
+ for (c = cookie_list; c; c = c->next) {
+ *array++ = soup_cookie_to_set_cookie_header ((SoupCookie *)(c->data));
+ }
+ soup_cookies_free (cookie_list);
+ }
+ g_value_set_boxed (value, cookies);
+ }
+#else
+ g_value_set_boxed (value, g_strdupv (src->cookies));
+#endif
break;
case PROP_IS_LIVE:
g_value_set_boolean (value, gst_base_src_is_live (GST_BASE_SRC (src)));
gint rc;
soup_message_headers_remove (src->msg->request_headers, "Range");
- if (offset || stop_offset != -1) {
+
+/* These changes are needed to enable seekable contents from the server.
+ We have observed that, for a few specific networks (e.g. Vodafone), without the above headers
+ YouTube sends non-seekable content to the client. */
+#ifndef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (offset || stop_offset != -1)
+#endif
+ {
if (stop_offset != -1) {
g_assert (offset != stop_offset);
stop_offset);
} else {
rc = g_snprintf (buf, sizeof (buf), "bytes=%" G_GUINT64_FORMAT "-",
- offset);
+ offset);
}
if (rc > sizeof (buf) || rc < 0)
return FALSE;
soup_message_headers_append (src->msg->request_headers, "Range", buf);
}
-
src->read_position = offset;
return TRUE;
}
soup_message_headers_append (src->msg->request_headers, field_name,
field_content);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (!g_ascii_strcasecmp(field_name, "Cookie")) {
+ SoupURI *uri = NULL;
+ SoupCookie *cookie_parsed = NULL;
+ gchar *saveptr = NULL;
+
+ if (strlen(field_content) > 0) {
+ gchar *tmp_field = NULL;
+
+ uri = soup_uri_new (src->location);
+
+ tmp_field = strtok_r (field_content, ";", &saveptr);
+
+ while (tmp_field != NULL) {
+ GST_DEBUG_OBJECT (src, "field_content = %s", tmp_field);
+
+ cookie_parsed = soup_cookie_parse(tmp_field, uri);
+ GST_DEBUG_OBJECT (src, "cookie parsed = %p", cookie_parsed);
+
+ if (src->cookie_jar)
+ soup_cookie_jar_add_cookie (src->cookie_jar, cookie_parsed);
+
+ tmp_field = strtok_r (NULL, ";", &saveptr);
+ }
+ soup_uri_free (uri);
+ }
+ }
+#endif
+
g_free (field_content);
return TRUE;
}
if (!src->session) {
- GST_DEBUG_OBJECT (src, "Creating session");
- if (src->proxy == NULL) {
- src->session =
- soup_session_new_with_options (SOUP_SESSION_USER_AGENT,
- src->user_agent, SOUP_SESSION_TIMEOUT, src->timeout,
- SOUP_SESSION_SSL_STRICT, src->ssl_strict, NULL);
- // SOUP_SESSION_TLS_INTERACTION, src->tls_interaction, NULL);
+ GstQuery *query;
+ gboolean can_share = (src->timeout == DEFAULT_TIMEOUT)
+ && (src->ssl_strict == DEFAULT_SSL_STRICT)
+ && (src->tls_interaction == NULL) && (src->proxy == NULL)
+ && (src->tls_database == DEFAULT_TLS_DATABASE)
+ && (src->ssl_ca_file == DEFAULT_SSL_CA_FILE)
+ && (src->ssl_use_system_ca_file == DEFAULT_SSL_USE_SYSTEM_CA_FILE);
+
+ query = gst_query_new_context (GST_SOUP_SESSION_CONTEXT);
+ if (gst_pad_peer_query (GST_BASE_SRC_PAD (src), query)) {
+ GstContext *context;
+
+ gst_query_parse_context (query, &context);
+ gst_element_set_context (GST_ELEMENT_CAST (src), context);
} else {
- src->session =
- soup_session_new_with_options (SOUP_SESSION_PROXY_URI, src->proxy,
- SOUP_SESSION_TIMEOUT, src->timeout,
- SOUP_SESSION_SSL_STRICT, src->ssl_strict,
- SOUP_SESSION_USER_AGENT, src->user_agent, NULL);
- // SOUP_SESSION_TLS_INTERACTION, src->tls_interaction, NULL);
+ GstMessage *message;
+
+ message =
+ gst_message_new_need_context (GST_OBJECT_CAST (src),
+ GST_SOUP_SESSION_CONTEXT);
+ gst_element_post_message (GST_ELEMENT_CAST (src), message);
+ }
+ gst_query_unref (query);
+
+ GST_OBJECT_LOCK (src);
+ if (src->external_session && (can_share || src->forced_external_session)) {
+ GST_DEBUG_OBJECT (src, "Using external session %p",
+ src->external_session);
+ src->session = g_object_ref (src->external_session);
+ src->session_is_shared = TRUE;
+ } else {
+ GST_DEBUG_OBJECT (src, "Creating session (can share %d)", can_share);
+
+ /* We explicitly set User-Agent to NULL here and overwrite it per message
+ * to be able to have the same session with different User-Agents per
+ * source */
+ if (src->proxy == NULL) {
+ src->session =
+ soup_session_new_with_options (SOUP_SESSION_USER_AGENT,
+ NULL, SOUP_SESSION_TIMEOUT, src->timeout,
+ SOUP_SESSION_SSL_STRICT, src->ssl_strict,
+ SOUP_SESSION_TLS_INTERACTION, src->tls_interaction, NULL);
+ } else {
+ src->session =
+ soup_session_new_with_options (SOUP_SESSION_PROXY_URI, src->proxy,
+ SOUP_SESSION_TIMEOUT, src->timeout,
+ SOUP_SESSION_SSL_STRICT, src->ssl_strict,
+ SOUP_SESSION_USER_AGENT, NULL,
+ SOUP_SESSION_TLS_INTERACTION, src->tls_interaction, NULL);
+ }
+
+ if (src->session) {
+ gst_soup_util_log_setup (src->session, src->log_level,
+ GST_ELEMENT (src));
+ soup_session_add_feature_by_type (src->session,
+ SOUP_TYPE_CONTENT_DECODER);
+ soup_session_add_feature_by_type (src->session, SOUP_TYPE_COOKIE_JAR);
+
+ if (can_share) {
+ GstContext *context;
+ GstMessage *message;
+ GstStructure *s;
+
+ GST_DEBUG_OBJECT (src, "Sharing session %p", src->session);
+ src->session_is_shared = TRUE;
+
+ /* Unset the limit the number of maximum allowed connection */
+ g_object_set (src->session, SOUP_SESSION_MAX_CONNS, G_MAXINT,
+ SOUP_SESSION_MAX_CONNS_PER_HOST, G_MAXINT, NULL);
+
+ context = gst_context_new (GST_SOUP_SESSION_CONTEXT, TRUE);
+ s = gst_context_writable_structure (context);
+ gst_structure_set (s, "session", SOUP_TYPE_SESSION, src->session,
+ "force", G_TYPE_BOOLEAN, FALSE, NULL);
+
+ gst_object_ref (src->session);
+ GST_OBJECT_UNLOCK (src);
+ gst_element_set_context (GST_ELEMENT_CAST (src), context);
+ message =
+ gst_message_new_have_context (GST_OBJECT_CAST (src), context);
+ gst_element_post_message (GST_ELEMENT_CAST (src), message);
+ GST_OBJECT_LOCK (src);
+ gst_object_unref (src->session);
+ } else {
+ src->session_is_shared = FALSE;
+ }
+ }
}
if (!src->session) {
GST_ELEMENT_ERROR (src, LIBRARY, INIT,
- (NULL), ("Failed to create async session"));
+ (NULL), ("Failed to create session"));
+ GST_OBJECT_UNLOCK (src);
return FALSE;
}
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ {
+ char **array = NULL;
+ SoupURI *base_uri;
+ SoupCookie *soup_cookie = NULL;
+
+ soup_session_add_feature_by_type (src->session, SOUP_TYPE_COOKIE_JAR);
+ src->cookie_jar = SOUP_COOKIE_JAR (soup_session_get_feature (src->session, SOUP_TYPE_COOKIE_JAR));
+ if ((array = src->cookies) != NULL) {
+ base_uri = soup_uri_new (src->location);
+ while (*array != NULL) {
+ soup_cookie = soup_cookie_parse (*array++, base_uri);
+ if (soup_cookie != NULL) {
+ GST_INFO_OBJECT (src, "adding cookies..");
+ soup_cookie_jar_add_cookie (src->cookie_jar, soup_cookie);
+ }
+ }
+ soup_uri_free (base_uri);
+ }
+ }
+#endif
+
g_signal_connect (src->session, "authenticate",
G_CALLBACK (gst_soup_http_src_authenticate_cb), src);
- /* Set up logging */
- gst_soup_util_log_setup (src->session, src->log_level, GST_ELEMENT (src));
- if (src->tls_database)
- g_object_set (src->session, "tls-database", src->tls_database, NULL);
- else if (src->ssl_ca_file)
- g_object_set (src->session, "ssl-ca-file", src->ssl_ca_file, NULL);
- else
- g_object_set (src->session, "ssl-use-system-ca-file",
- src->ssl_use_system_ca_file, NULL);
+ if (!src->session_is_shared) {
+ if (src->tls_database)
+ g_object_set (src->session, "tls-database", src->tls_database, NULL);
+ else if (src->ssl_ca_file)
+ g_object_set (src->session, "ssl-ca-file", src->ssl_ca_file, NULL);
+ else
+ g_object_set (src->session, "ssl-use-system-ca-file",
+ src->ssl_use_system_ca_file, NULL);
+ }
+ GST_OBJECT_UNLOCK (src);
} else {
GST_DEBUG_OBJECT (src, "Re-using session");
}
- if (src->compress)
- soup_session_add_feature_by_type (src->session, SOUP_TYPE_CONTENT_DECODER);
- else
- soup_session_remove_feature_by_type (src->session,
- SOUP_TYPE_CONTENT_DECODER);
-
return TRUE;
}
}
if (src->session) {
- GST_DEBUG_OBJECT (src, "Removing Cookie Jar instance");
- soup_session_remove_feature_by_type(src->session, SOUP_TYPE_COOKIE_JAR);
- src->cookie_jar = NULL;
+ if (!src->session_is_shared)
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
/* When playback is ongoing and the browser is moved to the background (pressing the Menu or Home key), the session gets destroyed,
   but the cookie_jar pointer is left dangling, which causes a crash on later use.
   Remove the cookie-jar feature and clear the pointer when closing the session to handle this. */
- soup_session_abort (src->session);
++ {
++ GST_DEBUG_OBJECT (src, "Removing Cookie Jar instance");
++ soup_session_remove_feature_by_type(src->session, SOUP_TYPE_COOKIE_JAR);
++ src->cookie_jar = NULL;
++ soup_session_abort (src->session);
++ }
++#else
+ soup_session_abort (src->session);
+#endif
+ g_signal_handlers_disconnect_by_func (src->session,
+ G_CALLBACK (gst_soup_http_src_authenticate_cb), src);
g_object_unref (src->session);
src->session = NULL;
}
+
g_mutex_unlock (&src->mutex);
}
gst_soup_http_src_authenticate_cb (SoupSession * session, SoupMessage * msg,
SoupAuth * auth, gboolean retrying, GstSoupHTTPSrc * src)
{
+ /* Might be from another user of the shared session */
+ if (!GST_IS_SOUP_HTTP_SRC (src) || msg != src->msg)
+ return;
+
if (!retrying) {
/* First time authentication only, if we fail and are called again with retry true fall through */
if (msg->status_code == SOUP_STATUS_UNAUTHORIZED) {
}
}
- if (g_ascii_strcasecmp (name, "Set-Cookie") == 0)
- {
- if (val)
- {
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+static void
+gst_soup_http_src_headers_foreach (const gchar * name, const gchar * val,
+ gpointer src)
+{
+ GST_INFO_OBJECT (src, " %s: %s", name, val);
+
- }
- else if (g_ascii_strcasecmp (name, "Dash-Oldest-Segment") == 0)
- {
- if (val)
- {
++ if (g_ascii_strcasecmp (name, "Set-Cookie") == 0) {
++ if (val) {
+ gboolean bret = FALSE;
+ GstStructure *s = NULL;
+ GstSoupHTTPSrc * tmp = src;
+ SoupURI *uri;
+
+ uri = soup_uri_new (tmp->location);
+
+ /* post current bandwith & uri to application */
+ s = gst_structure_new ("cookies",
+ "updated-cookie", G_TYPE_STRING, val,
+ "updated-url", G_TYPE_STRING, tmp->location, NULL);
+ bret = gst_element_post_message (GST_ELEMENT_CAST (src), gst_message_new_element (GST_OBJECT_CAST (src), s));
+ soup_cookie_jar_set_cookie (tmp->cookie_jar, uri, val);
+ soup_uri_free (uri);
+
+ GST_INFO_OBJECT (src, "request url [%s], posted cookies [%s] msg and returned = %d", tmp->location, val, bret);
+ }
- }
- else if (g_ascii_strcasecmp (name, "Dash-Newest-Segment") == 0)
- {
- if (val)
- {
++ } else if (g_ascii_strcasecmp (name, "Dash-Oldest-Segment") == 0) {
++ if (val) {
+ GstSoupHTTPSrc * tmp = src;
+ tmp->dash_oldest_segment = g_strdup (val);
+ GST_INFO_OBJECT (src, "Dash-Oldest-Segment set as %s ", tmp->dash_oldest_segment);
+ }
++ } else if (g_ascii_strcasecmp (name, "Dash-Newest-Segment") == 0) {
++ if (val) {
+ GstSoupHTTPSrc * tmp = src;
+ tmp->dash_newest_segment = g_strdup (val);
+ GST_INFO_OBJECT (src, "Dash-Newest-Segment set as %s ", tmp->dash_newest_segment);
+ }
+ }
+}
+#endif
+
static GstFlowReturn
gst_soup_http_src_got_headers (GstSoupHTTPSrc * src, SoupMessage * msg)
{
GstEvent *http_headers_event;
GstStructure *http_headers, *headers;
const gchar *accept_ranges;
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ gint64 start = 0, stop = 0, total = 0;
+#endif
- GST_INFO_OBJECT (src, "got headers");
-
+ GST_INFO_OBJECT (src, "got headers : %d", msg->status_code);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ soup_message_headers_foreach (msg->response_headers,
+ gst_soup_http_src_headers_foreach, src);
+#endif
if (msg->status_code == SOUP_STATUS_PROXY_AUTHENTICATION_REQUIRED &&
src->proxy_id && src->proxy_pw) {
/* wait for authenticate callback */
return GST_FLOW_OK;
}
- if (msg->status_code == SOUP_STATUS_UNAUTHORIZED) {
- /* force an error */
- return gst_soup_http_src_parse_status (msg, src);
- }
-
- src->got_headers = TRUE;
- g_cond_broadcast (&src->have_headers_cond);
-
http_headers = gst_structure_new_empty ("http-headers");
- gst_structure_set (http_headers, "uri", G_TYPE_STRING, src->location, NULL);
+ gst_structure_set (http_headers, "uri", G_TYPE_STRING, src->location,
+ "http-status-code", G_TYPE_UINT, msg->status_code, NULL);
if (src->redirection_uri)
gst_structure_set (http_headers, "redirection-uri", G_TYPE_STRING,
src->redirection_uri, NULL);
headers, NULL);
gst_structure_free (headers);
+ gst_element_post_message (GST_ELEMENT_CAST (src),
+ gst_message_new_element (GST_OBJECT_CAST (src),
+ gst_structure_copy (http_headers)));
+
+ if (msg->status_code == SOUP_STATUS_UNAUTHORIZED) {
+ /* force an error */
+ gst_structure_free (http_headers);
+ return gst_soup_http_src_parse_status (msg, src);
+ }
+
+ src->got_headers = TRUE;
+ g_cond_broadcast (&src->have_headers_cond);
+
http_headers_event =
gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, http_headers);
gst_event_replace (&src->http_headers_event, http_headers_event);
gst_event_unref (http_headers_event);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ /* Parse DLNA OP CODE */
+ if ((value = soup_message_headers_get_one
+ (msg->response_headers, "contentFeatures.dlna.org")) != NULL) {
+ gchar **token = NULL;
+ gchar **ptr = NULL;
+
+ GST_DEBUG_OBJECT (src, "DLNA server response");
+
+ token = g_strsplit (value, ";", 0);
+ for (ptr = token ; *ptr ; ptr++) {
+ gchar *tmp = NULL;
+ gchar *op_code = NULL;
+
+ if (!strlen (*ptr))
+ continue;
+
+ tmp = g_ascii_strup (*ptr, strlen (*ptr));
+ if (!strstr (tmp, "DLNA.ORG_OP")) {
+ g_free (tmp);
+ continue;
+ }
+
+ g_free (tmp);
+
+ op_code = strchr (*ptr, '=');
+ if (op_code) {
+ op_code++;
+
+ src->dlna_opt = (atoi (op_code) / 10 << 1) | (atoi (op_code) % 10);
+ GST_DEBUG_OBJECT (src, "dlna op code: %s (0x%X)", op_code, src->dlna_opt);
+ break;
+ }
+ }
+ g_strfreev (token);
+ }
+#endif
+
/* Parse Content-Length. */
if (soup_message_headers_get_encoding (msg->response_headers) ==
SOUP_ENCODING_CONTENT_LENGTH) {
- newsize = src->request_position +
- soup_message_headers_get_content_length (msg->response_headers);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (msg->status_code == SOUP_STATUS_PARTIAL_CONTENT) {
+ newsize = src->request_position +
+ soup_message_headers_get_content_length (msg->response_headers);
+ } else {
+ if (soup_message_headers_get_content_range(msg->response_headers, &start, &stop, &total) && (total > 0)) {
+ GST_DEBUG_OBJECT (src, "get range header : %" G_GINT64_FORMAT
+ "~%" G_GINT64_FORMAT"/%"G_GINT64_FORMAT, start, stop, total);
+ newsize = (guint64)total;
+ } else {
+ if ((src->have_size) && (src->content_size <= src->request_position)) {
+ newsize = src->content_size;
+ } else {
+ newsize = soup_message_headers_get_content_length (msg->response_headers);
+ }
+ }
+ }
+#else
+ newsize = src->request_position +
+ soup_message_headers_get_content_length (msg->response_headers);
+#endif
if (!src->have_size || (src->content_size != newsize)) {
src->content_size = newsize;
src->have_size = TRUE;
if (g_ascii_strcasecmp (accept_ranges, "none") == 0)
src->seekable = FALSE;
}
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ else if (src->dlna_opt & DLNA_OP_BYTE_SEEK) {
+ if (src->have_size) {
+ GST_DEBUG_OBJECT (src, "DLNA server is seekable");
+ src->seekable = TRUE;
+ }
+ }
+ /* The Range request header is always included.
+ * @ref gst_soup_http_src_add_range_header() */
+ else if ((msg->status_code == SOUP_STATUS_OK) &&
+ (soup_message_headers_get_content_range (msg->response_headers, &start, &stop, &total) == FALSE)) {
+ GST_DEBUG_OBJECT (src, "there is no accept range header");
+ src->seekable = FALSE;
+ }
+#endif
/* Icecast stuff */
tag_list = gst_tag_list_new_empty ();
/* when content_size is unknown and we have just finished receiving
* a body message, requests that go beyond the content limits will result
* in an error. Here we convert those to EOS */
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (msg->status_code == SOUP_STATUS_REQUESTED_RANGE_NOT_SATISFIABLE &&
+ ((src->have_body && !src->have_size) ||
+ (src->have_size && src->request_position >= src->content_size))) {
+ GST_DEBUG_OBJECT (src, "Requested range out of limits and received full "
+ "body, returning EOS");
+ return GST_FLOW_EOS;
+ }
+#else
if (msg->status_code == SOUP_STATUS_REQUESTED_RANGE_NOT_SATISFIABLE &&
src->have_body && !src->have_size) {
GST_DEBUG_OBJECT (src, "Requested range out of limits and received full "
"body, returning EOS");
return GST_FLOW_EOS;
}
+#endif
/* FIXME: reason_phrase is not translated and not suitable for user
* error dialog according to libsoup documentation.
("Error parsing URL."), ("URL: %s", src->location));
return FALSE;
}
+
+ /* Duplicating the defaults of libsoup here. We don't want to set a
+ * User-Agent in the session as each source might have its own User-Agent
+ * set */
+ if (!src->user_agent || !*src->user_agent) {
+ gchar *user_agent =
+ g_strdup_printf ("libsoup/%u.%u.%u", soup_get_major_version (),
+ soup_get_minor_version (), soup_get_micro_version ());
+ soup_message_headers_append (src->msg->request_headers, "User-Agent",
+ user_agent);
+ g_free (user_agent);
+ } else if (g_str_has_suffix (src->user_agent, " ")) {
+ gchar *user_agent = g_strdup_printf ("%slibsoup/%u.%u.%u", src->user_agent,
+ soup_get_major_version (),
+ soup_get_minor_version (), soup_get_micro_version ());
+ soup_message_headers_append (src->msg->request_headers, "User-Agent",
+ user_agent);
+ g_free (user_agent);
+ } else {
+ soup_message_headers_append (src->msg->request_headers, "User-Agent",
+ src->user_agent);
+ }
+
if (!src->keep_alive) {
soup_message_headers_append (src->msg->request_headers, "Connection",
"close");
soup_message_headers_append (src->msg->request_headers, "icy-metadata",
"1");
}
+
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+/* These changes are needed to enable seekable contents from the server.
+ We have observed that, for a few specific networks (VODAFONE), without the above headers,
+ Youtube sends non-seekable contents to the client. */
+ soup_message_headers_append (src->msg->request_headers, "Accept-Ranges","bytes");
+
+ if (src->cookie_jar) {
+ GSList *cookie_list, *c;
+ gchar *header;
+
+ SoupURI *uri = NULL;
+ SoupCookie *cookie;
+ uri = soup_uri_new (src->location);
+
+ if ((cookie_list = soup_cookie_jar_all_cookies (src->cookie_jar)) != NULL) {
+ for (c = cookie_list; c; c = c->next) {
+ cookie = (SoupCookie *)c->data;
+ if (soup_cookie_applies_to_uri(cookie, uri)) {
+ header = soup_cookie_to_cookie_header (cookie);
+ soup_message_headers_append (src->msg->request_headers, "Cookie", header);
+ g_free (header);
+ }
+ }
+ }
+ soup_cookies_free (cookie_list);
+ soup_uri_free (uri);
+ }
+#else
if (src->cookies) {
gchar **cookie;
*cookie);
}
}
+#endif
+ if (!src->compress)
+ soup_message_disable_feature (src->msg, SOUP_TYPE_CONTENT_DECODER);
+
soup_message_set_flags (src->msg, SOUP_MESSAGE_OVERWRITE_CHUNKS |
(src->automatic_redirect ? 0 : SOUP_MESSAGE_NO_REDIRECT));
gst_soup_http_src_add_extra_headers (src);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ soup_message_headers_foreach (src->msg->request_headers,
+ gst_soup_http_src_headers_foreach, src);
+#endif
+
return TRUE;
}
+ /* Lock taken */
static GstFlowReturn
gst_soup_http_src_send_message (GstSoupHTTPSrc * src)
{
GError *error = NULL;
g_return_val_if_fail (src->msg != NULL, GST_FLOW_ERROR);
+ g_assert (src->input_stream == NULL);
src->input_stream =
soup_session_send (src->session, src->msg, src->cancellable, &error);
if (src->msg && src->request_position > 0) {
gst_soup_http_src_add_range_header (src, src->request_position,
src->stop_position);
- }
+ } else if (src->msg && src->request_position == 0)
+ soup_message_headers_remove (src->msg->request_headers, "Range");
+
+ /* add_range_header() has the side effect of setting read_position to
+ * the requested position. This *needs* to be set regardless of having
+ * a message or not. Failure to do so would result in calculation being
+ * done with stale/wrong read position */
+ src->read_position = src->request_position;
if (!src->msg) {
if (!gst_soup_http_src_build_message (src, method)) {
ret = gst_soup_http_src_send_message (src);
/* Check if Range header was respected. */
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (ret == GST_FLOW_OK && src->request_position > 0 &&
+ (src->msg->status_code != SOUP_STATUS_PARTIAL_CONTENT) &&
+ (src->request_position < src->content_size)) {
+#else
if (ret == GST_FLOW_OK && src->request_position > 0 &&
src->msg->status_code != SOUP_STATUS_PARTIAL_CONTENT) {
+#endif
src->seekable = FALSE;
GST_ELEMENT_ERROR_WITH_DETAILS (src, RESOURCE, SEEK,
(_("Server does not support seeking.")),
{
guint blocksize = gst_base_src_get_blocksize (GST_BASE_SRC_CAST (src));
- GST_LOG_OBJECT (src, "Checking to update blocksize. Read:%" G_GINT64_FORMAT
- " blocksize:%u", bytes_read, blocksize);
+ gint64 time_since_last_read =
+ g_get_monotonic_time () * GST_USECOND - src->last_socket_read_time;
- if (bytes_read >= blocksize * GROW_BLOCKSIZE_LIMIT) {
+ GST_LOG_OBJECT (src, "Checking to update blocksize. Read: %" G_GINT64_FORMAT
+ " bytes, blocksize: %u bytes, time since last read: %" GST_TIME_FORMAT,
+ bytes_read, blocksize, GST_TIME_ARGS (time_since_last_read));
+
+ if (bytes_read >= blocksize * GROW_BLOCKSIZE_LIMIT
+ && time_since_last_read <= GROW_TIME_LIMIT) {
src->reduce_blocksize_count = 0;
src->increase_blocksize_count++;
gst_base_src_set_blocksize (GST_BASE_SRC_CAST (src), blocksize);
src->increase_blocksize_count = 0;
}
- } else if (bytes_read < blocksize * REDUCE_BLOCKSIZE_LIMIT) {
+ } else if (bytes_read < blocksize * REDUCE_BLOCKSIZE_LIMIT
+ || time_since_last_read > GROW_TIME_LIMIT) {
src->reduce_blocksize_count++;
src->increase_blocksize_count = 0;
GST_BUFFER_OFFSET (*outbuf) = bsrc->segment.position;
ret = GST_FLOW_OK;
gst_soup_http_src_update_position (src, read_bytes);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ src->received_total += read_bytes;
+#endif
/* Got some data, reset retry counter */
src->retry_count = 0;
gst_soup_http_src_check_update_blocksize (src, read_bytes);
+ src->last_socket_read_time = g_get_monotonic_time () * GST_USECOND;
+
/* If we're at the end of a range request, read again to let libsoup
* finalize the request. This allows to reuse the connection again later,
* otherwise we would have to cancel the message and close the connection
goto retry;
}
}
+
+ if (ret == GST_FLOW_FLUSHING) {
+ g_mutex_lock (&src->mutex);
+ src->retry_count = 0;
+ g_mutex_unlock (&src->mutex);
+ }
+
return ret;
}
GST_DEBUG_OBJECT (src, "start(\"%s\")", src->location);
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (src->dash_oldest_segment) {
+ g_free (src->dash_oldest_segment);
+ src->dash_oldest_segment = NULL;
+ }
+ if (src->dash_newest_segment) {
+ g_free (src->dash_newest_segment);
+ src->dash_newest_segment = NULL;
+ }
+#endif
return gst_soup_http_src_session_open (src);
}
src = GST_SOUP_HTTP_SRC (bsrc);
GST_DEBUG_OBJECT (src, "stop()");
- if (src->keep_alive && !src->msg)
+ if (src->keep_alive && !src->msg && !src->session_is_shared)
gst_soup_http_src_cancel_message (src);
else
gst_soup_http_src_session_close (src);
src = GST_SOUP_HTTP_SRC (element);
switch (transition) {
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ GST_WARNING_OBJECT (src, "Last read pos"
+ ": %" G_GINT64_FORMAT ", received total : %" G_GINT64_FORMAT,
+ src->read_position, src->received_total);
+ break;
+#endif
case GST_STATE_CHANGE_READY_TO_NULL:
gst_soup_http_src_session_close (src);
break;
return ret;
}
+ static void
+ gst_soup_http_src_set_context (GstElement * element, GstContext * context)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (element);
+
+ if (g_strcmp0 (gst_context_get_context_type (context),
+ GST_SOUP_SESSION_CONTEXT) == 0) {
+ const GstStructure *s = gst_context_get_structure (context);
+
+ GST_OBJECT_LOCK (src);
+ if (src->external_session)
+ g_object_unref (src->external_session);
+ src->external_session = NULL;
+ gst_structure_get (s, "session", SOUP_TYPE_SESSION, &src->external_session,
+ NULL);
+ src->forced_external_session = FALSE;
+ gst_structure_get (s, "force", G_TYPE_BOOLEAN,
+ &src->forced_external_session, NULL);
+
+ GST_DEBUG_OBJECT (src, "Setting external session %p (force: %d)",
+ src->external_session, src->forced_external_session);
+ GST_OBJECT_UNLOCK (src);
+ }
+
+ GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
+ }
+
/* Interrupt a blocking request. */
static gboolean
gst_soup_http_src_unlock (GstBaseSrc * bsrc)
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_SCHEDULING:
-
gst_query_parse_scheduling (query, &flags, &minsize, &maxsize, &align);
flags |= GST_SCHEDULING_FLAG_BANDWIDTH_LIMITED;
+
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ if (gst_soup_http_src_is_seekable(bsrc)) {
+ GST_DEBUG_OBJECT (src, "set seekable flag");
+ flags |= GST_SCHEDULING_FLAG_SEEKABLE;
+ }
+#endif
gst_query_set_scheduling (query, flags, minsize, maxsize, align);
-
break;
default:
break;
if (uri == NULL || *uri == '\0')
return TRUE;
- if (g_str_has_prefix (uri, "http://")) {
+ if (g_strstr_len (uri, -1, "://")) {
src->proxy = soup_uri_new (uri);
} else {
gchar *new_uri = g_strconcat ("http://", uri, NULL);
return (src->proxy != NULL);
}
- static guint
+ static GstURIType
gst_soup_http_src_uri_get_type (GType type)
{
return GST_URI_SRC;
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more
+ * Library General Public License for more
*/
#ifndef __GST_SOUP_HTTP_SRC_H__
gchar *proxy_pw; /* Authentication user password for proxy URI. */
gchar **cookies; /* HTTP request cookies. */
SoupSession *session; /* Async context. */
+ gboolean session_is_shared;
+ SoupSession *external_session; /* Shared via GstContext */
+ gboolean forced_external_session; /* If session was explicitly set from application */
SoupMessage *msg; /* Request message. */
gint retry_count; /* Number of retries since we received data */
gint max_retries; /* Maximum number of retries */
gchar *iradio_url;
GstStructure *extra_headers;
-
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ SoupCookieJar *cookie_jar;
+#endif
SoupLoggerLogLevel log_level;/* Soup HTTP session logger level */
gboolean compress;
GCond have_headers_cond;
GstEvent *http_headers_event;
- #endif
+
+ gint64 last_socket_read_time;
++
+#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
+ gchar *dash_oldest_segment;
+ gchar *dash_newest_segment;
+ guint64 received_total; /* temp: for debugging */
+ guint dlna_opt; /* DLNA server option */
++#endif
};
struct _GstSoupHTTPSrcClass {
#define ADIF_MAX_SIZE 40 /* Should be enough */
#define ADTS_MAX_SIZE 10 /* Should be enough */
#define LOAS_MAX_SIZE 3 /* Should be enough */
+ #define RAW_MAX_SIZE 1 /* Correct framing is required */
#define ADTS_HEADERS_LENGTH 7UL /* Total byte-length of fixed and variable
headers prepended during raw to ADTS
conversion */
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION /* to get more accurate duration */
+#define AAC_MAX_ESTIMATE_DURATION_BUF (1024 * 1024) /* use first 1 Mbyte */
+#define AAC_SAMPLE_PER_FRAME 1024
+
+#define AAC_MAX_PULL_RANGE_BUF (1 * 1024 * 1024) /* 1 MByte */
+#define AAC_LARGE_FILE_SIZE (2 * 1024 * 1024) /* 2 MByte */
+#define gst_aac_parse_parent_class parent_class
+#endif
+
#define AAC_FRAME_DURATION(parse) (GST_SECOND/parse->frames_per_sec)
static const gint loas_sample_rate_table[16] = {
aacparse, GstBitReader * br, gint * object_type, gint * sample_rate,
gint * channels, gint * frame_samples);
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+static guint gst_aac_parse_adts_get_fast_frame_len (const guint8 * data);
+/* make full aac(adts) index table when seek */
+static gboolean gst_aac_parse_adts_src_eventfunc (GstBaseParse * parse,
+ GstEvent * event);
+int get_aac_parse_get_adts_frame_length (const unsigned char *data,
+ gint64 offset);
+static gboolean gst_aac_parse_estimate_duration (GstBaseParse * parse);
+#endif
#define gst_aac_parse_parent_class parent_class
G_DEFINE_TYPE (GstAacParse, gst_aac_parse, GST_TYPE_BASE_PARSE);
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+static inline gint
+gst_aac_parse_get_sample_rate_from_index (guint sr_idx)
+{
+ static const guint aac_sample_rates[] = { 96000, 88200, 64000, 48000, 44100,
+ 32000, 24000, 22050, 16000, 12000, 11025, 8000
+ };
+
+ if (sr_idx < G_N_ELEMENTS (aac_sample_rates))
+ return aac_sample_rates[sr_idx];
+ GST_WARNING ("Invalid sample rate index %u", sr_idx);
+ return 0;
+}
+#endif
/**
* gst_aac_parse_class_init:
* @klass: #GstAacParseClass.
aacparse->last_parsed_sample_rate = 0;
aacparse->last_parsed_channels = 0;
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ /* to get more correct duration */
+ aacparse->first_frame = TRUE;
+#endif
}
gst_buffer_fill (codec_data_buffer, 0, codec_data, 2);
gst_caps_set_simple (src_caps, "codec_data", GST_TYPE_BUFFER,
codec_data_buffer, NULL);
+ gst_buffer_unref (codec_data_buffer);
}
} else if (aacparse->header_type == DSPAAC_HEADER_NONE) {
GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
gst_aac_parse_set_src_caps (aacparse, caps);
if (aacparse->header_type == aacparse->output_header_type)
gst_base_parse_set_passthrough (parse, TRUE);
+
+ /* input is already correctly framed */
+ gst_base_parse_set_min_frame_size (parse, RAW_MAX_SIZE);
} else {
return FALSE;
}
adts_headers[0] = 0xFFU;
adts_headers[1] = 0xF0U | (id << 3) | 0x1U;
adts_headers[2] = (profile << 6) | (sampling_frequency_index << 2) | 0x2U |
- (channel_configuration & 0x4U);
+ ((channel_configuration & 0x4U) >> 2);
adts_headers[3] = ((channel_configuration & 0x3U) << 6) | 0x30U |
(guint8) (frame_size >> 11);
adts_headers[4] = (guint8) ((frame_size >> 3) & 0x00FF);
gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse),
aacparse->sample_rate, aacparse->frame_samples, 2, 2);
}
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ if (aacparse->first_frame == TRUE) {
+ gboolean ret = FALSE;
+ aacparse->first_frame = FALSE;
+
+ ret = gst_aac_parse_estimate_duration (parse);
+ if (!ret) {
+ GST_WARNING_OBJECT (aacparse, "can not estimate total duration");
+ ret = GST_FLOW_NOT_SUPPORTED;
+ }
+ }
+#endif
} else if (aacparse->header_type == DSPAAC_HEADER_LOAS) {
gboolean setcaps = FALSE;
aacparse->sample_rate, aacparse->frame_samples, 2, 2);
}
}
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ else if (aacparse->header_type == DSPAAC_HEADER_ADIF) {
+ /* to get more correct duration */
+ float estimated_duration = 0;
+ gint64 total_file_size;
+ gst_base_parse_get_upstream_size (parse, &total_file_size);
+ estimated_duration =
+ ((total_file_size * 8) / (float) (aacparse->bitrate * 1000)) *
+ GST_SECOND;
+ gst_base_parse_set_duration (parse, GST_FORMAT_TIME,
+ estimated_duration * 1000, 0);
+ }
+#endif
if (aacparse->header_type == DSPAAC_HEADER_NONE
&& aacparse->output_header_type == DSPAAC_HEADER_ADTS) {
aacparse->last_parsed_channels = 0;
aacparse->last_parsed_sample_rate = 0;
}
-
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+ GST_DEBUG ("Entering gst_aac_parse_src_event header type = %d",
+ aacparse->header_type);
+ if (aacparse->header_type == DSPAAC_HEADER_ADTS)
+ return gst_aac_parse_adts_src_eventfunc (parse, event);
+#endif
return GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+
+}
+
+#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
+/**
+ * get_aac_parse_get_adts_frame_length:
+ * @data: #GstBufferData.
+ * @offset: #GstBufferData offset
+ *
+ * Implementation to get the ADTS frame length at the given offset.
+ *
+ * Returns: frame size
+ */
+int
+get_aac_parse_get_adts_frame_length (const unsigned char *data, gint64 offset)
+{
+ const gint adts_header_length_no_crc = 7;
+ const gint adts_header_length_with_crc = 9;
+ gint frame_size = 0;
+ gint protection_absent;
+ gint head_size;
+
+ /* check of syncword */
+ if ((data[offset + 0] != 0xff) || ((data[offset + 1] & 0xf6) != 0xf0)) {
+ GST_ERROR ("check sync word is fail\n");
+ return -1;
+ }
+
+ /* check of protection absent */
+ protection_absent = (data[offset + 1] & 0x01);
+
+ /*check of frame length */
+ frame_size =
+ (data[offset + 3] & 0x3) << 11 | data[offset + 4] << 3 | data[offset +
+ 5] >> 5;
+
+ /* check of header size */
+ /* protectionAbsent is 0 if there is CRC */
+ head_size =
+ protection_absent ? adts_header_length_no_crc :
+ adts_header_length_with_crc;
+ if (head_size > frame_size) {
+ GST_ERROR ("return frame length as 0 (frameSize %u < headSize %u)",
+ frame_size, head_size);
+ return 0;
+ }
+
+ return frame_size;
+}
+
+/**
+ * gst_aac_parse_estimate_duration:
+ * @parse: #GstBaseParse.
+ *
+ * Implementation to estimate the total duration by scanning the first frames.
+ *
+ * Returns: TRUE if the estimated total duration could be obtained
+ */
+static gboolean
+gst_aac_parse_estimate_duration (GstBaseParse * parse)
+{
+ gboolean ret = FALSE;
+ GstFlowReturn res = GST_FLOW_OK;
+ gint64 pull_size = 0, file_size = 0, offset = 0, num_frames = 0, duration = 0;
+ guint sample_rate_index = 0, sample_rate = 0, channel = 0;
+ guint frame_size = 0, frame_duration_us = 0, estimated_bitrate = 0;
+ guint lost_sync_count = 0;
+ GstClockTime estimated_duration = GST_CLOCK_TIME_NONE;
+ GstBuffer *buffer = NULL;
+ guint8 *buf = NULL;
+ gint i = 0;
+ GstPadMode pad_mode = GST_PAD_MODE_NONE;
+ GstAacParse *aacparse;
+ gint64 buffer_size = 0;
+ GstMapInfo map;
+
+ aacparse = GST_AAC_PARSE (parse);
+ GST_LOG_OBJECT (aacparse, "gst_aac_parse_estimate_duration enter");
+
+ /* check that baseparse defines these functions */
+ gst_base_parse_get_pad_mode (parse, &pad_mode);
+ if (pad_mode != GST_PAD_MODE_PULL) {
+ GST_INFO_OBJECT (aacparse,
+ "aac parser is not pull mode. can not estimate duration");
+ return FALSE;
+ }
+
+ gst_base_parse_get_upstream_size (parse, &file_size);
+
+ if (file_size < ADIF_MAX_SIZE) {
+ GST_ERROR_OBJECT (aacparse, "file size is too short");
+ return FALSE;
+ }
+
+ pull_size = MIN (file_size, AAC_MAX_ESTIMATE_DURATION_BUF);
+
+ res = gst_pad_pull_range (parse->sinkpad, 0, pull_size, &buffer);
+ if (res != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (aacparse, "gst_pad_pull_range failed!");
+ return FALSE;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ buf = map.data;
+ buffer_size = map.size;
+ if (buffer_size != pull_size) {
+ GST_ERROR_OBJECT (aacparse,
+ "We got different buffer_size(%" G_GINT64_FORMAT ") with pull_size(%"
+ G_GINT64_FORMAT ").", buffer_size, pull_size);
+ }
+
+ /* MODIFICATION : defensive code for when the actual buffer_size differs from pull_size */
+ for (i = 0; i < buffer_size; i++) {
+ if ((buf[i] == 0xff) && ((buf[i + 1] & 0xf6) == 0xf0)) { /* aac sync word */
+ //guint profile = (buf[i+2] >> 6) & 0x3;
+ sample_rate_index = (buf[i + 2] >> 2) & 0xf;
+ sample_rate =
+ gst_aac_parse_get_sample_rate_from_index (sample_rate_index);
+ if (sample_rate == 0) {
+ GST_WARNING_OBJECT (aacparse, "Invalid sample rate index (0)");
+ goto EXIT;
+ }
+ channel = (buf[i + 2] & 0x1) << 2 | (buf[i + 3] >> 6);
+
+ GST_INFO_OBJECT (aacparse, "found sync. aac fs=%d, ch=%d", sample_rate,
+ channel);
+
+ /* count number of frames */
+ /* MODIFICATION : defensive code for when the actual buffer_size differs from pull_size */
+ //while (offset < pull_size) {
+ while (offset < buffer_size) {
+ frame_size = get_aac_parse_get_adts_frame_length (buf, i + offset);
+ if (frame_size == 0) {
+ GST_ERROR_OBJECT (aacparse,
+ "framesize error at offset %" G_GINT64_FORMAT, offset);
+ break;
+ } else if (frame_size == -1) {
+ offset++;
+ lost_sync_count++; // lost sync count limitation: 2K bytes
+ if (lost_sync_count > (1024 * 2)) {
+ GST_WARNING_OBJECT (aacparse,
+ "lost_sync_count is larger than 2048");
+ goto EXIT;
+ }
+ } else {
+ offset += frame_size;
+ num_frames++;
+ lost_sync_count = 0;
+ }
+ } /* while */
+
+ /* if we got the full file, we can calculate the accurate duration */
+ /* MODIFICATION : defensive code for when the actual buffer_size differs from pull_size */
+ //if (pull_size == file_size) {
+ if (buffer_size == file_size) {
+ gfloat duration_for_one_frame = 0;
+ GstClockTime calculated_duration = GST_CLOCK_TIME_NONE;
+
+ GST_INFO_OBJECT (aacparse,
+ "we got total file (%" G_GINT64_FORMAT
+ " bytes). do not estimate but make Accurate total duration.",
+ pull_size);
+
+ duration_for_one_frame =
+ (gfloat) AAC_SAMPLE_PER_FRAME / (gfloat) sample_rate;
+ calculated_duration =
+ num_frames * duration_for_one_frame * 1000 * 1000 * 1000;
+
+ GST_INFO_OBJECT (aacparse, "duration_for_one_frame %f ms",
+ duration_for_one_frame);
+ GST_INFO_OBJECT (aacparse, "calculated duration = %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (calculated_duration));
+ /* 0 means disable estimate */
+ gst_base_parse_set_duration (parse, GST_FORMAT_TIME,
+ calculated_duration, 0);
+
+ } else {
+ GST_INFO_OBJECT (aacparse,
+ "we got %" G_GUINT64_FORMAT " bytes in total file (%"
+ G_GINT64_FORMAT "). can not make accurate duration but Estimate.",
+ pull_size, file_size);
+ frame_duration_us =
+ (1024 * 1000000ll + (sample_rate - 1)) / sample_rate;
+ duration = num_frames * frame_duration_us;
+
+ if (duration == 0) {
+ GST_WARNING_OBJECT (aacparse, "Invalid duration");
+ goto EXIT;
+ }
+ estimated_bitrate =
+ (gint) ((gfloat) (offset * 8) / (gfloat) (duration / 1000));
+
+ if (estimated_bitrate == 0) {
+ GST_WARNING_OBJECT (aacparse, "Invalid estimated_bitrate");
+ goto EXIT;
+ }
+ estimated_duration =
+ (GstClockTime) ((file_size * 8) / (estimated_bitrate * 1000)) *
+ GST_SECOND;
+
+ GST_INFO_OBJECT (aacparse, "number of frame = %" G_GINT64_FORMAT,
+ num_frames);
+ GST_INFO_OBJECT (aacparse, "duration = %" G_GINT64_FORMAT,
+ duration / 1000000);
+ GST_INFO_OBJECT (aacparse, "byte = %" G_GINT64_FORMAT, offset);
+ GST_INFO_OBJECT (aacparse, "estimated bitrate = %d bps",
+ estimated_bitrate);
+ GST_INFO_OBJECT (aacparse, "estimated duration = %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (estimated_duration));
+
+ gst_base_parse_set_average_bitrate (parse, estimated_bitrate * 1000);
+ /* set update_interval as duration(sec)/2 */
+ gst_base_parse_set_duration (parse, GST_FORMAT_TIME, estimated_duration,
+ (gint) (duration / 2));
+ }
+
+ break;
+ }
+ }
+ ret = TRUE;
+
+EXIT:
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return ret;
+}
+
+
+/* perform seek in push based mode:
+ find BYTE position to move to based on time and delegate to upstream
+*/
+static gboolean
+gst_aac_audio_parse_do_push_seek (GstBaseParse * parse,
+ GstPad * pad, GstEvent * event)
+{
+ GstAacParse *aacparse = GST_AAC_PARSE (parse);
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ gint64 cur, stop;
+ gboolean res;
+ gint64 byte_cur;
+ gint64 esimate_byte;
+ gint32 frame_dur;
+ gint64 upstream_total_bytes = 0;
+ GstFormat fmt = GST_FORMAT_BYTES;
+
+ GST_INFO_OBJECT (parse, "doing aac push-based seek");
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+ &stop_type, &stop);
+
+ /* FIXME, always play to the end */
+ stop = -1;
+
+ /* only forward streaming and seeking is possible */
+ if (rate <= 0)
+ goto unsupported_seek;
+
+ if (cur == 0) {
+ /* handle rewind only */
+ cur_type = GST_SEEK_TYPE_SET;
+ byte_cur = 0;
+ stop_type = GST_SEEK_TYPE_NONE;
+ stop = -1;
+ flags |= GST_SEEK_FLAG_FLUSH;
+ } else {
+ /* handle normal seek */
+ cur_type = GST_SEEK_TYPE_SET;
+ stop_type = GST_SEEK_TYPE_NONE;
+ stop = -1;
+ flags |= GST_SEEK_FLAG_FLUSH;
+
+ esimate_byte = (cur / (1000 * 1000)) * aacparse->frame_byte;
+ if (aacparse->sample_rate > 0)
+ frame_dur = (aacparse->spf * 1000) / aacparse->sample_rate;
+ else
+ goto unsupported_seek;
+ if (frame_dur > 0)
+ byte_cur = esimate_byte / (frame_dur);
+ else
+ goto unsupported_seek;
+
+ GST_INFO_OBJECT (parse, "frame_byte(%d) spf(%d) rate (%d) ",
+ aacparse->frame_byte, aacparse->spf, aacparse->sample_rate);
+ GST_INFO_OBJECT (parse,
+ "seek cur (%" G_GINT64_FORMAT ") = (%" GST_TIME_FORMAT ") ", cur,
+ GST_TIME_ARGS (cur));
+ GST_INFO_OBJECT (parse,
+ "esimate_byte(%" G_GINT64_FORMAT ") esimate_byte (%d)", esimate_byte,
+ frame_dur);
+ }
+
+ /* obtain real upstream total bytes */
+ if (!gst_pad_peer_query_duration (parse->sinkpad, fmt, &upstream_total_bytes))
+ upstream_total_bytes = 0;
+ GST_INFO_OBJECT (aacparse,
+ "gst_pad_query_peer_duration -upstream_total_bytes (%" G_GUINT64_FORMAT
+ ")", upstream_total_bytes);
+ aacparse->file_size = upstream_total_bytes;
+
+ if ((byte_cur == -1) || (byte_cur > aacparse->file_size)) {
+ GST_INFO_OBJECT (parse,
+ "[WEB-ERROR] seek cur (%" G_GINT64_FORMAT ") > file_size (%"
+ G_GINT64_FORMAT ") ", cur, aacparse->file_size);
+ goto abort_seek;
+ }
+
+ GST_INFO_OBJECT (parse,
+ "Pushing BYTE seek rate %g, " "start %" G_GINT64_FORMAT ", stop %"
+ G_GINT64_FORMAT, rate, byte_cur, stop);
+
+ if (!(flags & GST_SEEK_FLAG_KEY_UNIT)) {
+ GST_INFO_OBJECT (parse,
+ "Requested seek time: %" GST_TIME_FORMAT ", calculated seek offset: %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (cur), byte_cur);
+ }
+
+ /* BYTE seek event */
+ event =
+ gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, byte_cur,
+ stop_type, stop);
+ res = gst_pad_push_event (parse->sinkpad, event);
+
+ return res;
+
+ /* ERRORS */
+
+abort_seek:
+ {
+ GST_DEBUG_OBJECT (parse,
+ "could not determine byte position to seek to, " "seek aborted.");
+ return FALSE;
+ }
+
+unsupported_seek:
+ {
+ GST_DEBUG_OBJECT (parse, "unsupported seek, seek aborted.");
+ return FALSE;
+ }
+}
+
+
+static guint
+gst_aac_parse_adts_get_fast_frame_len (const guint8 * data)
+{
+ int length;
+ if ((data[0] == 0xff) && ((data[1] & 0xf6) == 0xf0)) {
+ length =
+ ((data[3] & 0x03) << 11) | (data[4] << 3) | ((data[5] & 0xe0) >> 5);
+ } else {
+ length = 0;
+ }
+ return length;
+}
+
+/**
+ * gst_aac_parse_adts_src_eventfunc:
+ * @parse: #GstBaseParse. #event
+ *
+ * before baseparse handles the seek event, build the full AAC (ADTS) index table.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_aac_parse_adts_src_eventfunc (GstBaseParse * parse, GstEvent * event)
+{
+ gboolean handled = FALSE;
+ GstAacParse *aacparse = GST_AAC_PARSE (parse);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ {
+ GstFlowReturn res = GST_FLOW_OK;
+ gint64 base_offset = 0, cur = 0;
+ gint32 frame_count = 1; /* do not add first frame because it is already in index table */
+ gint64 second_count = 0; /* initial 1 second */
+ gint64 total_file_size = 0, start_offset = 0;
+ GstClockTime current_ts = GST_CLOCK_TIME_NONE;
+ GstPadMode pad_mode = GST_PAD_MODE_NONE;
+
+ /* check that baseparse defines these functions */
+ gst_base_parse_get_pad_mode (parse, &pad_mode);
+ if (pad_mode != GST_PAD_MODE_PULL) {
+ gboolean ret = FALSE;
+ GstPad *srcpad = parse->srcpad;
+ GST_INFO_OBJECT (aacparse, "aac parser is PUSH MODE.");
+ /* check NULL */
+ ret = gst_aac_audio_parse_do_push_seek (parse, srcpad, event);
+ gst_object_unref (srcpad);
+ return ret;
+ }
+ gst_base_parse_get_upstream_size (parse, &total_file_size);
+ gst_base_parse_get_index_last_offset (parse, &start_offset);
+ gst_base_parse_get_index_last_ts (parse, ¤t_ts);
+
+ if (total_file_size > AAC_LARGE_FILE_SIZE) {
+ gst_base_parse_set_seek_mode (parse, 0);
+ GST_INFO_OBJECT (aacparse, "larger than big size (2MB).");
+ goto aac_seek_null_exit;
+ }
+
+ GST_DEBUG ("gst_aac_parse_adts_src_eventfunc GST_EVENT_SEEK enter");
+
+ if (total_file_size == 0 || start_offset >= total_file_size) {
+ GST_ERROR ("last index offset %" G_GINT64_FORMAT
+ " is larger than file size %" G_GINT64_FORMAT, start_offset,
+ total_file_size);
+ break;
+ }
+
+ gst_event_parse_seek (event, NULL, NULL, NULL, NULL, &cur, NULL, NULL);
+ if (cur <= current_ts) {
+ GST_INFO ("seek to %" GST_TIME_FORMAT " within index table %"
+ GST_TIME_FORMAT ". do not make index table", GST_TIME_ARGS (cur),
+ GST_TIME_ARGS (current_ts));
+ break;
+ } else {
+ GST_INFO ("seek to %" GST_TIME_FORMAT " without index table %"
+ GST_TIME_FORMAT ". make index table", GST_TIME_ARGS (cur),
+ GST_TIME_ARGS (current_ts));
+ }
+
+ GST_INFO ("make AAC(ADTS) Index Table. file_size = %" G_GINT64_FORMAT
+ " last idx offset=%" G_GINT64_FORMAT ", last idx ts=%"
+ GST_TIME_FORMAT, total_file_size, start_offset,
+ GST_TIME_ARGS (current_ts));
+
+ base_offset = start_offset; /* set base by start offset */
+ second_count = current_ts + GST_SECOND; /* 1sec */
+
+ /************************************/
+ /* STEP 0: Setting parse information */
+ /************************************/
+ aacparse->spf = aacparse->frame_samples;
+      aacparse->frame_duration = (aacparse->spf * 1000 * 100) / aacparse->sample_rate; /* duration per frame (1/100 msec units) */
+ aacparse->frame_per_sec = (aacparse->sample_rate) / aacparse->spf; /* frames per second (ea) */
+
+ /************************************/
+ /* STEP 1: MAX_PULL_RANGE_BUF cycle */
+ /************************************/
+ while (total_file_size - base_offset >= AAC_MAX_PULL_RANGE_BUF) {
+ gint64 offset = 0;
+ GstBuffer *buffer = NULL;
+ guint8 *buf = NULL;
+ GstMapInfo map;
+ GST_INFO ("gst_pad_pull_range %d bytes (from %" G_GINT64_FORMAT
+ ") use max size", AAC_MAX_PULL_RANGE_BUF, base_offset);
+ res =
+ gst_pad_pull_range (parse->sinkpad, base_offset,
+ base_offset + AAC_MAX_PULL_RANGE_BUF, &buffer);
+ if (res != GST_FLOW_OK) {
+ GST_ERROR ("gst_pad_pull_range failed!");
+ break;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ buf = map.data;
+ if (buf == NULL) {
+ gst_buffer_unmap (buffer, &map);
+ GST_WARNING ("buffer is NULL in make aac seek table's STEP1");
+ gst_buffer_unref (buffer);
+ goto aac_seek_null_exit;
+ }
+
+ while (offset <= AAC_MAX_PULL_RANGE_BUF) {
+ gint frame_size = 0;
+
+ /* make sure the values in the frame header look sane */
+ frame_size = gst_aac_parse_adts_get_fast_frame_len (buf);
+
+ if ((frame_size > 0)
+ && (frame_size < (AAC_MAX_PULL_RANGE_BUF - offset))) {
+ if (current_ts > second_count) { /* 1 sec == xx frames. we make idx per sec */
+ gst_base_parse_add_index_entry (parse, base_offset + offset, current_ts, TRUE, TRUE); /* force */
+ GST_DEBUG ("Adding index ts=%" GST_TIME_FORMAT " offset %"
+ G_GINT64_FORMAT, GST_TIME_ARGS (current_ts),
+ base_offset + offset);
+ second_count += GST_SECOND; /* 1sec */
+ }
+
+ current_ts += (aacparse->frame_duration * GST_MSECOND) / 100; /* each frame is (frame_duration) ms */
+ offset += frame_size;
+ buf += frame_size;
+ frame_count++;
+ } else if (frame_size >= (AAC_MAX_PULL_RANGE_BUF - offset)) {
+ GST_DEBUG ("we need refill buffer");
+ break;
+ } else {
+ GST_WARNING ("we lost sync");
+ buf++;
+ offset++;
+ }
+ } /* while */
+
+ base_offset = base_offset + offset;
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ } /* end MAX buffer cycle */
+
+ /*******************************/
+ /* STEP 2: Remain Buffer cycle */
+ /*******************************/
+ if (total_file_size - base_offset > 0) {
+ gint64 offset = 0;
+ GstBuffer *buffer = NULL;
+ guint8 *buf = NULL;
+ GstMapInfo map;
+
+ GST_INFO ("gst_pad_pull_range %" G_GINT64_FORMAT " bytes (from %"
+ G_GINT64_FORMAT ") use remain_buf size",
+ total_file_size - base_offset, base_offset);
+ res =
+ gst_pad_pull_range (parse->sinkpad, base_offset, total_file_size,
+ &buffer);
+ if (res != GST_FLOW_OK) {
+ GST_ERROR ("gst_pad_pull_range failed!");
+ break;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ buf = map.data;
+ if (buf == NULL) {
+ gst_buffer_unmap (buffer, &map);
+ GST_WARNING ("buffer is NULL in make aac seek table's STEP2");
+ gst_buffer_unref (buffer);
+ goto aac_seek_null_exit;
+ }
+
+ while (base_offset + offset < total_file_size) {
+ gint frame_size = 0;
+
+ /* make sure the values in the frame header look sane */
+ frame_size = gst_aac_parse_adts_get_fast_frame_len (buf);
+
+ if ((frame_size > 0)
+ && (frame_size <= (total_file_size - (base_offset + offset)))) {
+ if (current_ts > second_count) { /* 1 sec == xx frames. we make idx per sec */
+ gst_base_parse_add_index_entry (parse, base_offset + offset, current_ts, TRUE, TRUE); /* force */
+ GST_DEBUG ("Adding index ts=%" GST_TIME_FORMAT " offset %"
+ G_GINT64_FORMAT, GST_TIME_ARGS (current_ts),
+ base_offset + offset);
+ second_count += GST_SECOND; /* 1sec */
+ }
+
+ current_ts += (aacparse->frame_duration * GST_MSECOND) / 100; /* each frame is (frame_duration) ms */
+ offset += frame_size;
+ buf += frame_size;
+ frame_count++;
+ } else if (frame_size == 0) {
+ GST_DEBUG ("Frame size is 0 so, Decoding end..");
+ break;
+ } else {
+ GST_WARNING ("we lost sync");
+ buf++;
+ offset++;
+ }
+ } /* while */
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ }
+ /* end remain_buf buffer cycle */
+ GST_DEBUG ("gst_aac_parse_adts_src_eventfunc GST_EVENT_SEEK leave");
+ }
+ break;
+
+ default:
+ break;
+ }
+
+aac_seek_null_exit:
+
+ /* call baseparse src_event function to handle event */
+ handled = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+ return handled;
}
- #endif //end of #ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++#endif /* TIZEN_FEATURE_AACPARSE_MODIFICATION */
#define MIN_FRAME_SIZE 6
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+#define DEFAULT_CHECK_HTTP_SEEK FALSE
+
+/* Property */
+enum
+{
+ PROP_0,
+ PROP_CHECK_HTTP_SEEK
+};
+#endif
+
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
static gboolean gst_mpeg_audio_parse_start (GstBaseParse * parse);
static gboolean gst_mpeg_audio_parse_stop (GstBaseParse * parse);
+
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+static void gst_mpeg_audio_parse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_mpeg_audio_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static gboolean gst_mpeg_audio_parse_src_eventfunc (GstBaseParse * parse,
+ GstEvent * event);
+#endif
+
static GstFlowReturn gst_mpeg_audio_parse_handle_frame (GstBaseParse * parse,
GstBaseParseFrame * frame, gint * skipsize);
static GstFlowReturn gst_mpeg_audio_parse_pre_push_frame (GstBaseParse * parse,
parse_class->get_sink_caps =
GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_sink_caps);
-
-
-
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+ object_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_set_property);
+ object_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_property);
+
+ g_object_class_install_property (object_class, PROP_CHECK_HTTP_SEEK,
+ g_param_spec_boolean ("http-pull-mp3dec", "enable/disable",
+ "enable/disable mp3dec http seek pull mode",
+ DEFAULT_CHECK_HTTP_SEEK, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /* T.B.D : make full mp3 index table when seek */
+ parse_class->src_event = gst_mpeg_audio_parse_src_eventfunc;
+#endif
+
/* register tags */
#define GST_TAG_CRC "has-crc"
#define GST_TAG_MODE "channel-mode"
gst_mpeg_audio_parse_reset (mp3parse);
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+ if (mp3parse->http_seek_flag) {
+ /* Don't need Accurate Seek table (in http pull mode) */
+ GST_INFO_OBJECT (parse, "Enable (1) : mp3parse->http_seek_flag");
+ } else {
+ GST_INFO_OBJECT (parse, "Disable (0) : mp3parse->http_seek_flag");
+ }
+#endif
+
return TRUE;
}
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+static void
+gst_mpeg_audio_parse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (object);
+ GST_INFO_OBJECT (mp3parse, "set_property() START- prop_id(%d)", prop_id);
+ switch (prop_id) {
+ case PROP_CHECK_HTTP_SEEK:
+ mp3parse->http_seek_flag = g_value_get_boolean (value);
+ GST_INFO_OBJECT (mp3parse, "http_seek_flag(%d)",
+ mp3parse->http_seek_flag);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_mpeg_audio_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (object);
+ GST_INFO_OBJECT (mp3parse, "get_property() START- prop_id(%d)", prop_id);
+ switch (prop_id) {
+ case PROP_CHECK_HTTP_SEEK:
+ g_value_set_boolean (value, mp3parse->http_seek_flag);
+ GST_INFO_OBJECT (mp3parse, "http_seek_flag(%d)",
+ mp3parse->http_seek_flag);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+#endif
+
static gboolean
gst_mpeg_audio_parse_stop (GstBaseParse * parse)
{
return res;
}
+
+#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
+/**
+ * gst_mpeg_audio_parse_src_eventfunc:
+ * @parse: a #GstBaseParse, @event: the seek #GstEvent
+ *
+ * Before baseparse handles the seek event, check the pad mode and http seek flag.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_mpeg_audio_parse_src_eventfunc (GstBaseParse * parse, GstEvent * event)
+{
+ gboolean handled = FALSE;
+ GstMpegAudioParse *mp3parse;
+ mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+
+ GST_DEBUG_OBJECT (parse, "handling event %d, %s", GST_EVENT_TYPE (event),
+ GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ {
+ GST_INFO_OBJECT (mp3parse, "GST_EVENT_SEEK enter");
+ if (mp3parse->http_seek_flag) {
+ GST_INFO_OBJECT (mp3parse,
+ "souphttpsrc is PULL MODE (so accurate seek mode is OFF)");
+ /* Check the declaration of this function in the baseparse */
+ gst_base_parse_set_seek_mode (parse, 0);
+ goto mp3_seek_null_exit;
+ }
+ GST_INFO_OBJECT (mp3parse, "GST_EVENT_SEEK leave");
+ break;
+ }
+ default:
+ break;
+ }
+
+mp3_seek_null_exit:
+ /* call baseparse src_event function to handle event */
+ handled = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+
+ return handled;
+}
+#endif
"audio/mpeg, mpegversion = (int) 4, stream-format = (string) raw, framed = (boolean) TRUE; "
"audio/x-nellymoser, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 16000, 22050, 44100 }; "
"audio/x-raw, format = (string) { U8, S16LE }, layout = (string) interleaved, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
- "audio/x-alaw, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
- "audio/x-mulaw, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
+ "audio/x-alaw, channels = (int) { 1, 2 }, rate = (int) 8000; "
+ "audio/x-mulaw, channels = (int) { 1, 2 }, rate = (int) 8000; "
"audio/x-speex, channels = (int) 1, rate = (int) 16000;")
);
} else if (!strcmp (tag_name, "title")) {
gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_TITLE, s, NULL);
- } else if (!strcmp (tag_name, "metadatacreator")) {
+ } else if (!strcmp (tag_name, "metadatacreator")
+ || !strcmp (tag_name, "encoder")) {
gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_ENCODER, s, NULL);
} else {
GstCaps *caps = NULL, *old_caps;
gboolean ret = FALSE;
guint adjusted_rate = rate;
+ guint adjusted_channels = channels;
GstEvent *event;
gchar *stream_id;
} else {
adjusted_rate = rate;
}
+
+ adjusted_channels =
+ gst_codec_utils_aac_get_channels (map.data, map.size);
+
+ if (adjusted_channels && (channels != adjusted_channels)) {
+ GST_LOG_OBJECT (demux, "Ajusting AAC channels %d -> %d", channels,
+ adjusted_channels);
+ } else {
+ adjusted_channels = channels;
+ }
}
gst_buffer_unmap (demux->audio_codec_data, &map);
}
gst_caps_set_simple (caps, "rate", G_TYPE_INT, adjusted_rate,
- "channels", G_TYPE_INT, channels, NULL);
+ "channels", G_TYPE_INT, adjusted_channels, NULL);
if (demux->audio_codec_data) {
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER,
gst_flv_demux_push_src_event (demux,
gst_event_new_tag (gst_tag_list_copy (demux->taglist)));
+#ifdef TIZEN_FEATURE_FLVDEMUX_MODIFICATION
+ GST_DEBUG_OBJECT (demux, "post tag msg %" GST_PTR_FORMAT, demux->taglist);
+
+  /* post flv tag message (so the application receives it early) */
+ gst_element_post_message (GST_ELEMENT_CAST (demux),
+ gst_message_new_tag (GST_OBJECT_CAST (demux),
+ gst_tag_list_copy (demux->taglist)));
+#endif
+
if (demux->audio_pad) {
GST_DEBUG_OBJECT (demux->audio_pad, "pushing audio %" GST_PTR_FORMAT,
demux->audio_tags);
}
/* codec tags with special rates */
- if (codec_tag == 5 || codec_tag == 14)
+ if (codec_tag == 5 || codec_tag == 14 || codec_tag == 7 || codec_tag == 8)
rate = 8000;
else if ((codec_tag == 4) || (codec_tag == 11))
rate = 16000;
goto beach;
}
- gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
- demux->par_x, demux->par_y, NULL);
+ if (demux->got_par) {
+ gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ demux->par_x, demux->par_y, NULL);
+ }
if (G_LIKELY (demux->w)) {
gst_caps_set_simple (caps, "width", G_TYPE_INT, demux->w, NULL);
cts = GST_READ_UINT24_BE (data + 9);
cts = (cts + 0xff800000) ^ 0xff800000;
+ if (cts < 0 && ABS (cts) > dts) {
+ GST_ERROR_OBJECT (demux, "Detected a negative composition time offset "
+ "'%d' that would lead to negative PTS, fixing", cts);
+ cts += ABS (cts) - dts;
+ }
+
GST_LOG_OBJECT (demux, "got cts %d", cts);
}
switch (avc_packet_type) {
case 0:
{
+ if (demux->tag_data_size < codec_data) {
+ GST_ERROR_OBJECT (demux, "Got invalid H.264 codec, ignoring.");
+ break;
+ }
+
/* AVCDecoderConfigurationRecord data */
GST_LOG_OBJECT (demux, "got an H.264 codec data packet");
if (demux->video_codec_data) {
gst_buffer_unref (buffer);
buffer = NULL;
+ if (G_UNLIKELY (offset < tag_size))
+ goto exit;
+
offset -= tag_size;
if (GST_FLOW_OK != gst_flv_demux_pull_range (demux, demux->sinkpad, offset,
12, &buffer))
}
/* pause if something went wrong or at end */
- if (G_UNLIKELY (ret != GST_FLOW_OK))
+ if (G_UNLIKELY (ret != GST_FLOW_OK) && !(ret == GST_FLOW_NOT_LINKED
+ && !demux->no_more_pads))
goto pause;
gst_object_unref (demux);
#define FOURCC_avc1 GST_MAKE_FOURCC('a','v','c','1')
#define FOURCC_avc3 GST_MAKE_FOURCC('a','v','c','3')
#define FOURCC_avcC GST_MAKE_FOURCC('a','v','c','C')
+ #define FOURCC_c608 GST_MAKE_FOURCC('c','6','0','8')
+ #define FOURCC_c708 GST_MAKE_FOURCC('c','7','0','8')
+ #define FOURCC_ccdp GST_MAKE_FOURCC('c','c','d','p')
+ #define FOURCC_cdat GST_MAKE_FOURCC('c','d','a','t')
+ #define FOURCC_cdt2 GST_MAKE_FOURCC('c','d','t','2')
+ #define FOURCC_clcp GST_MAKE_FOURCC('c','l','c','p')
#define FOURCC_clip GST_MAKE_FOURCC('c','l','i','p')
#define FOURCC_cmov GST_MAKE_FOURCC('c','m','o','v')
#define FOURCC_cmvd GST_MAKE_FOURCC('c','m','v','d')
#define FOURCC_fiel GST_MAKE_FOURCC('f','i','e','l')
#define FOURCC_pcst GST_MAKE_FOURCC('p','c','s','t')
#define FOURCC_pgap GST_MAKE_FOURCC('p','g','a','p')
+ #define FOURCC_png GST_MAKE_FOURCC('p','n','g',' ')
#define FOURCC_pnot GST_MAKE_FOURCC('p','n','o','t')
#define FOURCC_qt__ GST_MAKE_FOURCC('q','t',' ',' ')
#define FOURCC_qtim GST_MAKE_FOURCC('q','t','i','m')
#define FOURCC_sbtl GST_MAKE_FOURCC('s','b','t','l')
#define FOURCC_sdp_ GST_MAKE_FOURCC('s','d','p',' ')
#define FOURCC_sidx GST_MAKE_FOURCC('s','i','d','x')
+ #define FOURCC_skip GST_MAKE_FOURCC('s','k','i','p')
#define FOURCC_smhd GST_MAKE_FOURCC('s','m','h','d')
#define FOURCC_soaa GST_MAKE_FOURCC('s','o','a','a')
#define FOURCC_soal GST_MAKE_FOURCC('s','o','a','l')
#define FOURCC_vc_1 GST_MAKE_FOURCC('v','c','-','1')
#define FOURCC_vide GST_MAKE_FOURCC('v','i','d','e')
#define FOURCC_vmhd GST_MAKE_FOURCC('v','m','h','d')
+ #define FOURCC_vp08 GST_MAKE_FOURCC('v','p','0','8')
+ #define FOURCC_vp09 GST_MAKE_FOURCC('v','p','0','9')
#define FOURCC_xvid GST_MAKE_FOURCC('x','v','i','d')
#define FOURCC_wave GST_MAKE_FOURCC('w','a','v','e')
#define FOURCC_wide GST_MAKE_FOURCC('w','i','d','e')
#define FOURCC_zlib GST_MAKE_FOURCC('z','l','i','b')
+ #define FOURCC_lpcm GST_MAKE_FOURCC('l','p','c','m')
+ #define FOURCC_av01 GST_MAKE_FOURCC('a','v','0','1')
+ #define FOURCC_av1C GST_MAKE_FOURCC('a','v','1','C')
+ #define FOURCC_av1f GST_MAKE_FOURCC('a','v','1','f')
+ #define FOURCC_av1m GST_MAKE_FOURCC('a','v','1','m')
+ #define FOURCC_av1s GST_MAKE_FOURCC('a','v','1','s')
+ #define FOURCC_av1M GST_MAKE_FOURCC('a','v','1','M')
#define FOURCC_cfhd GST_MAKE_FOURCC('C','F','H','D')
#define FOURCC_ap4x GST_MAKE_FOURCC('a','p','4','x')
#define FOURCC_tenc GST_MAKE_FOURCC('t','e','n','c')
#define FOURCC_cenc GST_MAKE_FOURCC('c','e','n','c')
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+/* Spatial Audio */
+#define FOURCC_SA3D GST_MAKE_FOURCC('S','A','3','D')
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
G_END_DECLS
#endif /* __FOURCC_H__ */
* The fragmented file features defined (only) in ISO Base Media are used by
* ISMV files making up (a.o.) Smooth Streaming (ismlmux).
*
- * A few properties (#GstQTMux:movie-timescale, #GstQTMux:trak-timescale) allow
- * adjusting some technical parameters, which might be useful in (rare) cases to
- * resolve compatibility issues in some situations.
+ * A few properties (#GstQTMux:movie-timescale, #GstQTMux:trak-timescale,
+ * #GstQTMuxPad:trak-timescale) allow adjusting some technical parameters,
+ * which might be useful in (rare) cases to resolve compatibility issues in
+ * some situations.
*
* Some other properties influence the result more fundamentally.
* A typical mov/mp4 file's metadata (aka moov) is located at the end of the
* #GstQTMux::reserved-duration-remaining property to see how close to full
* the reserved space is becoming.
*
+ * Applications that wish to be able to use/edit a file while it is being
+ * written to by live content, can use the "Robust Prefill Muxing" mode. That
+ * mode is a variant of the "Robust Muxing" mode in that it will pre-allocate a
+ * completely valid header from the start for all tracks (i.e. it appears as
+ * though the file is "reserved-max-duration" long with all samples
+ * present). This mode can be enabled by setting the
+ * #GstQTMux::reserved-moov-update-period and #GstQTMux::reserved-prefill
+ * properties. Note that this mode is only possible with input streams that have
+ * a fixed sample size (such as raw audio and Prores Video) and that don't
+ * have reordered samples.
+ *
* <refsect2>
* <title>Example pipelines</title>
* |[
GST_DEBUG_CATEGORY_STATIC (gst_qt_mux_debug);
#define GST_CAT_DEFAULT gst_qt_mux_debug
+ #ifndef ABSDIFF
+ #define ABSDIFF(a, b) ((a) > (b) ? (a) - (b) : (b) - (a))
+ #endif
+
/* Hacker notes.
*
* The basic building blocks of MP4 files are:
(gst_qt_mux_dts_method_get_type ())
#endif
+ enum
+ {
+ PROP_PAD_0,
+ PROP_PAD_TRAK_TIMESCALE,
+ };
+
+ #define DEFAULT_PAD_TRAK_TIMESCALE 0
+
+ GType gst_qt_mux_pad_get_type (void);
+
+ #define GST_TYPE_QT_MUX_PAD \
+ (gst_qt_mux_pad_get_type())
+ #define GST_QT_MUX_PAD(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_QT_MUX_PAD, GstQTMuxPad))
+ #define GST_QT_MUX_PAD_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_QT_MUX_PAD, GstQTMuxPadClass))
+ #define GST_IS_QT_MUX_PAD(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_QT_MUX_PAD))
+ #define GST_IS_QT_MUX_PAD_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_QT_MUX_PAD))
+ #define GST_QT_MUX_PAD_CAST(obj) \
+ ((GstQTMuxPad *)(obj))
+
+ typedef struct _GstQTMuxPad GstQTMuxPad;
+ typedef struct _GstQTMuxPadClass GstQTMuxPadClass;
+
+ struct _GstQTMuxPad
+ {
+ GstPad parent;
+
+ guint32 trak_timescale;
+ };
+
+ struct _GstQTMuxPadClass
+ {
+ GstPadClass parent;
+ };
+
+ G_DEFINE_TYPE (GstQTMuxPad, gst_qt_mux_pad, GST_TYPE_PAD);
+
+ static void
+ gst_qt_mux_pad_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstQTMuxPad *pad = GST_QT_MUX_PAD_CAST (object);
+
+ GST_OBJECT_LOCK (pad);
+ switch (prop_id) {
+ case PROP_PAD_TRAK_TIMESCALE:
+ pad->trak_timescale = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (pad);
+ }
+
+ static void
+ gst_qt_mux_pad_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ GstQTMuxPad *pad = GST_QT_MUX_PAD_CAST (object);
+
+ GST_OBJECT_LOCK (pad);
+ switch (prop_id) {
+ case PROP_PAD_TRAK_TIMESCALE:
+ g_value_set_uint (value, pad->trak_timescale);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (pad);
+ }
+
+ static void
+ gst_qt_mux_pad_class_init (GstQTMuxPadClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ gobject_class->get_property = gst_qt_mux_pad_get_property;
+ gobject_class->set_property = gst_qt_mux_pad_set_property;
+
+ g_object_class_install_property (gobject_class, PROP_PAD_TRAK_TIMESCALE,
+ g_param_spec_uint ("trak-timescale", "Track timescale",
+ "Timescale to use for this pad's trak (units per second, 0 is automatic)",
+ 0, G_MAXUINT32, DEFAULT_PAD_TRAK_TIMESCALE,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ }
+
+ static void
+ gst_qt_mux_pad_init (GstQTMuxPad * pad)
+ {
+ pad->trak_timescale = DEFAULT_PAD_TRAK_TIMESCALE;
+ }
+
+ static guint32
+ gst_qt_mux_pad_get_timescale (GstQTMuxPad * pad)
+ {
+ guint32 timescale;
+
+ GST_OBJECT_LOCK (pad);
+ timescale = pad->trak_timescale;
+ GST_OBJECT_UNLOCK (pad);
+
+ return timescale;
+ }
+
/* QTMux signals and args */
enum
{
PROP_RESERVED_DURATION_REMAINING,
PROP_RESERVED_MOOV_UPDATE_PERIOD,
PROP_RESERVED_BYTES_PER_SEC,
+ PROP_RESERVED_PREFILL,
#ifndef GST_REMOVE_DEPRECATED
PROP_DTS_METHOD,
#endif
PROP_DO_CTTS,
PROP_INTERLEAVE_BYTES,
PROP_INTERLEAVE_TIME,
- #endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+ PROP_MAX_RAW_AUDIO_DRIFT,
+ PROP_START_GAP_THRESHOLD,
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ PROP_EXPECTED_TRAILER_SIZE,
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
};
/* some spare for header size as well */
#define DEFAULT_RESERVED_MAX_DURATION GST_CLOCK_TIME_NONE
#define DEFAULT_RESERVED_MOOV_UPDATE_PERIOD GST_CLOCK_TIME_NONE
#define DEFAULT_RESERVED_BYTES_PER_SEC_PER_TRAK 550
+ #define DEFAULT_RESERVED_PREFILL FALSE
#define DEFAULT_INTERLEAVE_BYTES 0
#define DEFAULT_INTERLEAVE_TIME 250*GST_MSECOND
+ #define DEFAULT_MAX_RAW_AUDIO_DRIFT 40 * GST_MSECOND
+ #define DEFAULT_START_GAP_THRESHOLD 0
static void gst_qt_mux_finalize (GObject * object);
static GstFlowReturn
gst_qt_mux_robust_recording_rewrite_moov (GstQTMux * qtmux);
+ static void gst_qt_mux_update_global_statistics (GstQTMux * qtmux);
+ static void gst_qt_mux_update_edit_lists (GstQTMux * qtmux);
+
static GstElementClass *parent_class = NULL;
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+/*
+ [[ Metadata Size ]]
+ 1. Common
+ free = 8
+ moov = 8
+ mvhd = 108
+ -------------
+ total : 124
+
+ 2. Video
+ i. Video common
+ trak = 8
+ tkhd = 92
+ mdia = 8
+ mdhd = 32
+ hdlr = 45
+ minf = 8
+ vmhd = 20
+ dinf = 36 (8, dref : 16 , url : 12)
+ stbl = 8
+ ---------------
+ total : 257
+
+ ii. Variation in file format
+ - MP4
+ ftyp = 32
+ udta = 61
+ - 3GP
+ ftyp = 28
+ udta = 8
+
+ iii. Variation in codec
+ - MPEG4
+ stsd = 137(16, mp4v : 86, esds : 35)
+
+ - H.264 = 487(or 489) + (8*stts_count) + (8*frame) + (4*I-frame)
+ stsd = 134 (SPS 9, PPS 4) or 136 (SPS 111, PPS 4)
+
+ - H.263 = 470 + (8*stts_count) + (8*frame) + (4*I-frame)
+ stsd = 102 -> different from H.264
+
+ iv. Variation in frame
+ stts = 16 + (8*stts_count)
+ stss = 16 + (4*I-frame)
+ stsc = 28
+ stsz = 20 + (4*frame)
+ stco = 16 + (4*frame)
+
+ 3. Audio
+ i. Audio common
+ trak = 8
+ tkhd = 92
+ mdia = 8
+ mdhd = 32
+ hdlr = 45
+ minf = 8
+ smhd = 16
+ dinf = 36 (8, dref : 16, url : 12)
+ stbl = 8
+ ---------------
+ total : 253
+
+ stts = 16
+ stsz = 20
+ stco = 16
+ ------------
+ total : 52
+
+ ii. Variation in file format
+ - MP4
+ udta = 61
+ - 3GP
+ udta = 8
+
+ iii. Variation in codec
+ - Common
+ stts = 16 + (8*stts_count)
+ stsc = 28
+ stsz = 20 + (4*frame)
+ stco = 16 + (4*frame)
+
+ - AAC
+ stsd = 94 (16, mp4a : 78(36 ,esds : 42))
+
+ - AMR
+ stsd = 69 (16, samr : 53(36, damr : 17))
+*/
+
+/* trailer entry size */
+#define ENTRY_SIZE_VIDEO_STTS 8
+#define ENTRY_SIZE_VIDEO_STSS 4
+#define ENTRY_SIZE_VIDEO_STSZ 4
+#define ENTRY_SIZE_VIDEO_STCO 4
+#define ENTRY_SIZE_AUDIO_STTS 8
+#define ENTRY_SIZE_AUDIO_STSZ 4
+#define ENTRY_SIZE_AUDIO_STCO 4
+
+#define ENTRY_SIZE_VIDEO_MPEG4_STSD 137
+#define ENTRY_SIZE_VIDEO_H263P_STSD 102
+#define ENTRY_SIZE_AUDIO_AAC_STSD 94
+#define ENTRY_SIZE_AUDIO_AMR_STSD 69
+
+#define ENTRY_SIZE_STSC 28
+#define ENTRY_SIZE_VIDEO_ST 68 /*atom size (stss + stts + stsc + stsz + stco ) * (size + atom + version + flags + sample count)+stsz(sample size) */
+#define ENTRY_SIZE_AUDIO_ST 52 /* atom size (stts + stsc + stsz + stco) * (size + atom + version + flags + sample count)+stsz(sample size) */
+
+/* common */
+#define MUX_COMMON_SIZE_HEADER 124 /* free + moov + moov.mvhd*/
+
+#define MUX_COMMON_SIZE_VIDEO_HEADER 257
+#define MUX_COMMON_SIZE_AUDIO_HEADER 253
+
+#define MUX_COMMON_SIZE_MP4_FTYP 32
+#define MUX_COMMON_SIZE_3GP_FTYP 28
+
+#define MUX_COMMON_SIZE_MP4_UDTA 61
+#define MUX_COMMON_SIZE_3GP_UDTA 8
+
+static void
+gst_qt_mux_update_expected_trailer_size (GstQTMux *qtmux, GstQTPad *pad)
+{
+ guint nb_video_frames = 0;
+ guint nb_video_i_frames = 0;
+ guint nb_video_stts_entry = 0;
+ guint nb_audio_frames = 0;
+ guint nb_audio_stts_entry = 0;
+ gboolean video_stream = FALSE;
+ gboolean audio_stream = FALSE;
+ guint exp_size = 0;
+ GstQTMuxClass *qtmux_klass = NULL;
+
+ if (qtmux == NULL || pad == NULL) {
+ GST_ERROR_OBJECT (qtmux, "Invalid parameter");
+ return;
+ }
+
+ qtmux_klass = (GstQTMuxClass *)(G_OBJECT_GET_CLASS(qtmux));
+
+ if (!strncmp(GST_PAD_NAME(pad->collect.pad), "video", 5)) {
+ nb_video_frames += pad->trak->mdia.minf.stbl.stsz.table_size;
+ nb_video_i_frames += pad->trak->mdia.minf.stbl.stss.entries.len;
+ nb_video_stts_entry += pad->trak->mdia.minf.stbl.stts.entries.len;
+
+ video_stream = TRUE;
+ } else if (!strncmp(GST_PAD_NAME(pad->collect.pad), "audio", 5)) {
+ nb_audio_frames += pad->trak->mdia.minf.stbl.stsz.table_size;
+ nb_audio_stts_entry += pad->trak->mdia.minf.stbl.stts.entries.len;
+
+ audio_stream = TRUE;
+ }
+
+ /* free + moov + mvhd */
+ qtmux->expected_trailer_size = MUX_COMMON_SIZE_HEADER;
+
+  /* ftyp + udta * 3 (there are 3 udta fields of the same size) */
+ switch (qtmux_klass->format) {
+ case GST_QT_MUX_FORMAT_MP4:
+ qtmux->expected_trailer_size += MUX_COMMON_SIZE_MP4_FTYP + MUX_COMMON_SIZE_MP4_UDTA * 3;
+ break;
+ case GST_QT_MUX_FORMAT_3GP:
+ qtmux->expected_trailer_size += MUX_COMMON_SIZE_3GP_FTYP + MUX_COMMON_SIZE_3GP_UDTA * 3;
+ break;
+ default:
+ break;
+ }
+
+ /* Calculate trailer size for video stream */
+ if (video_stream) {
+ switch (pad->fourcc) {
+ case FOURCC_h263:
+ case FOURCC_s263:
+ exp_size += MUX_COMMON_SIZE_VIDEO_HEADER + ENTRY_SIZE_VIDEO_H263P_STSD;
+ break;
+ case FOURCC_mp4v:
+ case FOURCC_MP4V:
+ case FOURCC_fmp4:
+ case FOURCC_FMP4:
+ case FOURCC_3gp4:
+ case FOURCC_3gp6:
+ case FOURCC_3gg6:
+ exp_size += MUX_COMMON_SIZE_VIDEO_HEADER + ENTRY_SIZE_VIDEO_MPEG4_STSD;
+ break;
+ default:
+ break;
+ }
+
+ /* frame related */
+ exp_size += ENTRY_SIZE_VIDEO_ST + (ENTRY_SIZE_VIDEO_STTS * nb_video_stts_entry) +
+ (ENTRY_SIZE_VIDEO_STSS * nb_video_i_frames) + (ENTRY_SIZE_STSC) +
+ ((ENTRY_SIZE_VIDEO_STSZ + ENTRY_SIZE_VIDEO_STCO) * nb_video_frames);
+
+ qtmux->video_expected_trailer_size = exp_size;
+ }
+
+ /* Calculate trailer size for audio stream */
+ if (audio_stream) {
+ exp_size += MUX_COMMON_SIZE_AUDIO_HEADER + ENTRY_SIZE_AUDIO_ST + (ENTRY_SIZE_AUDIO_STTS * nb_audio_stts_entry) +
+ (ENTRY_SIZE_STSC) + ((ENTRY_SIZE_AUDIO_STSZ + ENTRY_SIZE_AUDIO_STCO) * nb_audio_frames);
+
+ if (pad->fourcc == FOURCC_samr)
+ exp_size += ENTRY_SIZE_AUDIO_AMR_STSD;
+ else
+ exp_size += ENTRY_SIZE_AUDIO_AAC_STSD;
+
+ qtmux->audio_expected_trailer_size = exp_size;
+ }
+
+ qtmux->expected_trailer_size += qtmux->video_expected_trailer_size + qtmux->audio_expected_trailer_size;
+
+ /*
+ GST_INFO_OBJECT (qtmux, "pad type %s", GST_PAD_NAME(pad->collect.pad));
+ GST_INFO_OBJECT (qtmux, "VIDEO : stts-entry=[%d], i-frame=[%d], video-sample=[%d]", nb_video_stts_entry, nb_video_i_frames, nb_video_frames);
+ GST_INFO_OBJECT (qtmux, "AUDIO : stts-entry=[%d], audio-sample=[%d]", nb_audio_stts_entry, nb_audio_frames);
+ GST_INFO_OBJECT (qtmux, "expected trailer size %d", qtmux->expected_trailer_size);
+ */
+
+ return;
+}
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+
static void
gst_qt_mux_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstQTMuxClass *klass = (GstQTMuxClass *) g_class;
GstQTMuxClassParams *params;
- GstPadTemplate *videosinktempl, *audiosinktempl, *subtitlesinktempl;
+ GstPadTemplate *videosinktempl, *audiosinktempl, *subtitlesinktempl,
+ *captionsinktempl;
GstPadTemplate *srctempl;
gchar *longname, *description;
gst_element_class_add_pad_template (element_class, srctempl);
if (params->audio_sink_caps) {
- audiosinktempl = gst_pad_template_new ("audio_%u",
- GST_PAD_SINK, GST_PAD_REQUEST, params->audio_sink_caps);
+ audiosinktempl = gst_pad_template_new_with_gtype ("audio_%u",
+ GST_PAD_SINK, GST_PAD_REQUEST, params->audio_sink_caps,
+ GST_TYPE_QT_MUX_PAD);
gst_element_class_add_pad_template (element_class, audiosinktempl);
}
if (params->video_sink_caps) {
- videosinktempl = gst_pad_template_new ("video_%u",
- GST_PAD_SINK, GST_PAD_REQUEST, params->video_sink_caps);
+ videosinktempl = gst_pad_template_new_with_gtype ("video_%u",
+ GST_PAD_SINK, GST_PAD_REQUEST, params->video_sink_caps,
+ GST_TYPE_QT_MUX_PAD);
gst_element_class_add_pad_template (element_class, videosinktempl);
}
if (params->subtitle_sink_caps) {
- subtitlesinktempl = gst_pad_template_new ("subtitle_%u",
- GST_PAD_SINK, GST_PAD_REQUEST, params->subtitle_sink_caps);
+ subtitlesinktempl = gst_pad_template_new_with_gtype ("subtitle_%u",
+ GST_PAD_SINK, GST_PAD_REQUEST, params->subtitle_sink_caps,
+ GST_TYPE_QT_MUX_PAD);
gst_element_class_add_pad_template (element_class, subtitlesinktempl);
}
+ if (params->caption_sink_caps) {
+ captionsinktempl = gst_pad_template_new_with_gtype ("caption_%u",
+ GST_PAD_SINK, GST_PAD_REQUEST, params->caption_sink_caps,
+ GST_TYPE_QT_MUX_PAD);
+ gst_element_class_add_pad_template (element_class, captionsinktempl);
+ }
+
klass->format = params->prop->format;
}
GParamFlags streamable_flags;
const gchar *streamable_desc;
gboolean streamable;
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ GParamSpec *tspec = NULL;
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
#define STREAMABLE_DESC "If set to true, the output should be as if it is to "\
"be streamed and hence no indexes written or duration written."
"Multiplier for converting reserved-max-duration into bytes of header to reserve, per second, per track",
0, 10000, DEFAULT_RESERVED_BYTES_PER_SEC_PER_TRAK,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_RESERVED_PREFILL,
+ g_param_spec_boolean ("reserved-prefill",
+ "Reserved Prefill Samples Table",
+ "Prefill samples table of reserved duration",
+ DEFAULT_RESERVED_PREFILL,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INTERLEAVE_BYTES,
g_param_spec_uint64 ("interleave-bytes", "Interleave (bytes)",
"Interleave between streams in bytes",
"Interleave between streams in nanoseconds",
0, G_MAXUINT64, DEFAULT_INTERLEAVE_TIME,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MAX_RAW_AUDIO_DRIFT,
+ g_param_spec_uint64 ("max-raw-audio-drift", "Max Raw Audio Drift",
+ "Maximum allowed drift of raw audio samples vs. timestamps in nanoseconds",
+ 0, G_MAXUINT64, DEFAULT_MAX_RAW_AUDIO_DRIFT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_START_GAP_THRESHOLD,
+ g_param_spec_uint64 ("start-gap-threshold", "Start Gap Threshold",
+ "Threshold for creating an edit list for gaps at the start in nanoseconds",
+ 0, G_MAXUINT64, DEFAULT_START_GAP_THRESHOLD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ tspec = g_param_spec_uint("expected-trailer-size", "Expected Trailer Size",
+ "Expected trailer size (bytes)",
+ 0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS);
+ if (tspec)
+ g_object_class_install_property(gobject_class, PROP_EXPECTED_TRAILER_SIZE, tspec);
+ else
+ GST_ERROR("g_param_spec failed for \"expected-trailer-size\"");
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+
gstelement_class->request_new_pad =
GST_DEBUG_FUNCPTR (gst_qt_mux_request_new_pad);
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_qt_mux_change_state);
qtpad->sample_size = 0;
qtpad->sync = FALSE;
qtpad->last_dts = 0;
+ qtpad->sample_offset = 0;
qtpad->dts_adjustment = GST_CLOCK_TIME_NONE;
qtpad->first_ts = GST_CLOCK_TIME_NONE;
qtpad->first_dts = GST_CLOCK_TIME_NONE;
qtpad->traf = NULL;
}
atom_array_clear (&qtpad->fragment_buffers);
+ if (qtpad->samples)
+ g_array_unref (qtpad->samples);
+ qtpad->samples = NULL;
/* reference owned elsewhere */
qtpad->tfra = NULL;
if (qtpad->first_tc)
gst_video_time_code_free (qtpad->first_tc);
qtpad->first_tc = NULL;
+
+ if (qtpad->raw_audio_adapter)
+ gst_object_unref (qtpad->raw_audio_adapter);
+ qtpad->raw_audio_adapter = NULL;
}
/*
if (alloc) {
qtmux->moov = atom_moov_new (qtmux->context);
+#ifndef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
/* ensure all is as nice and fresh as request_new_pad would provide it */
for (walk = qtmux->sinkpads; walk; walk = g_slist_next (walk)) {
GstQTPad *qtpad = (GstQTPad *) walk->data;
qtpad->trak = atom_trak_new (qtmux->context);
atom_moov_add_trak (qtmux->moov, qtpad->trak);
}
+#endif
}
qtmux->current_pad = NULL;
qtmux->last_moov_update = GST_CLOCK_TIME_NONE;
qtmux->muxed_since_last_update = 0;
qtmux->reserved_duration_remaining = GST_CLOCK_TIME_NONE;
+
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ qtmux->expected_trailer_size = 0;
+ qtmux->video_expected_trailer_size = 0;
+ qtmux->audio_expected_trailer_size = 0;
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
}
static void
DEFAULT_RESERVED_BYTES_PER_SEC_PER_TRAK;
qtmux->interleave_bytes = DEFAULT_INTERLEAVE_BYTES;
qtmux->interleave_time = DEFAULT_INTERLEAVE_TIME;
+ qtmux->max_raw_audio_drift = DEFAULT_MAX_RAW_AUDIO_DRIFT;
+ qtmux->start_gap_threshold = DEFAULT_START_GAP_THRESHOLD;
/* always need this */
qtmux->context =
return newbuf;
}
+ /* extract_608_field_from_s334_1a:
+  * Extracts the CEA-608 byte pairs belonging to one field (1 or 2) from
+  * S334-1A packed cc_data: a sequence of 3-byte triplets, each one
+  * flag/line-offset byte (top bit set for field 1) followed by two 608 bytes.
+  *
+  * Returns the number of bytes stored in @res (always a multiple of 2), or 0
+  * with *@res set to NULL when the input holds no pair for @field.  The
+  * returned buffer is g_malloc()'d and owned by the caller.
+  */
+ static gsize
+ extract_608_field_from_s334_1a (const guint8 * ccdata, gsize ccdata_size,
+     guint field, guint8 ** res)
+ {
+   guint8 *storage;
+   gsize storage_size = 128;
+   gsize i, res_size = 0;
+ 
+   storage = g_malloc0 (storage_size);
+ 
+   /* Iterate over the ccdata and put the corresponding tuples for the given
+    * field in the storage.  @i already advances in steps of 3 (one triplet
+    * per iteration), so the triplet starts at ccdata[i]; the previous
+    * ccdata[i * 3] indexing multiplied by 3 twice and read past the end of
+    * the buffer for every triplet but the first. */
+   for (i = 0; i < ccdata_size; i += 3) {
+     if ((field == 1 && (ccdata[i] & 0x80)) ||
+         (field == 2 && !(ccdata[i] & 0x80))) {
+       GST_DEBUG ("Storing matching cc for field %d : 0x%02x 0x%02x", field,
+           ccdata[i + 1], ccdata[i + 2]);
+       /* Grow the scratch buffer in 128-byte steps; res_size and
+        * storage_size are both even, so two more bytes always fit after
+        * this check. */
+       if (res_size >= storage_size) {
+         storage_size += 128;
+         storage = g_realloc (storage, storage_size);
+       }
+       storage[res_size] = ccdata[i + 1];
+       storage[res_size + 1] = ccdata[i + 2];
+       res_size += 2;
+     }
+   }
+ 
+   if (res_size == 0) {
+     /* Nothing for this field: free the scratch buffer and signal "no data" */
+     g_free (storage);
+     *res = NULL;
+     return 0;
+   }
+ 
+   *res = storage;
+   return res_size;
+ }
+
+
+ /* gst_qt_mux_prepare_caption_buffer:
+  * Repackages an incoming closed-caption buffer into the payload layout the
+  * QT caption tracks expect: for c608, one or two length-prefixed 'cdat' /
+  * 'cdt2' atoms (field 1 / field 2); for c708, the whole CDP wrapped in a
+  * 'ccdp' atom.  Takes ownership of @buf and returns a newly allocated
+  * buffer (with @buf's metadata copied over), or NULL when there is nothing
+  * to output.  In prefill mode the output size is fixed (20 bytes for c608,
+  * 256+8 for c708) so the pre-computed sample tables stay valid. */
+ static GstBuffer *
+ gst_qt_mux_prepare_caption_buffer (GstQTPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+ {
+ GstBuffer *newbuf = NULL;
+ GstMapInfo map, inmap;
+ gsize size;
+ gboolean in_prefill;
+
+ if (buf == NULL)
+ return NULL;
+
+ in_prefill = (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL);
+
+ size = gst_buffer_get_size (buf);
+ gst_buffer_map (buf, &inmap, GST_MAP_READ);
+
+ GST_LOG_OBJECT (qtmux,
+ "Preparing caption buffer %" GST_FOURCC_FORMAT " size:%" G_GSIZE_FORMAT,
+ GST_FOURCC_ARGS (qtpad->fourcc), size);
+
+ switch (qtpad->fourcc) {
+ case FOURCC_c608:
+ {
+ guint8 *cdat, *cdt2;
+ gsize cdat_size, cdt2_size, total_size = 0;
+ gsize write_offs = 0;
+
+ /* Split the S334-1A triplets into per-field 608 byte pairs */
+ cdat_size =
+ extract_608_field_from_s334_1a (inmap.data, inmap.size, 1, &cdat);
+ cdt2_size =
+ extract_608_field_from_s334_1a (inmap.data, inmap.size, 2, &cdt2);
+
+ /* Each non-empty field costs its payload plus an 8-byte atom header */
+ if (cdat_size)
+ total_size += cdat_size + 8;
+ if (cdt2_size)
+ total_size += cdt2_size + 8;
+ if (total_size == 0) {
+ GST_DEBUG_OBJECT (qtmux, "No 608 data ?");
+ /* FIXME : We might want to *always* store something, even if
+ * it's "empty" CC (i.e. 0x80 0x80) */
+ break;
+ }
+
+ /* Prefill mode always emits a fixed 20 bytes: two 10-byte atoms
+ * (8-byte header + one byte pair each), matching
+ * prefill_get_sample_size() for c608 */
+ newbuf = gst_buffer_new_and_alloc (in_prefill ? 20 : total_size);
+ /* Let's copy over all metadata and not the memory */
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_METADATA, 0, size);
+
+ gst_buffer_map (newbuf, &map, GST_MAP_WRITE);
+ /* 'cdat' atom (field 1): size is big-endian, fourcc written LE here */
+ if (cdat_size || in_prefill) {
+ GST_WRITE_UINT32_BE (map.data, in_prefill ? 10 : cdat_size + 8);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_cdat);
+ if (cdat_size)
+ memcpy (map.data + 8, cdat, in_prefill ? 2 : cdat_size);
+ else {
+ /* Write 'empty' CC */
+ map.data[8] = 0x80;
+ map.data[9] = 0x80;
+ }
+ write_offs = in_prefill ? 10 : cdat_size + 8;
+ if (cdat_size)
+ g_free (cdat);
+ }
+
+ /* 'cdt2' atom (field 2), appended after the field-1 atom if any */
+ if (cdt2_size || in_prefill) {
+ GST_WRITE_UINT32_BE (map.data + write_offs,
+ in_prefill ? 10 : cdt2_size + 8);
+ GST_WRITE_UINT32_LE (map.data + write_offs + 4, FOURCC_cdt2);
+ if (cdt2_size)
+ memcpy (map.data + write_offs + 8, cdt2, in_prefill ? 2 : cdt2_size);
+ else {
+ /* Write 'empty' CC */
+ map.data[write_offs + 8] = 0x80;
+ map.data[write_offs + 9] = 0x80;
+ }
+ if (cdt2_size)
+ g_free (cdt2);
+ }
+ gst_buffer_unmap (newbuf, &map);
+ break;
+ }
+ break;
+ case FOURCC_c708:
+ {
+ /* Take the whole CDP */
+ /* Prefill reserved exactly 256+8 bytes per sample, so a larger CDP
+ * cannot be stored; newbuf stays NULL and the buffer is dropped */
+ if (in_prefill && size > 256) {
+ GST_ERROR_OBJECT (qtmux, "Input C708 CDP too big for prefill mode !");
+ break;
+ }
+ newbuf = gst_buffer_new_and_alloc (in_prefill ? 256 + 8 : size + 8);
+
+ /* Let's copy over all metadata and not the memory */
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_METADATA, 0, size);
+
+ gst_buffer_map (newbuf, &map, GST_MAP_WRITE);
+
+ GST_WRITE_UINT32_BE (map.data, size + 8);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_ccdp);
+ memcpy (map.data + 8, inmap.data, inmap.size);
+
+ gst_buffer_unmap (newbuf, &map);
+ break;
+ }
+ default:
+ /* theoretically this should never happen, but let's keep this here in case */
+ GST_WARNING_OBJECT (qtmux, "Unknown caption format");
+ break;
+ }
+
+ /* Input buffer is consumed in all paths */
+ gst_buffer_unmap (buf, &inmap);
+ gst_buffer_unref (buf);
+
+ return newbuf;
+ }
+
static GstBuffer *
gst_qt_mux_prepare_tx3g_buffer (GstQTPad * qtpad, GstBuffer * buf,
GstQTMux * qtmux)
qtmux->moov_recov_file = NULL;
}
+ /* prefill_get_block_index:
+  * Maps the pad's current sample offset to the index of the next prefill
+  * block.  For constant-size codecs (ProRes, captions) the sample offset is
+  * the block index itself; for raw audio the offset counts audio samples and
+  * is converted onto the fixed-duration block grid.  Returns (guint64) -1
+  * for codecs that cannot be pre-filled.
+  */
+ static guint64
+ prefill_get_block_index (GstQTMux * qtmux, GstQTPad * qpad)
+ {
+   guint64 idx;
+ 
+   switch (qpad->fourcc) {
+     case FOURCC_sowt:
+     case FOURCC_twos:
+       /* raw audio: one block spans expected_sample_duration worth of samples */
+       idx = gst_util_uint64_scale_ceil (qpad->sample_offset,
+           qpad->expected_sample_duration_n,
+           qpad->expected_sample_duration_d *
+           atom_trak_get_timescale (qpad->trak));
+       break;
+     case FOURCC_apch:
+     case FOURCC_apcn:
+     case FOURCC_apcs:
+     case FOURCC_apco:
+     case FOURCC_ap4h:
+     case FOURCC_ap4x:
+     case FOURCC_c608:
+     case FOURCC_c708:
+       idx = qpad->sample_offset;
+       break;
+     default:
+       idx = -1;
+       break;
+   }
+ 
+   return idx;
+ }
+
+ /* Pick the pre-allocated ProRes frame size for the given sample table
+  * entry: height bands are <=480, <=576, <=720, <=1080 lines, and larger. */
+ static guint
+ prefill_prores_sample_size_for_height (const SampleTableEntryMP4V * ste,
+     const guint sizes[5])
+ {
+   if (ste->height <= 480)
+     return sizes[0];
+   if (ste->height <= 576)
+     return sizes[1];
+   if (ste->height <= 720)
+     return sizes[2];
+   if (ste->height <= 1080)
+     return sizes[3];
+   return sizes[4];
+ }
+ 
+ /* prefill_get_sample_size:
+  * Returns the constant number of bytes reserved in the mdat for one sample
+  * (one prefill block) of this pad, or (guint) -1 for codecs that cannot be
+  * pre-filled.  For ProRes this is a worst-case frame size by resolution;
+  * for captions it is the fixed packet size written by
+  * gst_qt_mux_prepare_caption_buffer(); for raw audio it is the byte size
+  * of the next fixed-duration block.
+  */
+ static guint
+ prefill_get_sample_size (GstQTMux * qtmux, GstQTPad * qpad)
+ {
+   /* Worst-case compressed frame sizes per ProRes flavour and height band */
+   static const guint apch_sizes[5] =
+       { 300000, 350000, 525000, 1050000, 4150000 };
+   static const guint apcn_sizes[5] =
+       { 200000, 250000, 350000, 700000, 2800000 };
+   static const guint apcs_sizes[5] =
+       { 150000, 200000, 250000, 500000, 2800000 };
+   static const guint apco_sizes[5] = { 80000, 100000, 150000, 250000, 900000 };
+ 
+   switch (qpad->fourcc) {
+     case FOURCC_apch:
+       return prefill_prores_sample_size_for_height (
+           (const SampleTableEntryMP4V *) qpad->trak_ste, apch_sizes);
+     case FOURCC_apcn:
+       return prefill_prores_sample_size_for_height (
+           (const SampleTableEntryMP4V *) qpad->trak_ste, apcn_sizes);
+     case FOURCC_apcs:
+       return prefill_prores_sample_size_for_height (
+           (const SampleTableEntryMP4V *) qpad->trak_ste, apcs_sizes);
+     case FOURCC_apco:
+       return prefill_prores_sample_size_for_height (
+           (const SampleTableEntryMP4V *) qpad->trak_ste, apco_sizes);
+     case FOURCC_c608:
+       /* We always write both cdat and cdt2 atoms in prefill mode:
+        * two 10-byte atoms per sample */
+       return 20;
+     case FOURCC_c708:
+       /* We're cheating a bit by always allocating 256 bytes plus 8 bytes
+        * for the atom header even if we use less */
+       return 256 + 8;
+     case FOURCC_sowt:
+     case FOURCC_twos:{
+       guint64 block_idx;
+       guint64 next_sample_offset;
+ 
+       /* Raw audio: size of the block running from the current sample
+        * offset up to the next block-grid boundary */
+       block_idx = prefill_get_block_index (qtmux, qpad);
+       next_sample_offset =
+           gst_util_uint64_scale (block_idx + 1,
+           qpad->expected_sample_duration_d *
+           atom_trak_get_timescale (qpad->trak),
+           qpad->expected_sample_duration_n);
+ 
+       return (next_sample_offset - qpad->sample_offset) * qpad->sample_size;
+     }
+     case FOURCC_ap4h:
+     case FOURCC_ap4x:
+     default:
+       /* ProRes 4444 has no fixed worst-case table here */
+       GST_ERROR_OBJECT (qtmux, "unsupported codec for pre-filling");
+       return -1;
+   }
+ }
+
+ /* prefill_get_next_timestamp:
+  * Returns the stream time in nanoseconds at which the pad's next prefill
+  * block ends, i.e. the running time after one more block is written.
+  * Returns (GstClockTime) -1 for codecs that cannot be pre-filled. */
+ static GstClockTime
+ prefill_get_next_timestamp (GstQTMux * qtmux, GstQTPad * qpad)
+ {
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ case FOURCC_apcn:
+ case FOURCC_apcs:
+ case FOURCC_apco:
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ case FOURCC_c608:
+ case FOURCC_c708:
+ /* One block per frame: (sample_offset + 1) frame durations, where the
+ * frame duration is d/n seconds (n/d is the expected framerate) */
+ return gst_util_uint64_scale (qpad->sample_offset + 1,
+ qpad->expected_sample_duration_d * GST_SECOND,
+ qpad->expected_sample_duration_n);
+ case FOURCC_sowt:
+ case FOURCC_twos:{
+ guint64 block_idx;
+ guint64 next_sample_offset;
+
+ /* Raw audio: convert the end of the next block (in audio samples at
+ * the trak timescale) back to nanoseconds */
+ block_idx = prefill_get_block_index (qtmux, qpad);
+ next_sample_offset =
+ gst_util_uint64_scale (block_idx + 1,
+ qpad->expected_sample_duration_d *
+ atom_trak_get_timescale (qpad->trak),
+ qpad->expected_sample_duration_n);
+
+ return gst_util_uint64_scale (next_sample_offset, GST_SECOND,
+ atom_trak_get_timescale (qpad->trak));
+ }
+ default:
+ GST_ERROR_OBJECT (qtmux, "unsupported codec for pre-filling");
+ return -1;
+ }
+
+ /* not reached: every switch branch above returns */
+ return -1;
+ }
+
+ /* prefill_raw_audio_prepare_buf_func:
+  * prepare_buf_func for raw audio pads in prefill mode.  Accumulates input
+  * in qtpad->raw_audio_adapter and releases data only in whole fixed-duration
+  * blocks, so the actual chunks match the pre-computed sample tables.
+  * Takes ownership of @buf (may be NULL on EOS drain); returns one block,
+  * or NULL when not enough data is buffered yet. */
+ static GstBuffer *
+ prefill_raw_audio_prepare_buf_func (GstQTPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+ {
+ guint64 block_idx;
+ guint64 nsamples;
+ GstClockTime input_timestamp;
+ guint64 input_timestamp_distance;
+
+ if (buf)
+ gst_adapter_push (qtpad->raw_audio_adapter, buf);
+
+ /* Current block on the fixed-duration grid, and the number of audio
+ * samples needed to complete it */
+ block_idx = gst_util_uint64_scale_ceil (qtpad->raw_audio_adapter_offset,
+ qtpad->expected_sample_duration_n,
+ qtpad->expected_sample_duration_d *
+ atom_trak_get_timescale (qtpad->trak));
+ nsamples =
+ gst_util_uint64_scale (block_idx + 1,
+ qtpad->expected_sample_duration_d * atom_trak_get_timescale (qtpad->trak),
+ qtpad->expected_sample_duration_n) - qtpad->raw_audio_adapter_offset;
+
+ /* Wait for a full block unless EOS (then flush whatever is left);
+ * with nothing buffered at all there is never anything to output */
+ if ((!GST_COLLECT_PADS_STATE_IS_SET (&qtpad->collect,
+ GST_COLLECT_PADS_STATE_EOS)
+ && gst_adapter_available (qtpad->raw_audio_adapter) <
+ nsamples * qtpad->sample_size)
+ || gst_adapter_available (qtpad->raw_audio_adapter) == 0) {
+ return NULL;
+ }
+
+ /* Interpolate the block's PTS from the last buffer PTS seen by the
+ * adapter plus the byte distance since then (sample_size bytes per
+ * audio sample, timescale samples per second) */
+ input_timestamp =
+ gst_adapter_prev_pts (qtpad->raw_audio_adapter,
+ &input_timestamp_distance);
+ if (input_timestamp != GST_CLOCK_TIME_NONE)
+ input_timestamp +=
+ gst_util_uint64_scale (input_timestamp_distance, GST_SECOND,
+ qtpad->sample_size * atom_trak_get_timescale (qtpad->trak));
+
+ buf =
+ gst_adapter_take_buffer (qtpad->raw_audio_adapter,
+ !GST_COLLECT_PADS_STATE_IS_SET (&qtpad->collect,
+ GST_COLLECT_PADS_STATE_EOS) ? nsamples *
+ qtpad->sample_size : gst_adapter_available (qtpad->raw_audio_adapter));
+ GST_BUFFER_PTS (buf) = input_timestamp;
+ GST_BUFFER_DTS (buf) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_DURATION (buf) = GST_CLOCK_TIME_NONE;
+
+ qtpad->raw_audio_adapter_offset += nsamples;
+
+ /* Check if we have yet another block of raw audio in the adapter */
+ nsamples =
+ gst_util_uint64_scale (block_idx + 2,
+ qtpad->expected_sample_duration_d * atom_trak_get_timescale (qtpad->trak),
+ qtpad->expected_sample_duration_n) - qtpad->raw_audio_adapter_offset;
+ if (gst_adapter_available (qtpad->raw_audio_adapter) >=
+ nsamples * qtpad->sample_size) {
+ /* Remember the PTS of the pending block so pad selection can compare
+ * timestamps without draining the adapter */
+ input_timestamp =
+ gst_adapter_prev_pts (qtpad->raw_audio_adapter,
+ &input_timestamp_distance);
+ if (input_timestamp != GST_CLOCK_TIME_NONE)
+ input_timestamp +=
+ gst_util_uint64_scale (input_timestamp_distance, GST_SECOND,
+ qtpad->sample_size * atom_trak_get_timescale (qtpad->trak));
+ qtpad->raw_audio_adapter_pts = input_timestamp;
+ } else {
+ qtpad->raw_audio_adapter_pts = GST_CLOCK_TIME_NONE;
+ }
+
+ return buf;
+ }
+
+ /* find_video_sample_duration:
+  * Stores the expected per-sample duration fraction (*dur_d / *dur_n
+  * seconds) of the first video track into @dur_n/@dur_d.  When no video
+  * track exists, falls back to 25/1, i.e. 40ms blocks. */
+ static void
+ find_video_sample_duration (GstQTMux * qtmux, guint * dur_n, guint * dur_d)
+ {
+   GSList *l;
+ 
+   /* Find the (first) video track and assume that we have to output
+    * in that size */
+   for (l = qtmux->collect->data; l != NULL; l = g_slist_next (l)) {
+     GstQTPad *pad = (GstQTPad *) (GstCollectData *) l->data;
+ 
+     if (pad->trak->is_video) {
+       *dur_n = pad->expected_sample_duration_n;
+       *dur_d = pad->expected_sample_duration_d;
+       return;
+     }
+   }
+ 
+   GST_INFO_OBJECT (qtmux,
+       "Found no video framerate, using 40ms audio buffers");
+   *dur_n = 25;
+   *dur_d = 1;
+ }
+
+ /* Called when all pads are prerolled to adjust and finalize the per-pad
+  * prefill parameters: constant sample sizes for fixed-size codecs, and the
+  * block duration / prepare_buf_func for raw audio.  Returns TRUE (no branch
+  * currently fails). */
+ static gboolean
+ prefill_update_sample_size (GstQTMux * qtmux, GstQTPad * qpad)
+ {
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ case FOURCC_apcn:
+ case FOURCC_apcs:
+ case FOURCC_apco:
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ {
+ /* NOTE(review): prefill_get_sample_size() returns (guint) -1 for
+ * ap4h/ap4x; that value is passed to atom_trak_set_constant_size_samples
+ * here — confirm the later sample_size == -1 check catches this */
+ guint sample_size = prefill_get_sample_size (qtmux, qpad);
+ atom_trak_set_constant_size_samples (qpad->trak, sample_size);
+ return TRUE;
+ }
+ case FOURCC_c608:
+ case FOURCC_c708:
+ {
+ guint sample_size = prefill_get_sample_size (qtmux, qpad);
+ /* We need a "valid" duration */
+ find_video_sample_duration (qtmux, &qpad->expected_sample_duration_n,
+ &qpad->expected_sample_duration_d);
+ atom_trak_set_constant_size_samples (qpad->trak, sample_size);
+ return TRUE;
+ }
+ case FOURCC_sowt:
+ case FOURCC_twos:{
+ /* Raw audio blocks follow the (first) video frame duration */
+ find_video_sample_duration (qtmux, &qpad->expected_sample_duration_n,
+ &qpad->expected_sample_duration_d);
+ /* Set a prepare_buf_func that ensures this */
+ qpad->prepare_buf_func = prefill_raw_audio_prepare_buf_func;
+ qpad->raw_audio_adapter = gst_adapter_new ();
+ qpad->raw_audio_adapter_offset = 0;
+ qpad->raw_audio_adapter_pts = GST_CLOCK_TIME_NONE;
+
+ return TRUE;
+ }
+ default:
+ return TRUE;
+ }
+ }
+
+ /* Only called at startup when doing the "fake" iteration of all tracks in order
+ * to prefill the sample tables in the header. */
+ /* Returns the pad whose next prefill block should be laid out, or NULL when
+  * every pad has reached reserved_max_duration. */
+ static GstQTPad *
+ find_best_pad_prefill_start (GstQTMux * qtmux)
+ {
+ GSList *walk;
+ GstQTPad *best_pad = NULL;
+
+ /* If interleave limits have been specified and the current pad is within
+ * those interleave limits, pick that one, otherwise let's try to figure out
+ * the next best one. */
+ if (qtmux->current_pad &&
+ (qtmux->interleave_bytes != 0 || qtmux->interleave_time != 0) &&
+ (qtmux->interleave_bytes == 0
+ || qtmux->current_chunk_size <= qtmux->interleave_bytes)
+ && (qtmux->interleave_time == 0
+ || qtmux->current_chunk_duration <= qtmux->interleave_time)
+ && qtmux->mux_mode != GST_QT_MUX_MODE_FRAGMENTED
+ && qtmux->mux_mode != GST_QT_MUX_MODE_FRAGMENTED_STREAMABLE) {
+
+ /* Stick with the current pad only while it still has room under the
+ * reserved maximum duration */
+ if (qtmux->current_pad->total_duration < qtmux->reserved_max_duration) {
+ best_pad = qtmux->current_pad;
+ }
+ } else if (qtmux->collect->data->next) {
+ /* Attempt to try another pad if we have one. Otherwise use the only pad
+ * present */
+ best_pad = qtmux->current_pad = NULL;
+ }
+
+ /* The next best pad is the one which has the lowest timestamp and hasn't
+ * exceeded the reserved max duration */
+ if (!best_pad) {
+ GstClockTime best_time = GST_CLOCK_TIME_NONE;
+
+ for (walk = qtmux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstCollectData *cdata = (GstCollectData *) walk->data;
+ GstQTPad *qtpad = (GstQTPad *) cdata;
+ GstClockTime timestamp;
+
+ if (qtpad->total_duration >= qtmux->reserved_max_duration)
+ continue;
+
+ /* During prefill each pad's clock is simply its accumulated duration */
+ timestamp = qtpad->total_duration;
+
+ if (best_pad == NULL ||
+ !GST_CLOCK_TIME_IS_VALID (best_time) || timestamp < best_time) {
+ best_pad = qtpad;
+ best_time = timestamp;
+ }
+ }
+ }
+
+ return best_pad;
+ }
+
+ /* Called when starting the file in prefill_mode to figure out all the entries
+ * of the header based on the input stream and reserved maximum duration.
+ *
+ * The _actual_ header (i.e. with the proper duration and trimmed sample tables)
+ * will be updated and written on EOS. */
+ /* Returns FALSE when any pad uses a codec that cannot be pre-filled. */
+ static gboolean
+ gst_qt_mux_prefill_samples (GstQTMux * qtmux)
+ {
+ GstQTPad *qpad;
+ GSList *walk;
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+
+ /* Update expected sample sizes/durations as needed, this is for raw
+ * audio where samples are actual audio samples. */
+ for (walk = qtmux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstCollectData *cdata = (GstCollectData *) walk->data;
+ /* note: this loop-local qpad shadows the outer declaration */
+ GstQTPad *qpad = (GstQTPad *) cdata;
+
+ if (!prefill_update_sample_size (qtmux, qpad))
+ return FALSE;
+ }
+
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT) {
+ /* For the first sample check/update timecode as needed. We do that before
+ * all actual samples as the code in gst_qt_mux_add_buffer() does it with
+ * initial buffer directly, not with last_buf */
+ for (walk = qtmux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstCollectData *cdata = (GstCollectData *) walk->data;
+ GstQTPad *qpad = (GstQTPad *) cdata;
+ GstBuffer *buffer =
+ gst_collect_pads_peek (qtmux->collect, (GstCollectData *) qpad);
+ GstVideoTimeCodeMeta *tc_meta;
+
+ if (buffer && (tc_meta = gst_buffer_get_video_time_code_meta (buffer))
+ && qpad->trak->is_video) {
+ GstVideoTimeCode *tc = &tc_meta->tc;
+
+ /* Add a dedicated timecode trak referenced ('tmcd') from the video
+ * trak; its single 4-byte sample goes at the current mdat offset */
+ qpad->tc_trak = atom_trak_new (qtmux->context);
+ atom_moov_add_trak (qtmux->moov, qpad->tc_trak);
+
+ qpad->trak->tref = atom_tref_new (FOURCC_tmcd);
+ atom_tref_add_entry (qpad->trak->tref, qpad->tc_trak->tkhd.track_ID);
+
+ atom_trak_set_timecode_type (qpad->tc_trak, qtmux->context,
+ qpad->trak->mdia.mdhd.time_info.timescale, tc);
+
+ atom_trak_add_samples (qpad->tc_trak, 1, 1, 4,
+ qtmux->mdat_size, FALSE, 0);
+
+ qpad->tc_pos = qtmux->mdat_size;
+ qpad->first_tc = gst_video_time_code_copy (tc);
+ qpad->first_pts = GST_BUFFER_PTS (buffer);
+
+ /* The timecode sample breaks the current chunk; account for its
+ * 4 bytes in the simulated mdat size */
+ qtmux->current_chunk_offset = -1;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ qtmux->mdat_size += 4;
+ }
+ if (buffer)
+ gst_buffer_unref (buffer);
+ }
+ }
+
+ /* Simulate muxing: repeatedly pick the pad with the lowest running time
+ * and append one block's worth of sample-table entries, until every pad
+ * has reached reserved_max_duration.  No media data is written here. */
+ while ((qpad = find_best_pad_prefill_start (qtmux))) {
+ GstClockTime timestamp, next_timestamp, duration;
+ guint nsamples, sample_size;
+ guint64 chunk_offset;
+ gint64 scaled_duration;
+ gint64 pts_offset = 0;
+ gboolean sync = FALSE;
+ TrakBufferEntryInfo sample_entry;
+
+ sample_size = prefill_get_sample_size (qtmux, qpad);
+
+ if (sample_size == -1) {
+ return FALSE;
+ }
+
+ /* Per-pad shadow copy of the entries, used on EOS to trim the tables
+ * back to what was actually recorded */
+ if (!qpad->samples)
+ qpad->samples = g_array_new (FALSE, FALSE, sizeof (TrakBufferEntryInfo));
+
+ timestamp = qpad->total_duration;
+ next_timestamp = prefill_get_next_timestamp (qtmux, qpad);
+ duration = next_timestamp - timestamp;
+
+ if (qpad->first_ts == GST_CLOCK_TIME_NONE)
+ qpad->first_ts = timestamp;
+ if (qpad->first_dts == GST_CLOCK_TIME_NONE)
+ qpad->first_dts = timestamp;
+
+ /* Switching pads (or the very first block) starts a new chunk */
+ if (qtmux->current_pad != qpad || qtmux->current_chunk_offset == -1) {
+ qtmux->current_pad = qpad;
+ if (qtmux->current_chunk_offset == -1)
+ qtmux->current_chunk_offset = qtmux->mdat_size;
+ else
+ qtmux->current_chunk_offset += qtmux->current_chunk_size;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ }
+ /* For raw audio one block holds many fixed-size audio samples */
+ if (qpad->sample_size)
+ nsamples = sample_size / qpad->sample_size;
+ else
+ nsamples = 1;
+ qpad->last_dts = timestamp;
+ /* Scale block end and start separately so per-block rounding errors
+ * don't accumulate over the file */
+ scaled_duration = gst_util_uint64_scale_round (timestamp + duration,
+ atom_trak_get_timescale (qpad->trak),
+ GST_SECOND) - gst_util_uint64_scale_round (timestamp,
+ atom_trak_get_timescale (qpad->trak), GST_SECOND);
+
+ qtmux->current_chunk_size += sample_size;
+ qtmux->current_chunk_duration += duration;
+ qpad->total_bytes += sample_size;
+
+ chunk_offset = qtmux->current_chunk_offset;
+
+ /* I-frame only, no frame reordering */
+ sync = FALSE;
+ pts_offset = 0;
+
+ if (qtmux->current_chunk_duration > qtmux->longest_chunk
+ || !GST_CLOCK_TIME_IS_VALID (qtmux->longest_chunk)) {
+ qtmux->longest_chunk = qtmux->current_chunk_duration;
+ }
+
+ sample_entry.track_id = qpad->trak->tkhd.track_ID;
+ sample_entry.nsamples = nsamples;
+ sample_entry.delta = scaled_duration / nsamples;
+ sample_entry.size = sample_size / nsamples;
+ sample_entry.chunk_offset = chunk_offset;
+ sample_entry.pts_offset = pts_offset;
+ sample_entry.sync = sync;
+ sample_entry.do_pts = TRUE;
+ g_array_append_val (qpad->samples, sample_entry);
+ atom_trak_add_samples (qpad->trak, nsamples, scaled_duration / nsamples,
+ sample_size / nsamples, chunk_offset, sync, pts_offset);
+
+ qpad->total_duration = next_timestamp;
+ qtmux->mdat_size += sample_size;
+ qpad->sample_offset += nsamples;
+ }
+
+ return TRUE;
+ }
+
static GstFlowReturn
gst_qt_mux_start_file (GstQTMux * qtmux)
{
gchar s_id[32];
GstClockTime reserved_max_duration;
guint reserved_bytes_per_sec_per_trak;
+ GSList *walk;
GST_DEBUG_OBJECT (qtmux, "starting file");
} else if (qtmux->fast_start) {
qtmux->mux_mode = GST_QT_MUX_MODE_FAST_START;
} else if (reserved_max_duration != GST_CLOCK_TIME_NONE) {
- qtmux->mux_mode = GST_QT_MUX_MODE_ROBUST_RECORDING;
+ if (qtmux->reserved_prefill)
+ qtmux->mux_mode = GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL;
+ else
+ qtmux->mux_mode = GST_QT_MUX_MODE_ROBUST_RECORDING;
}
switch (qtmux->mux_mode) {
(NULL));
return GST_FLOW_ERROR;
}
+ if (qtmux->reserved_moov_update_period == GST_CLOCK_TIME_NONE) {
+ GST_WARNING_OBJECT (qtmux,
+ "Robust muxing requires reserved-moov-update-period to be set");
+ }
break;
case GST_QT_MUX_MODE_FAST_START:
case GST_QT_MUX_MODE_FRAGMENTED_STREAMABLE:
g_object_notify (G_OBJECT (qtmux), "streamable");
}
break;
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:
+ if (!gst_qt_mux_downstream_is_seekable (qtmux)) {
+ GST_WARNING_OBJECT (qtmux,
+ "downstream is not seekable, will not be able "
+ "to trim samples table at the end if less than reserved-duration is "
+ "recorded");
+ }
+ break;
}
/* let downstream know we think in BYTES and expect to do seeking later on */
qtmux->timescale = suggested_timescale;
}
+ /* Set width/height/timescale of any closed caption tracks to that of the
+ * first video track */
+ {
+ guint video_width = 0, video_height = 0;
+ guint32 video_timescale = 0;
+ GSList *walk;
+
+ for (walk = qtmux->sinkpads; walk; walk = g_slist_next (walk)) {
+ GstCollectData *cdata = (GstCollectData *) walk->data;
+ GstQTPad *qpad = (GstQTPad *) cdata;
+
+ if (!qpad->trak)
+ continue;
+
+ /* Not closed caption */
+ if (qpad->trak->mdia.hdlr.handler_type != FOURCC_clcp)
+ continue;
+
+ if (video_width == 0 || video_height == 0 || video_timescale == 0) {
+ GSList *walk2;
+
+ for (walk2 = qtmux->sinkpads; walk2; walk2 = g_slist_next (walk2)) {
+ GstCollectData *cdata2 = (GstCollectData *) walk2->data;
+ GstQTPad *qpad2 = (GstQTPad *) cdata2;
+
+ if (!qpad2->trak)
+ continue;
+
+ /* not video */
+ if (!qpad2->trak->mdia.minf.vmhd)
+ continue;
+
+ video_width = qpad2->trak->tkhd.width;
+ video_height = qpad2->trak->tkhd.height;
+ video_timescale = qpad2->trak->mdia.mdhd.time_info.timescale;
+ }
+ }
+
+ qpad->trak->tkhd.width = video_width << 16;
+ qpad->trak->tkhd.height = video_height << 16;
+ qpad->trak->mdia.mdhd.time_info.timescale = video_timescale;
+ }
+ }
+
/* initialize our moov recovery file */
if (qtmux->moov_recov_file_path) {
gst_qt_mux_prepare_moov_recovery (qtmux);
FALSE);
break;
case GST_QT_MUX_MODE_ROBUST_RECORDING:
-
ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
if (ret != GST_FLOW_OK)
break;
gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, 0, TRUE,
FALSE);
break;
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ /* Store this as the moov offset for later updating.
+ * We record mdat position below */
+ qtmux->moov_pos = qtmux->header_size;
+
+ if (!gst_qt_mux_prefill_samples (qtmux)) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Unsupported codecs or configuration for prefill mode"), (NULL));
+
+ return GST_FLOW_ERROR;
+ }
+
+ gst_qt_mux_update_global_statistics (qtmux);
+ gst_qt_mux_configure_moov (qtmux);
+ gst_qt_mux_update_edit_lists (qtmux);
+ gst_qt_mux_setup_metadata (qtmux);
+
+ /* Moov header with pre-filled samples */
+ ret = gst_qt_mux_send_moov (qtmux, &qtmux->header_size, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* last_moov_size now contains the full size of the moov, moov_pos the
+ * position. This allows us to rewrite it in the very end as needed */
+ qtmux->reserved_moov_size =
+ qtmux->last_moov_size + 12 * g_slist_length (qtmux->sinkpads) + 8;
+
+ /* Send an additional free atom at the end so we definitely have space
+ * to rewrite the moov header at the end and remove the samples that
+ * were not actually written */
+ ret =
+ gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size,
+ 12 * g_slist_length (qtmux->sinkpads) + 8, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* extra atoms go after the free/moov(s), before the mdat */
+ ret =
+ gst_qt_mux_send_extra_atoms (qtmux, TRUE, &qtmux->header_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ qtmux->mdat_pos = qtmux->header_size;
+
+ /* And now send the mdat header */
+ ret =
+ gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size,
+ qtmux->mdat_size, TRUE, FALSE);
+
+ /* chunks position is set relative to the first byte of the
+ * MDAT atom payload. Set the overall offset into the file */
+ atom_moov_chunks_set_offset (qtmux->moov, qtmux->header_size);
+
+ {
+ GstSegment segment;
+
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = qtmux->moov_pos;
+ gst_pad_push_event (qtmux->srcpad, gst_event_new_segment (&segment));
+
+ ret = gst_qt_mux_send_moov (qtmux, NULL, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ segment.start = qtmux->header_size;
+ gst_pad_push_event (qtmux->srcpad, gst_event_new_segment (&segment));
+ }
+
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ qtmux->current_chunk_offset = -1;
+ qtmux->mdat_size = 0;
+ qtmux->current_pad = NULL;
+ qtmux->longest_chunk = GST_CLOCK_TIME_NONE;
+
+ for (walk = qtmux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstCollectData *cdata = (GstCollectData *) walk->data;
+ GstQTPad *qtpad = (GstQTPad *) cdata;
+
+ qtpad->total_bytes = 0;
+ qtpad->total_duration = 0;
+ qtpad->first_dts = qtpad->first_ts = GST_CLOCK_TIME_NONE;
+ qtpad->last_dts = GST_CLOCK_TIME_NONE;
+ qtpad->sample_offset = 0;
+ }
+
+ break;
case GST_QT_MUX_MODE_FAST_START:
GST_OBJECT_LOCK (qtmux);
qtmux->fast_start_file = g_fopen (qtmux->fast_start_file_path, "wb+");
/* having flushed above, can check for buffers now */
if (GST_CLOCK_TIME_IS_VALID (qtpad->first_ts)) {
+ GstClockTime first_pts_in = qtpad->first_ts;
+ /* it should be, since we got first_ts by adding adjustment
+ * to a positive incoming PTS */
+ if (qtpad->dts_adjustment <= first_pts_in)
+ first_pts_in -= qtpad->dts_adjustment;
/* determine max stream duration */
if (!GST_CLOCK_TIME_IS_VALID (qtmux->last_dts)
|| qtpad->last_dts > qtmux->last_dts) {
qtmux->last_dts = qtpad->last_dts;
}
if (!GST_CLOCK_TIME_IS_VALID (qtmux->first_ts)
- || qtpad->first_ts < qtmux->first_ts) {
- qtmux->first_ts = qtpad->first_ts;
+ || first_pts_in < qtmux->first_ts) {
+ /* we need the original incoming PTS here, as this first_ts
+ * is used in update_edit_lists to construct the edit list that arrange
+ * for sync'ed streams. The first_ts is most likely obtained from
+ * some (audio) stream with 0 dts_adjustment and initial 0 PTS,
+ * so it makes no difference, though it matters in other cases */
+ qtmux->first_ts = first_pts_in;
}
}
has_gap = (qtpad->first_ts > (qtmux->first_ts + qtpad->dts_adjustment));
if (has_gap) {
- GstClockTime diff;
+ GstClockTime diff, trak_lateness;
diff = qtpad->first_ts - (qtmux->first_ts + qtpad->dts_adjustment);
lateness = gst_util_uint64_scale_round (diff,
qtmux->timescale, GST_SECOND);
- if (lateness > 0) {
+ /* Allow up to 1 trak timescale unit of lateness, Such a small
+ * timestamp/duration can't be represented by the trak-specific parts
+ * of the headers anyway, so it's irrelevantly small */
+ trak_lateness = gst_util_uint64_scale (diff,
+ atom_trak_get_timescale (qtpad->trak), GST_SECOND);
+
+ if (trak_lateness > 0 && diff > qtmux->start_gap_threshold) {
GST_DEBUG_OBJECT (qtmux,
"Pad %s is a late stream by %" GST_TIME_FORMAT,
GST_PAD_NAME (qtpad->collect.pad), GST_TIME_ARGS (diff));
* mvhd should be consistent with empty moov
* (but TODO maybe some clients do not handle that well ?) */
qtmux->moov->mvex.mehd.fragment_duration =
- gst_util_uint64_scale (qtmux->last_dts, qtmux->timescale, GST_SECOND);
- GST_DEBUG_OBJECT (qtmux, "rewriting moov with mvex duration %"
- GST_TIME_FORMAT, GST_TIME_ARGS (qtmux->last_dts));
+ gst_util_uint64_scale_round (qtmux->last_dts, qtmux->timescale,
+ GST_SECOND);
+ GST_DEBUG_OBJECT (qtmux,
+ "rewriting moov with mvex duration %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (qtmux->last_dts));
/* seek and rewrite the header */
gst_segment_init (&segment, GST_FORMAT_BYTES);
segment.start = qtmux->moov_pos;
return gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
qtmux->mdat_size, NULL, TRUE);
}
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:{
+ GSList *walk;
+ guint32 next_track_id = qtmux->moov->mvhd.next_track_id;
+
+ for (walk = qtmux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstCollectData *cdata = (GstCollectData *) walk->data;
+ GstQTPad *qpad = (GstQTPad *) cdata;
+ guint64 block_idx;
+ AtomSTBL *stbl = &qpad->trak->mdia.minf.stbl;
+
+ /* Get the block index of the last sample we wrote, not of the next
+ * sample we would write */
+ block_idx = prefill_get_block_index (qtmux, qpad);
+
+ /* stts */
+ if (block_idx > 0) {
+ STTSEntry *entry;
+ guint64 nsamples = 0;
+ gint i, n;
+
+ n = atom_array_get_len (&stbl->stts.entries);
+ for (i = 0; i < n; i++) {
+ entry = &atom_array_index (&stbl->stts.entries, i);
+ if (nsamples + entry->sample_count >= qpad->sample_offset) {
+ entry->sample_count = qpad->sample_offset - nsamples;
+ stbl->stts.entries.len = i + 1;
+ break;
+ }
+ nsamples += entry->sample_count;
+ }
+ g_assert (i < n);
+ } else {
+ stbl->stts.entries.len = 0;
+ }
+
+ /* stsz */
+ {
+ g_assert (stbl->stsz.entries.len == 0);
+ stbl->stsz.table_size = qpad->sample_offset;
+ }
+
+ /* stco/stsc */
+ {
+ gint i, n;
+ guint64 nsamples = 0;
+ gint chunk_index = 0;
+ const TrakBufferEntryInfo *sample_entry;
+
+ if (block_idx > 0) {
+ sample_entry =
+ &g_array_index (qpad->samples, TrakBufferEntryInfo,
+ block_idx - 1);
+
+ n = stbl->stco64.entries.len;
+ for (i = 0; i < n; i++) {
+ guint64 *entry = &atom_array_index (&stbl->stco64.entries, i);
+
+ if (*entry == sample_entry->chunk_offset) {
+ stbl->stco64.entries.len = i + 1;
+ chunk_index = i + 1;
+ break;
+ }
+ }
+ g_assert (i < n);
+ g_assert (chunk_index > 0);
+
+ n = stbl->stsc.entries.len;
+ for (i = 0; i < n; i++) {
+ STSCEntry *entry = &atom_array_index (&stbl->stsc.entries, i);
+
+ if (entry->first_chunk >= chunk_index)
+ break;
+
+ if (i > 0) {
+ nsamples +=
+ (entry->first_chunk - atom_array_index (&stbl->stsc.entries,
+ i -
+ 1).first_chunk) * atom_array_index (&stbl->stsc.entries,
+ i - 1).samples_per_chunk;
+ }
+ }
+ g_assert (i <= n);
+
+ if (i > 0) {
+ STSCEntry *prev_entry =
+ &atom_array_index (&stbl->stsc.entries, i - 1);
+ nsamples +=
+ (chunk_index -
+ prev_entry->first_chunk) * prev_entry->samples_per_chunk;
+ if (qpad->sample_offset - nsamples > 0) {
+ stbl->stsc.entries.len = i;
+ atom_stsc_add_new_entry (&stbl->stsc, chunk_index,
+ qpad->sample_offset - nsamples);
+ } else {
+ stbl->stsc.entries.len = i;
+ stbl->stco64.entries.len--;
+ }
+ } else {
+ /* Everything in a single chunk */
+ stbl->stsc.entries.len = 0;
+ atom_stsc_add_new_entry (&stbl->stsc, chunk_index,
+ qpad->sample_offset);
+ }
+ } else {
+ stbl->stco64.entries.len = 0;
+ stbl->stsc.entries.len = 0;
+ }
+ }
+
+ {
+ GList *walk2;
+
+ for (walk2 = qtmux->moov->mvex.trexs; walk2; walk2 = walk2->next) {
+ AtomTREX *trex = walk2->data;
+
+ if (trex->track_ID == qpad->trak->tkhd.track_ID) {
+ trex->track_ID = next_track_id;
+ break;
+ }
+ }
+
+ qpad->trak->tkhd.track_ID = next_track_id++;
+ }
+ }
+ qtmux->moov->mvhd.next_track_id = next_track_id;
+
+ gst_qt_mux_update_global_statistics (qtmux);
+ gst_qt_mux_configure_moov (qtmux);
+
+ gst_qt_mux_update_edit_lists (qtmux);
+
+ /* Check if any gap edit lists were added. We don't have any space
+ * reserved for this in the moov and the pre-finalized moov would have
+ * broken A/V synchronization. Error out here now
+ */
+ for (walk = qtmux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstCollectData *cdata = (GstCollectData *) walk->data;
+ GstQTPad *qpad = (GstQTPad *) cdata;
+
+ if (qpad->trak->edts
+ && g_slist_length (qpad->trak->edts->elst.entries) > 1) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Can't support gaps in prefill mode"));
+
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ gst_qt_mux_setup_metadata (qtmux);
+ atom_moov_chunks_set_offset (qtmux->moov, qtmux->header_size);
+
+ {
+ GstSegment segment;
+
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = qtmux->moov_pos;
+ gst_pad_push_event (qtmux->srcpad, gst_event_new_segment (&segment));
+
+ ret =
+ gst_qt_mux_send_moov (qtmux, NULL, qtmux->reserved_moov_size, FALSE,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ if (qtmux->reserved_moov_size > qtmux->last_moov_size) {
+ ret =
+ gst_qt_mux_send_free_atom (qtmux, NULL,
+ qtmux->reserved_moov_size - qtmux->last_moov_size, TRUE);
+ }
+
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+
+ ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, FALSE);
+ return ret;
+ }
default:
break;
}
guint64 mdat_offset = qtmux->mdat_pos + 16 + qtmux->mdat_size;
GST_OBJECT_LOCK (qtmux);
+
+ /* Update the offset of how much we've muxed, so the
+ * report of remaining space keeps counting down */
+ if (position > qtmux->last_moov_update &&
+ position - qtmux->last_moov_update > qtmux->muxed_since_last_update) {
+ GST_LOG_OBJECT (qtmux,
+ "Muxed time %" G_GUINT64_FORMAT " since last moov update",
+ qtmux->muxed_since_last_update);
+ qtmux->muxed_since_last_update = position - qtmux->last_moov_update;
+ }
+
+ /* Next, check if we're supposed to send periodic moov updates downstream */
if (qtmux->reserved_moov_update_period == GST_CLOCK_TIME_NONE) {
GST_OBJECT_UNLOCK (qtmux);
return GST_FLOW_OK;
(position <= qtmux->last_moov_update ||
(position - qtmux->last_moov_update) <
qtmux->reserved_moov_update_period)) {
- /* Update the offset of how much we've muxed, so the
- * report of remaining space keeps counting down */
- if (position > qtmux->last_moov_update &&
- position - qtmux->last_moov_update > qtmux->muxed_since_last_update) {
- GST_LOG_OBJECT (qtmux,
- "Muxed time %" G_GUINT64_FORMAT " since last moov update",
- qtmux->muxed_since_last_update);
- qtmux->muxed_since_last_update = position - qtmux->last_moov_update;
- }
GST_OBJECT_UNLOCK (qtmux);
return GST_FLOW_OK; /* No update needed yet */
}
}
switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:{
+ const TrakBufferEntryInfo *sample_entry;
+ guint64 block_idx = prefill_get_block_index (qtmux, pad);
+
+ if (block_idx >= pad->samples->len) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected sample %" G_GUINT64_FORMAT ", expected up to %u",
+ block_idx, pad->samples->len));
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ /* Check if all values are as expected */
+ sample_entry =
+ &g_array_index (pad->samples, TrakBufferEntryInfo, block_idx);
+
+ /* Allow +/- 1 difference for the scaled_duration to allow
+ * for some rounding errors
+ */
+ if (sample_entry->nsamples != nsamples
+ || ABSDIFF (sample_entry->delta, scaled_duration) > 1
+ || sample_entry->size != sample_size
+ || sample_entry->chunk_offset != chunk_offset
+ || sample_entry->pts_offset != pts_offset
+ || sample_entry->sync != sync) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected values in sample %" G_GUINT64_FORMAT,
+ pad->sample_offset + 1));
+ GST_ERROR_OBJECT (qtmux, "Expected: samples %u, delta %u, size %u, "
+ "chunk offset %" G_GUINT64_FORMAT ", "
+ "pts offset %" G_GUINT64_FORMAT ", sync %d",
+ sample_entry->nsamples,
+ sample_entry->delta,
+ sample_entry->size,
+ sample_entry->chunk_offset,
+ sample_entry->pts_offset, sample_entry->sync);
+ GST_ERROR_OBJECT (qtmux, "Got: samples %u, delta %u, size %u, "
+ "chunk offset %" G_GUINT64_FORMAT ", "
+ "pts offset %" G_GUINT64_FORMAT ", sync %d",
+ nsamples,
+ (guint) scaled_duration,
+ sample_size, chunk_offset, pts_offset, sync);
+
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ ret = gst_qt_mux_send_buffer (qtmux, buffer, &qtmux->mdat_size, TRUE);
+ break;
+ }
case GST_QT_MUX_MODE_MOOV_AT_END:
case GST_QT_MUX_MODE_FAST_START:
case GST_QT_MUX_MODE_ROBUST_RECORDING:
return ret;
}
+ static void
+ gst_qt_mux_register_buffer_in_chunk (GstQTMux * qtmux, GstQTPad * pad,
+ guint buffer_size, GstClockTime duration)
+ {
+ /* not that much happens here,
+ * but updating any of this very likely needs to happen all in sync,
+ * unless there is a very good reason not to */
+
+ /* for computing the avg bitrate */
+ pad->total_bytes += buffer_size;
+ pad->total_duration += duration;
+ /* for keeping track of where we are in chunk;
+ * ensures that data really is located as recorded in atoms */
+ qtmux->current_chunk_size += buffer_size;
+ qtmux->current_chunk_duration += duration;
+ }
+
static GstFlowReturn
gst_qt_mux_check_and_update_timecode (GstQTMux * qtmux, GstQTPad * pad,
GstBuffer * buf, GstFlowReturn ret)
guint32 frames_since_daily_jam;
GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ if (!pad->trak->is_video)
+ return ret;
+
if (qtmux_klass->format != GST_QT_MUX_FORMAT_QT)
return ret;
g_free (tc_str);
#endif
g_assert (pad->tc_trak == NULL);
- tc_buf = gst_buffer_new_allocate (NULL, 4, NULL);
pad->first_tc = gst_video_time_code_copy (tc);
/* If frames are out of order, the frame we're currently getting might
* not be the first one. Just write a 0 timecode for now and wait
pad->trak->tref = atom_tref_new (FOURCC_tmcd);
atom_tref_add_entry (pad->trak->tref, pad->tc_trak->tkhd.track_ID);
- atom_trak_set_timecode_type (pad->tc_trak, qtmux->context, pad->first_tc);
+ atom_trak_set_timecode_type (pad->tc_trak, qtmux->context,
+ pad->trak->mdia.mdhd.time_info.timescale, pad->first_tc);
+ tc_buf = gst_buffer_new_allocate (NULL, 4, NULL);
szret = gst_buffer_fill (tc_buf, 0, &frames_since_daily_jam, 4);
g_assert (szret == 4);
qtmux->current_chunk_offset = -1;
qtmux->current_chunk_size = 0;
qtmux->current_chunk_duration = 0;
+ } else if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ frames_since_daily_jam =
+ gst_video_time_code_frames_since_daily_jam (pad->first_tc);
+ frames_since_daily_jam = GUINT32_TO_BE (frames_since_daily_jam);
+
+ tc_buf = gst_buffer_new_allocate (NULL, 4, NULL);
+ szret = gst_buffer_fill (tc_buf, 0, &frames_since_daily_jam, 4);
+ g_assert (szret == 4);
+
+ ret = gst_qt_mux_send_buffer (qtmux, tc_buf, &qtmux->mdat_size, TRUE);
+ pad->tc_pos = -1;
+
+ qtmux->current_chunk_offset = -1;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
} else if (pad->is_out_of_order) {
/* Check for a lower timecode than the one stored */
g_assert (pad->tc_trak != NULL);
gint64 pts_offset = 0;
gboolean sync = FALSE;
GstFlowReturn ret = GST_FLOW_OK;
+ guint buffer_size;
if (!pad->fourcc)
goto not_negotiated;
/* if this pad has a prepare function, call it */
if (pad->prepare_buf_func != NULL) {
- buf = pad->prepare_buf_func (pad, buf, qtmux);
+ GstBuffer *new_buf;
+
+ new_buf = pad->prepare_buf_func (pad, buf, qtmux);
+ if (buf && !new_buf)
+ return GST_FLOW_OK;
+ buf = new_buf;
}
ret = gst_qt_mux_check_and_update_timecode (qtmux, pad, buf, ret);
GST_PAD_NAME (pad->collect.pad));
}
#endif
- qtmux->current_pad = pad;
goto exit;
}
GST_BUFFER_DTS (buf) = GST_BUFFER_DTS (last_buf);
}
+ buffer_size = gst_buffer_get_size (last_buf);
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ guint required_buffer_size = prefill_get_sample_size (qtmux, pad);
+ guint fill_size = required_buffer_size - buffer_size;
+ GstMemory *mem;
+ GstMapInfo map;
+
+ if (required_buffer_size < buffer_size) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Sample size %u bigger than expected maximum %u", buffer_size,
+ required_buffer_size));
+ goto bail;
+ }
+
+ if (fill_size > 0) {
+ GST_DEBUG_OBJECT (qtmux,
+ "Padding buffer by %u bytes to reach required %u bytes", fill_size,
+ required_buffer_size);
+ mem = gst_allocator_alloc (NULL, fill_size, NULL);
+ gst_memory_map (mem, &map, GST_MAP_WRITE);
+ memset (map.data, 0, map.size);
+ gst_memory_unmap (mem, &map);
+ last_buf = gst_buffer_make_writable (last_buf);
+ gst_buffer_append_memory (last_buf, mem);
+ buffer_size = required_buffer_size;
+ }
+ }
+
/* duration actually means time delta between samples, so we calculate
* the duration based on the difference in DTS or PTS, falling back
* to DURATION if the other two don't exist, such as with the last
/* fragments only deal with 1 buffer == 1 chunk (== 1 sample) */
if (pad->sample_size && !qtmux->fragment_sequence) {
+ GstClockTime expected_timestamp;
+
/* Constant size packets: usually raw audio (with many samples per
buffer (= chunk)), but can also be fixed-packet-size codecs like ADPCM
*/
sample_size = pad->sample_size;
- if (gst_buffer_get_size (last_buf) % sample_size != 0)
+ if (buffer_size % sample_size != 0)
goto fragmented_sample;
+
/* note: qt raw audio storage warps it implicitly into a timewise
- * perfect stream, discarding buffer times */
+ * perfect stream, discarding buffer times.
+ * If the difference between the current PTS and the expected one
+ * becomes too big, we error out: there was a gap and we have no way to
+ * represent that, causing A/V sync to be off */
+ expected_timestamp =
+ gst_util_uint64_scale (pad->sample_offset, GST_SECOND,
+ atom_trak_get_timescale (pad->trak)) + pad->first_ts;
+ if (ABSDIFF (GST_BUFFER_DTS_OR_PTS (last_buf),
+ expected_timestamp) > qtmux->max_raw_audio_drift)
+ goto raw_audio_timestamp_drift;
+
if (GST_BUFFER_DURATION (last_buf) != GST_CLOCK_TIME_NONE) {
nsamples = gst_util_uint64_scale_round (GST_BUFFER_DURATION (last_buf),
atom_trak_get_timescale (pad->trak), GST_SECOND);
duration = GST_BUFFER_DURATION (last_buf);
} else {
- nsamples = gst_buffer_get_size (last_buf) / sample_size;
+ nsamples = buffer_size / sample_size;
duration =
gst_util_uint64_scale_round (nsamples, GST_SECOND,
atom_trak_get_timescale (pad->trak));
/* timescale = samplerate */
scaled_duration = 1;
- pad->last_dts += duration;
+ pad->last_dts =
+ pad->first_dts + gst_util_uint64_scale_round (pad->sample_offset +
+ nsamples, GST_SECOND, atom_trak_get_timescale (pad->trak));
} else {
nsamples = 1;
- sample_size = gst_buffer_get_size (last_buf);
- if ((buf && GST_BUFFER_DTS_IS_VALID (buf))
- || GST_BUFFER_DTS_IS_VALID (last_buf)) {
+ sample_size = buffer_size;
+ if (!pad->sparse && ((buf && GST_BUFFER_DTS_IS_VALID (buf))
+ || GST_BUFFER_DTS_IS_VALID (last_buf))) {
gint64 scaled_dts;
if (buf && GST_BUFFER_DTS_IS_VALID (buf)) {
pad->last_dts = GST_BUFFER_DTS (buf);
}
}
- /* for computing the avg bitrate */
- pad->total_bytes += gst_buffer_get_size (last_buf);
- pad->total_duration += duration;
- qtmux->current_chunk_size += gst_buffer_get_size (last_buf);
- qtmux->current_chunk_duration += duration;
+ gst_qt_mux_register_buffer_in_chunk (qtmux, pad, buffer_size, duration);
chunk_offset = qtmux->current_chunk_offset;
qtmux->longest_chunk = qtmux->current_chunk_duration;
}
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ const TrakBufferEntryInfo *sample_entry;
+ guint64 block_idx = prefill_get_block_index (qtmux, pad);
+
+ if (block_idx >= pad->samples->len) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected sample %" G_GUINT64_FORMAT ", expected up to %u",
+ block_idx, pad->samples->len));
+ goto bail;
+ }
+
+ /* Check if all values are as expected */
+ sample_entry =
+ &g_array_index (pad->samples, TrakBufferEntryInfo, block_idx);
+
+ if (chunk_offset < sample_entry->chunk_offset) {
+ guint fill_size = sample_entry->chunk_offset - chunk_offset;
+ GstBuffer *fill_buf;
+
+ fill_buf = gst_buffer_new_allocate (NULL, fill_size, NULL);
+ gst_buffer_memset (fill_buf, 0, 0, fill_size);
+
+ ret = gst_qt_mux_send_buffer (qtmux, fill_buf, &qtmux->mdat_size, TRUE);
+ if (ret != GST_FLOW_OK)
+ goto bail;
+ qtmux->current_chunk_offset = chunk_offset = sample_entry->chunk_offset;
+ qtmux->current_chunk_size = buffer_size;
+ qtmux->current_chunk_duration = duration;
+ } else if (chunk_offset != sample_entry->chunk_offset) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected chunk offset %" G_GUINT64_FORMAT ", expected up to %"
+ G_GUINT64_FORMAT, chunk_offset, sample_entry->chunk_offset));
+ goto bail;
+ }
+ }
+
/* now we go and register this buffer/sample all over */
ret = gst_qt_mux_register_and_push_sample (qtmux, pad, last_buf,
buf == NULL, nsamples, last_dts, scaled_duration, sample_size,
chunk_offset, sync, TRUE, pts_offset);
+ pad->sample_offset += nsamples;
/* if this is sparse and we have a next buffer, check if there is any gap
* between them to insert an empty sample */
gint64 empty_duration =
GST_BUFFER_PTS (buf) - (GST_BUFFER_PTS (last_buf) + duration);
gint64 empty_duration_scaled;
+ guint empty_size;
empty_buf = pad->create_empty_buffer (pad, empty_duration);
- empty_duration_scaled = gst_util_uint64_scale_round (empty_duration,
- atom_trak_get_timescale (pad->trak), GST_SECOND);
+ pad->last_dts = GST_BUFFER_PTS (buf);
+ empty_duration_scaled = gst_util_uint64_scale_round (pad->last_dts,
+ atom_trak_get_timescale (pad->trak), GST_SECOND)
+ - (last_dts + scaled_duration);
+ empty_size = gst_buffer_get_size (empty_buf);
- pad->total_bytes += gst_buffer_get_size (empty_buf);
- pad->total_duration += duration;
+ gst_qt_mux_register_buffer_in_chunk (qtmux, pad, empty_size,
+ empty_duration);
ret =
gst_qt_mux_register_and_push_sample (qtmux, pad, empty_buf, FALSE, 1,
last_dts + scaled_duration, empty_duration_scaled,
- gst_buffer_get_size (empty_buf), chunk_offset, sync, TRUE, 0);
- } else {
- /* our only case currently is tx3g subtitles, so there is no reason to fill this yet */
+ empty_size, chunk_offset, sync, TRUE, 0);
+ } else if (pad->fourcc != FOURCC_c608 && pad->fourcc != FOURCC_c708) {
+ /* This assert is kept here to make sure implementors of new
+ * sparse input format decide whether there needs to be special
+ * gap handling or not */
g_assert_not_reached ();
GST_WARNING_OBJECT (qtmux,
"no empty buffer creation function found for pad %s",
}
}
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ gst_qt_mux_update_expected_trailer_size(qtmux, pad);
+#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+
exit:
return ret;
("Audio buffer contains fragmented sample."));
goto bail;
}
+ raw_audio_timestamp_drift:
+ {
+ /* TODO: Could in theory be implemented with edit lists */
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Audio stream timestamps are drifting (got %" GST_TIME_FORMAT
+ ", expected %" GST_TIME_FORMAT "). This is not supported yet!",
+ GST_TIME_ARGS (GST_BUFFER_DTS_OR_PTS (last_buf)),
+ GST_TIME_ARGS (gst_util_uint64_scale (pad->sample_offset,
+ GST_SECOND,
+ atom_trak_get_timescale (pad->trak)) + pad->first_ts)));
+ goto bail;
+ }
no_pts:
{
GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL), ("Buffer has no PTS."));
GSList *walk;
GstQTPad *best_pad = NULL;
- if (qtmux->current_pad &&
- (qtmux->interleave_bytes != 0 || qtmux->interleave_time != 0) &&
- (qtmux->interleave_bytes == 0
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ guint64 smallest_offset = G_MAXUINT64;
+ guint64 chunk_offset = 0;
+
+ for (walk = qtmux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstCollectData *cdata = (GstCollectData *) walk->data;
+ GstQTPad *qtpad = (GstQTPad *) cdata;
+ const TrakBufferEntryInfo *sample_entry;
+ guint64 block_idx, current_block_idx;
+ guint64 chunk_offset_offset = 0;
+ GstBuffer *tmp_buf =
+ gst_collect_pads_peek (pads, (GstCollectData *) qtpad);
+
+ /* Check for EOS pads and just skip them */
+ if (!tmp_buf && !qtpad->last_buf && (!qtpad->raw_audio_adapter
+ || gst_adapter_available (qtpad->raw_audio_adapter) == 0))
+ continue;
+ if (tmp_buf)
+ gst_buffer_unref (tmp_buf);
+
+ /* Find the exact offset where the next sample of this track is supposed
+ * to be written at */
+ block_idx = current_block_idx = prefill_get_block_index (qtmux, qtpad);
+ sample_entry =
+ &g_array_index (qtpad->samples, TrakBufferEntryInfo, block_idx);
+ while (block_idx > 0) {
+ const TrakBufferEntryInfo *tmp =
+ &g_array_index (qtpad->samples, TrakBufferEntryInfo, block_idx - 1);
+
+ if (tmp->chunk_offset != sample_entry->chunk_offset)
+ break;
+ chunk_offset_offset += tmp->size * tmp->nsamples;
+ block_idx--;
+ }
+
+ /* Except for the previously selected pad being EOS we always have
+ * qtmux->current_chunk_offset + qtmux->current_chunk_size
+ * ==
+ * sample_entry->chunk_offset + chunk_offset_offset
+ * for the best pad. Instead of checking that, we just return the
+ * pad that has the smallest offset for the next to-be-written sample.
+ */
+ if (sample_entry->chunk_offset + chunk_offset_offset < smallest_offset) {
+ smallest_offset = sample_entry->chunk_offset + chunk_offset_offset;
+ best_pad = qtpad;
+ chunk_offset = sample_entry->chunk_offset;
+ }
+ }
+
+ if (chunk_offset != qtmux->current_chunk_offset) {
+ qtmux->current_pad = NULL;
+ }
+
+ return best_pad;
+ }
+
+ if (qtmux->current_pad && (qtmux->interleave_bytes != 0
+ || qtmux->interleave_time != 0) && (qtmux->interleave_bytes == 0
|| qtmux->current_chunk_size <= qtmux->interleave_bytes)
&& (qtmux->interleave_time == 0
|| qtmux->current_chunk_duration <= qtmux->interleave_time)
/* clipping already converted to running time */
if (best_pad != NULL) {
- GstBuffer *buf = gst_collect_pads_pop (pads, (GstCollectData *) best_pad);
+ GstBuffer *buf = NULL;
+
+ if (qtmux->mux_mode != GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL ||
+ best_pad->raw_audio_adapter == NULL ||
+ best_pad->raw_audio_adapter_pts == GST_CLOCK_TIME_NONE)
+ buf = gst_collect_pads_pop (pads, (GstCollectData *) best_pad);
+
+ g_assert (buf || best_pad->last_buf || (best_pad->raw_audio_adapter
+ && gst_adapter_available (best_pad->raw_audio_adapter) > 0));
- g_assert (buf || best_pad->last_buf);
if (buf)
gst_qt_pad_adjust_buffer_dts (qtmux, best_pad,
(GstCollectData *) best_pad, &buf);
return gst_structure_foreach (sub_s, check_field, sup_s);
}
+ /* will unref @qtmux */
+ static gboolean
+ gst_qt_mux_can_renegotiate (GstQTMux * qtmux, GstPad * pad, GstCaps * caps)
+ {
+ GstCaps *current_caps;
+
+ /* does not go well to renegotiate stream mid-way, unless
+ * the old caps are a subset of the new one (this means upstream
+ * added more info to the caps, as both should be 'fixed' caps) */
+ current_caps = gst_pad_get_current_caps (pad);
+ g_assert (caps != NULL);
+
+ if (!gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps)) {
+ gst_caps_unref (current_caps);
+ GST_WARNING_OBJECT (qtmux,
+ "pad %s refused renegotiation to %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (qtmux);
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (qtmux,
+ "pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, current_caps);
+ gst_object_unref (qtmux);
+ gst_caps_unref (current_caps);
+
+ return TRUE;
+ }
+
static gboolean
gst_qt_mux_audio_sink_set_caps (GstQTPad * qtpad, GstCaps * caps)
{
AtomInfo *ext_atom = NULL;
gint constant_size = 0;
const gchar *stream_format;
+ guint32 timescale;
- qtpad->prepare_buf_func = NULL;
-
- /* does not go well to renegotiate stream mid-way, unless
- * the old caps are a subset of the new one (this means upstream
- * added more info to the caps, as both should be 'fixed' caps) */
- if (qtpad->fourcc) {
- GstCaps *current_caps;
-
- current_caps = gst_pad_get_current_caps (pad);
- g_assert (caps != NULL);
-
- if (!gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps)) {
- gst_caps_unref (current_caps);
- goto refuse_renegotiation;
- }
- GST_DEBUG_OBJECT (qtmux,
- "pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
- GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, current_caps);
- gst_caps_unref (current_caps);
- }
+ if (qtpad->fourcc)
+ return gst_qt_mux_can_renegotiate (qtmux, pad, caps);
GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
GST_DEBUG_PAD_NAME (pad), caps);
+ qtpad->prepare_buf_func = NULL;
+
format = qtmux_klass->format;
structure = gst_caps_get_structure (caps, 0);
mimetype = gst_structure_get_name (structure);
qtpad->max_bitrate);
}
if (layer == 1) {
- g_warn_if_fail (format == GST_QT_MUX_FORMAT_MP4);
+ g_warn_if_fail (format == GST_QT_MUX_FORMAT_MP4
+ || format == GST_QT_MUX_FORMAT_QT);
entry.samples_per_packet = 384;
} else if (layer == 2) {
- g_warn_if_fail (format == GST_QT_MUX_FORMAT_MP4);
+ g_warn_if_fail (format == GST_QT_MUX_FORMAT_MP4
+ || format == GST_QT_MUX_FORMAT_QT);
entry.samples_per_packet = 1152;
} else {
g_warn_if_fail (layer == 3);
if (!entry.fourcc)
goto refuse_caps;
+ timescale = gst_qt_mux_pad_get_timescale (GST_QT_MUX_PAD_CAST (pad));
+ if (!timescale && qtmux->trak_timescale)
+ timescale = qtmux->trak_timescale;
+ else if (!timescale)
+ timescale = entry.sample_rate;
+
/* ok, set the pad info accordingly */
qtpad->fourcc = entry.fourcc;
qtpad->sample_size = constant_size;
qtpad->trak_ste =
(SampleTableEntry *) atom_trak_set_audio_type (qtpad->trak,
- qtmux->context, &entry,
- qtmux->trak_timescale ? qtmux->trak_timescale : entry.sample_rate,
- ext_atom, constant_size);
+ qtmux->context, &entry, timescale, ext_atom, constant_size);
gst_object_unref (qtmux);
return TRUE;
gst_object_unref (qtmux);
return FALSE;
}
- refuse_renegotiation:
- {
- GST_WARNING_OBJECT (qtmux,
- "pad %s refused renegotiation to %" GST_PTR_FORMAT,
- GST_PAD_NAME (pad), caps);
- gst_object_unref (qtmux);
- return FALSE;
- }
}
static gboolean
GList *ext_atom_list = NULL;
gboolean sync = FALSE;
int par_num, par_den;
+ const gchar *multiview_mode;
- qtpad->prepare_buf_func = NULL;
-
- /* does not go well to renegotiate stream mid-way, unless
- * the old caps are a subset of the new one (this means upstream
- * added more info to the caps, as both should be 'fixed' caps) */
- if (qtpad->fourcc) {
- GstCaps *current_caps;
-
- current_caps = gst_pad_get_current_caps (pad);
- g_assert (caps != NULL);
-
- if (!gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps)) {
- gst_caps_unref (current_caps);
- goto refuse_renegotiation;
- }
- GST_DEBUG_OBJECT (qtmux,
- "pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
- GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, current_caps);
- gst_caps_unref (current_caps);
- }
+ if (qtpad->fourcc)
+ return gst_qt_mux_can_renegotiate (qtmux, pad, caps);
GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
GST_DEBUG_PAD_NAME (pad), caps);
+ qtpad->prepare_buf_func = NULL;
+
format = qtmux_klass->format;
structure = gst_caps_get_structure (caps, 0);
mimetype = gst_structure_get_name (structure);
/* bring frame numerator into a range that ensures both reasonable resolution
* as well as a fair duration */
- rate = qtmux->trak_timescale ?
- qtmux->trak_timescale : atom_framerate_to_timescale (framerate_num,
- framerate_den);
+ qtpad->expected_sample_duration_n = framerate_num;
+ qtpad->expected_sample_duration_d = framerate_den;
+
+ rate = gst_qt_mux_pad_get_timescale (GST_QT_MUX_PAD_CAST (pad));
+ if (!rate && qtmux->trak_timescale)
+ rate = qtmux->trak_timescale;
+ else if (!rate)
+ rate = atom_framerate_to_timescale (framerate_num, framerate_den);
+
GST_DEBUG_OBJECT (qtmux, "Rate of video track selected: %" G_GUINT32_FORMAT,
rate);
+ multiview_mode = gst_structure_get_string (structure, "multiview-mode");
+ if (multiview_mode && !qtpad->trak->mdia.minf.stbl.svmi) {
+ GstVideoMultiviewMode mode;
+ GstVideoMultiviewFlags flags = 0;
+
+ mode = gst_video_multiview_mode_from_caps_string (multiview_mode);
+ gst_structure_get_flagset (structure, "multiview-flags", &flags, NULL);
+ switch (mode) {
+ case GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE:
+ qtpad->trak->mdia.minf.stbl.svmi =
+ atom_svmi_new (0,
+ flags & GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST);
+ break;
+ case GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED:
+ qtpad->trak->mdia.minf.stbl.svmi =
+ atom_svmi_new (1,
+ flags & GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST);
+ break;
+ case GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME:
+ qtpad->trak->mdia.minf.stbl.svmi =
+ atom_svmi_new (2,
+ flags & GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST);
+ break;
+ default:
+ GST_DEBUG_OBJECT (qtmux, "Unsupported multiview-mode %s",
+ multiview_mode);
+ break;
+ }
+ }
+
/* set common properties */
entry.width = width;
entry.height = height;
} else if (strcmp (mimetype, "image/jpeg") == 0) {
entry.fourcc = FOURCC_jpeg;
sync = FALSE;
+ } else if (strcmp (mimetype, "image/png") == 0) {
+ entry.fourcc = FOURCC_png;
+ sync = FALSE;
} else if (strcmp (mimetype, "image/x-j2c") == 0 ||
strcmp (mimetype, "image/x-jpc") == 0) {
const gchar *colorspace;
goto refuse_caps;
}
} else if (strcmp (mimetype, "video/x-vp8") == 0) {
- entry.fourcc = FOURCC_VP80;
- sync = FALSE;
+ entry.fourcc = FOURCC_vp08;
+ } else if (strcmp (mimetype, "video/x-vp9") == 0) {
+ entry.fourcc = FOURCC_vp09;
} else if (strcmp (mimetype, "video/x-dirac") == 0) {
entry.fourcc = FOURCC_drac;
} else if (strcmp (mimetype, "video/x-qt-part") == 0) {
} else if (strcmp (mimetype, "video/x-cineform") == 0) {
entry.fourcc = FOURCC_cfhd;
sync = FALSE;
+ } else if (strcmp (mimetype, "video/x-av1") == 0) {
+ gint presentation_delay;
+ guint8 presentation_delay_byte = 0;
+ GstBuffer *av1_codec_data;
+
+ if (gst_structure_get_int (structure, "presentation-delay",
+ &presentation_delay)) {
+ presentation_delay_byte = 1 << 5;
+ presentation_delay_byte |= MAX (0xF, presentation_delay & 0xF);
+ }
+
+
+ av1_codec_data = gst_buffer_new_allocate (NULL, 5, NULL);
+ /* Fill version and 3 bytes of flags to 0 */
+ gst_buffer_memset (av1_codec_data, 0, 0, 4);
+ gst_buffer_fill (av1_codec_data, 4, &presentation_delay_byte, 1);
+ if (codec_data)
+ av1_codec_data = gst_buffer_append (av1_codec_data,
+ gst_buffer_ref ((GstBuffer *) codec_data));
+
+ entry.fourcc = FOURCC_av01;
+
+ ext_atom = build_btrt_extension (0, qtpad->avg_bitrate, qtpad->max_bitrate);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ ext_atom = build_codec_data_extension (FOURCC_av1C, av1_codec_data);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ gst_buffer_unref (av1_codec_data);
}
if (!entry.fourcc)
gst_object_unref (qtmux);
return FALSE;
}
- refuse_renegotiation:
- {
- GST_WARNING_OBJECT (qtmux,
- "pad %s refused renegotiation to %" GST_PTR_FORMAT, GST_PAD_NAME (pad),
- caps);
- gst_object_unref (qtmux);
- return FALSE;
- }
}
static gboolean
GstStructure *structure;
SubtitleSampleEntry entry = { 0, };
- /* does not go well to renegotiate stream mid-way, unless
- * the old caps are a subset of the new one (this means upstream
- * added more info to the caps, as both should be 'fixed' caps) */
- if (qtpad->fourcc) {
- GstCaps *current_caps;
-
- current_caps = gst_pad_get_current_caps (pad);
- g_assert (caps != NULL);
-
- if (!gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps)) {
- gst_caps_unref (current_caps);
- goto refuse_renegotiation;
- }
- GST_DEBUG_OBJECT (qtmux,
- "pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
- GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, current_caps);
- gst_caps_unref (current_caps);
- }
+ if (qtpad->fourcc)
+ return gst_qt_mux_can_renegotiate (qtmux, pad, caps);
GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
GST_DEBUG_PAD_NAME (pad), caps);
gst_object_unref (qtmux);
return FALSE;
}
- refuse_renegotiation:
+ }
+
+ static gboolean
+ gst_qt_mux_caption_sink_set_caps (GstQTPad * qtpad, GstCaps * caps)
+ {
+ GstPad *pad = qtpad->collect.pad;
+ GstQTMux *qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+ GstStructure *structure;
+ guint32 fourcc_entry;
+ guint32 timescale;
+
+ if (qtpad->fourcc)
+ return gst_qt_mux_can_renegotiate (qtmux, pad, caps);
+
+ GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ /* captions default */
+ qtpad->is_out_of_order = FALSE;
+ qtpad->sync = FALSE;
+ qtpad->sparse = TRUE;
+ /* Closed caption data are within atoms */
+ qtpad->prepare_buf_func = gst_qt_mux_prepare_caption_buffer;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* We know we only handle 608,format=s334-1a and 708,format=cdp */
+ if (gst_structure_has_name (structure, "closedcaption/x-cea-608")) {
+ fourcc_entry = FOURCC_c608;
+ } else if (gst_structure_has_name (structure, "closedcaption/x-cea-708")) {
+ fourcc_entry = FOURCC_c708;
+ } else
+ goto refuse_caps;
+
+ /* We set the real timescale later to the one from the video track when
+ * writing the headers */
+ timescale = gst_qt_mux_pad_get_timescale (GST_QT_MUX_PAD_CAST (pad));
+ if (!timescale && qtmux->trak_timescale)
+ timescale = qtmux->trak_timescale;
+ else if (!timescale)
+ timescale = 30000;
+
+ qtpad->fourcc = fourcc_entry;
+ qtpad->trak_ste =
+ (SampleTableEntry *) atom_trak_set_caption_type (qtpad->trak,
+ qtmux->context, timescale, fourcc_entry);
+
+ /* Initialize caption track language code to 0 unless something else is
+ * specified. Without this, Final Cut considers it "non-standard"
+ */
+ qtpad->trak->mdia.mdhd.language_code = 0;
+
+ gst_object_unref (qtmux);
+ return TRUE;
+
+ /* ERRORS */
+ refuse_caps:
{
- GST_WARNING_OBJECT (qtmux,
- "pad %s refused renegotiation to %" GST_PTR_FORMAT, GST_PAD_NAME (pad),
- caps);
+ GST_WARNING_OBJECT (qtmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
gst_object_unref (qtmux);
return FALSE;
}
g_assert (qtpad);
if (qtpad->trak) {
/* https://developer.apple.com/library/mac/#documentation/QuickTime/QTFF/QTFFChap4/qtff4.html */
- qtpad->trak->mdia.mdhd.language_code =
- (iso_code[0] - 0x60) * 0x400 + (iso_code[1] - 0x60) * 0x20 +
- (iso_code[2] - 0x60);
+ qtpad->trak->mdia.mdhd.language_code = language_code (iso_code);
}
}
g_free (code);
name = g_strdup_printf ("subtitle_%u", qtmux->subtitle_pads++);
}
lock = FALSE;
+ } else if (templ == gst_element_class_get_pad_template (klass, "caption_%u")) {
+ setcaps_func = gst_qt_mux_caption_sink_set_caps;
+ if (req_name != NULL && sscanf (req_name, "caption_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("caption_%u", qtmux->caption_pads++);
+ }
+ lock = FALSE;
} else
goto wrong_template;
GST_DEBUG_OBJECT (qtmux, "Requested pad: %s", name);
/* create pad and add to collections */
- newpad = gst_pad_new_from_template (templ, name);
+ newpad =
+ g_object_new (GST_TYPE_QT_MUX_PAD, "name", name, "direction",
+ templ->direction, "template", templ, NULL);
g_free (name);
collect_pad = (GstQTPad *)
gst_collect_pads_add_pad (qtmux->collect, newpad, sizeof (GstQTPad),
case PROP_RESERVED_BYTES_PER_SEC:
g_value_set_uint (value, qtmux->reserved_bytes_per_sec_per_trak);
break;
+ case PROP_RESERVED_PREFILL:
+ g_value_set_boolean (value, qtmux->reserved_prefill);
+ break;
case PROP_INTERLEAVE_BYTES:
g_value_set_uint64 (value, qtmux->interleave_bytes);
break;
case PROP_INTERLEAVE_TIME:
g_value_set_uint64 (value, qtmux->interleave_time);
break;
- #endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+ case PROP_MAX_RAW_AUDIO_DRIFT:
+ g_value_set_uint64 (value, qtmux->max_raw_audio_drift);
+ break;
+ case PROP_START_GAP_THRESHOLD:
+ g_value_set_uint64 (value, qtmux->start_gap_threshold);
+ break;
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ case PROP_EXPECTED_TRAILER_SIZE:
+ g_value_set_uint(value, qtmux->expected_trailer_size);
+ break;
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
case PROP_RESERVED_BYTES_PER_SEC:
qtmux->reserved_bytes_per_sec_per_trak = g_value_get_uint (value);
break;
+ case PROP_RESERVED_PREFILL:
+ qtmux->reserved_prefill = g_value_get_boolean (value);
+ break;
case PROP_INTERLEAVE_BYTES:
qtmux->interleave_bytes = g_value_get_uint64 (value);
qtmux->interleave_bytes_set = TRUE;
qtmux->interleave_time = g_value_get_uint64 (value);
qtmux->interleave_time_set = TRUE;
break;
+ case PROP_MAX_RAW_AUDIO_DRIFT:
+ qtmux->max_raw_audio_drift = g_value_get_uint64 (value);
+ break;
+ case PROP_START_GAP_THRESHOLD:
+ qtmux->start_gap_threshold = g_value_get_uint64 (value);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
while (TRUE) {
GstQTMuxFormatProp *prop;
- GstCaps *subtitle_caps;
+ GstCaps *subtitle_caps, *caption_caps;
prop = &gst_qt_mux_format_list[i];
format = prop->format;
} else {
gst_caps_unref (subtitle_caps);
}
+ caption_caps = gst_static_caps_get (&prop->caption_sink_caps);
+ if (!gst_caps_is_equal (caption_caps, GST_CAPS_NONE)) {
+ params->caption_sink_caps = caption_caps;
+ } else {
+ gst_caps_unref (caption_caps);
+ }
/* create the type now */
type = g_type_register_static (GST_TYPE_ELEMENT, prop->type_name, &typeinfo,
gboolean sparse;
/* bitrates */
guint32 avg_bitrate, max_bitrate;
+ /* expected sample duration */
+ guint expected_sample_duration_n;
+ guint expected_sample_duration_d;
/* for avg bitrate calculation */
guint64 total_bytes;
GstBuffer *last_buf;
/* dts of last_buf */
GstClockTime last_dts;
+ guint64 sample_offset;
/* This is compensate for CTTS */
GstClockTime dts_adjustment;
/* store the first timestamp for comparing with other streams and
* know if there are late streams */
+ /* subject to dts adjustment */
GstClockTime first_ts;
GstClockTime first_dts;
GstVideoTimeCode *first_tc;
GstClockTime first_pts;
guint64 tc_pos;
+
+ /* for keeping track in pre-fill mode */
+ GArray *samples;
+ /* current sample */
+ GstAdapter *raw_audio_adapter;
+ guint64 raw_audio_adapter_offset;
+ GstClockTime raw_audio_adapter_pts;
};
typedef enum _GstQTMuxState
GST_QT_MUX_MODE_FRAGMENTED,
GST_QT_MUX_MODE_FRAGMENTED_STREAMABLE,
GST_QT_MUX_MODE_FAST_START,
- GST_QT_MUX_MODE_ROBUST_RECORDING
+ GST_QT_MUX_MODE_ROBUST_RECORDING,
+ GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL,
} GstQtMuxMode;
struct _GstQTMux
/* keep track of the largest chunk to fine-tune brands */
GstClockTime longest_chunk;
- /* Earliest timestamp across all pads/traks */
+ /* Earliest timestamp across all pads/traks
+ * (unadjusted incoming PTS) */
GstClockTime first_ts;
/* Last DTS across all pads (= duration) */
GstClockTime last_dts;
GstClockTime interleave_time;
gboolean interleave_bytes_set, interleave_time_set;
+ GstClockTime max_raw_audio_drift;
+
/* Reserved minimum MOOV size in bytes
* This is converted from reserved_max_duration
* using the bytes/trak/sec estimate */
GstClockTime reserved_moov_update_period;
GstClockTime muxed_since_last_update;
+ gboolean reserved_prefill;
+
+ GstClockTime start_gap_threshold;
+
/* for request pad naming */
- guint video_pads, audio_pads, subtitle_pads;
+ guint video_pads, audio_pads, subtitle_pads, caption_pads;
+
+#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ guint expected_trailer_size;
+ guint audio_expected_trailer_size;
+ guint video_expected_trailer_size;
- #endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
};
struct _GstQTMuxClass
GstCaps *video_sink_caps;
GstCaps *audio_sink_caps;
GstCaps *subtitle_sink_caps;
+ GstCaps *caption_sink_caps;
} GstQTMuxClassParams;
#define GST_QT_MUX_PARAMS_QDATA g_quark_from_static_string("qt-mux-params")
#include <gst/tag/tag.h>
#include <gst/audio/audio.h>
#include <gst/video/video.h>
+ #include <gst/riff/riff.h>
+ #include <gst/pbutils/pbutils.h>
#include "qtatomparser.h"
#include "qtdemux_types.h"
#include "qtdemux.h"
#include "qtpalette.h"
- #include "gst/riff/riff-media.h"
- #include "gst/riff/riff-read.h"
-
- #include <gst/pbutils/pbutils.h>
-
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <gst/math-compat.h>
#ifdef HAVE_ZLIB
-# include <zlib.h>
+#include <zlib.h>
#endif
/* max. size considered 'sane' for non-mdat atoms */
#define QTDEMUX_MAX_ATOM_SIZE (25*1024*1024)
/* if the sample index is larger than this, something is likely wrong */
- #define QTDEMUX_MAX_SAMPLE_INDEX_SIZE (50*1024*1024)
+ #define QTDEMUX_MAX_SAMPLE_INDEX_SIZE (200*1024*1024)
/* For converting qt creation times to unix epoch times */
#define QTDEMUX_SECONDS_PER_DAY (60 * 60 * 24)
#define QTDEMUX_TREE_NODE_FOURCC(n) (QT_FOURCC(((guint8 *) (n)->data) + 4))
- #define STREAM_IS_EOS(s) (s->time_position == GST_CLOCK_TIME_NONE)
+ #define STREAM_IS_EOS(s) ((s)->time_position == GST_CLOCK_TIME_NONE)
#define ABSDIFF(x, y) ( (x) > (y) ? ((x) - (y)) : ((y) - (x)) )
+ #define QTDEMUX_STREAM(s) ((QtDemuxStream *)(s))
+ #define QTDEMUX_N_STREAMS(demux) ((demux)->active_streams->len)
+ #define QTDEMUX_NTH_STREAM(demux,idx) \
+ QTDEMUX_STREAM(g_ptr_array_index((demux)->active_streams,idx))
+ #define QTDEMUX_NTH_OLD_STREAM(demux,idx) \
+ QTDEMUX_STREAM(g_ptr_array_index((demux)->old_streams,idx))
+
GST_DEBUG_CATEGORY (qtdemux_debug);
+ #define GST_CAT_DEFAULT qtdemux_debug
typedef struct _QtDemuxSegment QtDemuxSegment;
typedef struct _QtDemuxSample QtDemuxSample;
gboolean keyframe; /* TRUE when this packet is a keyframe */
};
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+typedef struct _QtDemuxSphericalMetadata QtDemuxSphericalMetadata;
+
+struct _QtDemuxSphericalMetadata
+{
+ gboolean is_spherical;
+ gboolean is_stitched;
+ char *stitching_software;
+ char *projection_type;
+ char *stereo_mode;
+ int source_count;
+ int init_view_heading;
+ int init_view_pitch;
+ int init_view_roll;
+ int timestamp;
+ int full_pano_width_pixels;
+ int full_pano_height_pixels;
+ int cropped_area_image_width;
+ int cropped_area_image_height;
+ int cropped_area_left;
+ int cropped_area_top;
+ QTDEMUX_AMBISONIC_TYPE ambisonic_type;
+ QTDEMUX_AMBISONIC_FORMAT ambisonic_format;
+ QTDEMUX_AMBISONIC_ORDER ambisonic_order;
+};
+
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
/* Macros for converting to/from timescale */
#define QTSTREAMTIME_TO_GSTTIME(stream, value) (gst_util_uint64_scale((value), GST_SECOND, (stream)->timescale))
#define GSTTIME_TO_QTSTREAMTIME(stream, value) (gst_util_uint64_scale((value), (stream)->timescale, GST_SECOND))
#define QTSAMPLE_KEYFRAME(stream,sample) ((stream)->all_keyframe || (sample)->keyframe)
+ #define QTDEMUX_EXPOSE_GET_LOCK(demux) (&((demux)->expose_lock))
+ #define QTDEMUX_EXPOSE_LOCK(demux) G_STMT_START { \
+ GST_TRACE("Locking from thread %p", g_thread_self()); \
+ g_mutex_lock (QTDEMUX_EXPOSE_GET_LOCK (demux)); \
+ GST_TRACE("Locked from thread %p", g_thread_self()); \
+ } G_STMT_END
+
+ #define QTDEMUX_EXPOSE_UNLOCK(demux) G_STMT_START { \
+ GST_TRACE("Unlocking from thread %p", g_thread_self()); \
+ g_mutex_unlock (QTDEMUX_EXPOSE_GET_LOCK (demux)); \
+ } G_STMT_END
+
/*
* Quicktime has tracks and segments. A track is a continuous piece of
* multimedia content. The track is not always played from start to finish but
{
GstPad *pad;
+ GstQTDemux *demux;
+ gchar *stream_id;
+
QtDemuxStreamStsdEntry *stsd_entries;
guint stsd_entries_length;
guint cur_stsd_entry_index;
guint32 n_samples;
QtDemuxSample *samples;
gboolean all_keyframe; /* TRUE when all samples are keyframes (no stss) */
- guint32 first_duration; /* duration in timescale of first sample, used for figuring out
- the framerate */
guint32 n_samples_moof; /* sample count in a moof */
guint64 duration_moof; /* duration in timescale of a moof, used for figure out
* the framerate of fragmented format stream */
/* buffer needs some custom processing, e.g. subtitles */
gboolean need_process;
+ /* buffer needs potentially be split, e.g. CEA608 subtitles */
+ gboolean need_split;
/* current position */
guint32 segment_index;
guint32 protection_scheme_version;
gpointer protection_scheme_info; /* specific to the protection scheme */
GQueue protection_scheme_event_queue;
+
+ gint ref_count; /* atomic */
};
/* Contains properties and cryptographic info for a set of samples from a
static GstFlowReturn qtdemux_add_fragmented_samples (GstQTDemux * qtdemux);
+ static void gst_qtdemux_check_send_pending_segment (GstQTDemux * demux);
+
static GstStaticPadTemplate gst_qtdemux_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
#endif
static GstStateChangeReturn gst_qtdemux_change_state (GstElement * element,
GstStateChange transition);
+ static void gst_qtdemux_set_context (GstElement * element,
+ GstContext * context);
static gboolean qtdemux_sink_activate (GstPad * sinkpad, GstObject * parent);
static gboolean qtdemux_sink_activate_mode (GstPad * sinkpad,
GstObject * parent, GstPadMode mode, gboolean active);
GstBuffer * inbuf);
static gboolean gst_qtdemux_handle_sink_event (GstPad * pad, GstObject * parent,
GstEvent * event);
+ static gboolean gst_qtdemux_handle_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
static gboolean gst_qtdemux_setcaps (GstQTDemux * qtdemux, GstCaps * caps);
static gboolean gst_qtdemux_configure_stream (GstQTDemux * qtdemux,
QtDemuxStream * stream);
static GstFlowReturn gst_qtdemux_process_adapter (GstQTDemux * demux,
gboolean force);
+ static void gst_qtdemux_check_seekability (GstQTDemux * demux);
+
static gboolean qtdemux_parse_moov (GstQTDemux * qtdemux,
const guint8 * buffer, guint length);
static gboolean qtdemux_parse_node (GstQTDemux * qtdemux, GNode * node,
static gboolean qtdemux_parse_samples (GstQTDemux * qtdemux,
QtDemuxStream * stream, guint32 n);
static GstFlowReturn qtdemux_expose_streams (GstQTDemux * qtdemux);
- static void gst_qtdemux_stream_free (GstQTDemux * qtdemux,
- QtDemuxStream * stream);
- static void gst_qtdemux_stream_clear (GstQTDemux * qtdemux,
- QtDemuxStream * stream);
- static void gst_qtdemux_remove_stream (GstQTDemux * qtdemux, int index);
+ static QtDemuxStream *gst_qtdemux_stream_ref (QtDemuxStream * stream);
+ static void gst_qtdemux_stream_unref (QtDemuxStream * stream);
+ static void gst_qtdemux_stream_clear (QtDemuxStream * stream);
static GstFlowReturn qtdemux_prepare_streams (GstQTDemux * qtdemux);
- static void qtdemux_do_allocation (GstQTDemux * qtdemux,
- QtDemuxStream * stream);
+ static void qtdemux_do_allocation (QtDemuxStream * stream,
+ GstQTDemux * qtdemux);
static gboolean gst_qtdemux_activate_segment (GstQTDemux * qtdemux,
QtDemuxStream * stream, guint32 seg_idx, GstClockTime offset);
static gboolean gst_qtdemux_stream_update_segment (GstQTDemux * qtdemux,
static void gst_qtdemux_append_protection_system_id (GstQTDemux * qtdemux,
const gchar * id);
static void qtdemux_gst_structure_free (GstStructure * gststructure);
+ static void gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+static void gst_tag_register_spherical_tags (void);
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
static void
gst_qtdemux_class_init (GstQTDemuxClass * klass)
{
gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_qtdemux_set_index);
gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_qtdemux_get_index);
#endif
+ gstelement_class->set_context = GST_DEBUG_FUNCPTR (gst_qtdemux_set_context);
gst_tag_register_musicbrainz_tags ();
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ gst_tag_register_spherical_tags ();
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
gst_element_class_add_static_pad_template (gstelement_class,
&gst_qtdemux_sink_template);
gst_element_class_add_static_pad_template (gstelement_class,
"David Schleef <ds@schleef.org>, Wim Taymans <wim@fluendo.com>");
GST_DEBUG_CATEGORY_INIT (qtdemux_debug, "qtdemux", 0, "qtdemux plugin");
-
+ gst_riff_init ();
}
static void
qtdemux_sink_activate_mode);
gst_pad_set_chain_function (qtdemux->sinkpad, gst_qtdemux_chain);
gst_pad_set_event_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_event);
+ gst_pad_set_query_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_query);
gst_element_add_pad (GST_ELEMENT_CAST (qtdemux), qtdemux->sinkpad);
- qtdemux->state = QTDEMUX_STATE_INITIAL;
- qtdemux->pullbased = FALSE;
- qtdemux->posted_redirect = FALSE;
- qtdemux->neededbytes = 16;
- qtdemux->todrop = 0;
qtdemux->adapter = gst_adapter_new ();
- qtdemux->offset = 0;
- qtdemux->first_mdat = -1;
- qtdemux->got_moov = FALSE;
- qtdemux->mdatoffset = -1;
- qtdemux->mdatbuffer = NULL;
- qtdemux->restoredata_buffer = NULL;
- qtdemux->restoredata_offset = -1;
- qtdemux->fragment_start = -1;
- qtdemux->fragment_start_offset = -1;
- qtdemux->media_caps = NULL;
- qtdemux->exposed = FALSE;
- qtdemux->mss_mode = FALSE;
- qtdemux->pending_newsegment = NULL;
- qtdemux->upstream_format_is_time = FALSE;
- qtdemux->have_group_id = FALSE;
- qtdemux->group_id = G_MAXUINT;
- qtdemux->cenc_aux_info_offset = 0;
- qtdemux->cenc_aux_info_sizes = NULL;
- qtdemux->cenc_aux_sample_count = 0;
- qtdemux->protection_system_ids = NULL;
g_queue_init (&qtdemux->protection_event_queue);
- gst_segment_init (&qtdemux->segment, GST_FORMAT_TIME);
- qtdemux->tag_list = gst_tag_list_new_empty ();
- gst_tag_list_set_scope (qtdemux->tag_list, GST_TAG_SCOPE_GLOBAL);
qtdemux->flowcombiner = gst_flow_combiner_new ();
+ g_mutex_init (&qtdemux->expose_lock);
+
+ qtdemux->active_streams = g_ptr_array_new_with_free_func
+ ((GDestroyNotify) gst_qtdemux_stream_unref);
+ qtdemux->old_streams = g_ptr_array_new_with_free_func
+ ((GDestroyNotify) gst_qtdemux_stream_unref);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ qtdemux->spherical_metadata = (QtDemuxSphericalMetadata *)
+ malloc (sizeof (QtDemuxSphericalMetadata));
+
+ if (qtdemux->spherical_metadata) {
+ qtdemux->spherical_metadata->is_spherical = FALSE;
+ qtdemux->spherical_metadata->is_stitched = FALSE;
+ qtdemux->spherical_metadata->stitching_software = NULL;
+ qtdemux->spherical_metadata->projection_type = NULL;
+ qtdemux->spherical_metadata->stereo_mode = NULL;
+ qtdemux->spherical_metadata->source_count = 0;
+ qtdemux->spherical_metadata->init_view_heading = 0;
+ qtdemux->spherical_metadata->init_view_pitch = 0;
+ qtdemux->spherical_metadata->init_view_roll = 0;
+ qtdemux->spherical_metadata->timestamp = 0;
+ qtdemux->spherical_metadata->full_pano_width_pixels = 0;
+ qtdemux->spherical_metadata->full_pano_height_pixels = 0;
+ qtdemux->spherical_metadata->cropped_area_image_width = 0;
+ qtdemux->spherical_metadata->cropped_area_image_height = 0;
+ qtdemux->spherical_metadata->cropped_area_left = 0;
+ qtdemux->spherical_metadata->cropped_area_top = 0;
+ qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_UNKNOWN;
+ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_UNKNOWN;
+ qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_UNKNOWN;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
GST_OBJECT_FLAG_SET (qtdemux, GST_ELEMENT_FLAG_INDEXABLE);
+
+ gst_qtdemux_reset (qtdemux, TRUE);
}
static void
{
GstQTDemux *qtdemux = GST_QTDEMUX (object);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (qtdemux->spherical_metadata) {
+ if (qtdemux->spherical_metadata->stitching_software)
+ free(qtdemux->spherical_metadata->stitching_software);
+ if (qtdemux->spherical_metadata->projection_type)
+ free(qtdemux->spherical_metadata->projection_type);
+ if (qtdemux->spherical_metadata->stereo_mode)
+ free(qtdemux->spherical_metadata->stereo_mode);
+
+ free(qtdemux->spherical_metadata);
+ qtdemux->spherical_metadata = NULL;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
if (qtdemux->adapter) {
g_object_unref (G_OBJECT (qtdemux->adapter));
qtdemux->adapter = NULL;
g_free (qtdemux->cenc_aux_info_sizes);
qtdemux->cenc_aux_info_sizes = NULL;
+ g_mutex_clear (&qtdemux->expose_lock);
+
+ g_ptr_array_free (qtdemux->active_streams, TRUE);
+ g_ptr_array_free (qtdemux->old_streams, TRUE);
G_OBJECT_CLASS (parent_class)->dispose (object);
}
res = gst_qtdemux_src_convert (qtdemux, pad,
src_fmt, src_value, dest_fmt, &dest_value);
- if (res) {
+ if (res)
gst_query_set_convert (query, src_fmt, src_value, dest_fmt, dest_value);
- res = TRUE;
- }
+
break;
}
case GST_QUERY_FORMATS:
stream->stream_tags);
gst_pad_push_event (stream->pad,
gst_event_new_tag (gst_tag_list_ref (stream->stream_tags)));
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ /* post message qtdemux tag (for early recive application) */
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
+ gst_message_new_tag (GST_OBJECT_CAST (qtdemux),
+ gst_tag_list_copy (stream->stream_tags)));
+#endif
}
if (G_UNLIKELY (stream->send_global_tags)) {
static void
gst_qtdemux_push_event (GstQTDemux * qtdemux, GstEvent * event)
{
- guint n;
gboolean has_valid_stream = FALSE;
GstEventType etype = GST_EVENT_TYPE (event);
+ guint i;
GST_DEBUG_OBJECT (qtdemux, "pushing %s event on all source pads",
GST_EVENT_TYPE_NAME (event));
- for (n = 0; n < qtdemux->n_streams; n++) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
GstPad *pad;
- QtDemuxStream *stream = qtdemux->streams[n];
- GST_DEBUG_OBJECT (qtdemux, "pushing on pad %i", n);
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ GST_DEBUG_OBJECT (qtdemux, "pushing on track-id %u", stream->track_id);
if ((pad = stream->pad)) {
has_valid_stream = TRUE;
}
}
- /* push a pending newsegment event, if any from the streaming thread */
- static void
- gst_qtdemux_push_pending_newsegment (GstQTDemux * qtdemux)
- {
- if (qtdemux->pending_newsegment) {
- gst_qtdemux_push_event (qtdemux, qtdemux->pending_newsegment);
- qtdemux->pending_newsegment = NULL;
- }
- }
-
typedef struct
{
guint64 media_time;
static gint
find_func (QtDemuxSample * s1, gint64 * media_time, gpointer user_data)
{
- if ((gint64) s1->timestamp + s1->pts_offset > *media_time)
+ if ((gint64) s1->timestamp > *media_time)
return 1;
- if ((gint64) s1->timestamp + s1->pts_offset == *media_time)
+ if ((gint64) s1->timestamp == *media_time)
return 0;
return -1;
/* find the index of the sample that includes the data for @media_time using a
* binary search. Only to be called in optimized cases of linear search below.
*
- * Returns the index of the sample.
+ * Returns the index of the sample with the corresponding *DTS*.
*/
static guint32
gst_qtdemux_find_index (GstQTDemux * qtdemux, QtDemuxStream * str,
/* use faster search if requested time in already parsed range */
sample = str->samples + str->stbl_index;
- if (str->stbl_index >= 0 &&
- mov_time <= (sample->timestamp + sample->pts_offset))
- return gst_qtdemux_find_index (qtdemux, str, media_time);
+ if (str->stbl_index >= 0 && mov_time <= sample->timestamp) {
+ index = gst_qtdemux_find_index (qtdemux, str, media_time);
+ sample = str->samples + index;
+ } else {
+ while (index < str->n_samples - 1) {
+ if (!qtdemux_parse_samples (qtdemux, str, index + 1))
+ goto parse_failed;
- while (index < str->n_samples - 1) {
- if (!qtdemux_parse_samples (qtdemux, str, index + 1))
- goto parse_failed;
+ sample = str->samples + index + 1;
+ if (mov_time < sample->timestamp) {
+ sample = str->samples + index;
+ break;
+ }
- sample = str->samples + index + 1;
- if (mov_time < (sample->timestamp + sample->pts_offset))
- break;
+ index++;
+ }
+ }
- index++;
+ /* sample->timestamp is now <= media_time, need to find the corresponding
+ * PTS now by looking backwards */
+ while (index > 0 && sample->timestamp + sample->pts_offset > mov_time) {
+ index--;
+ sample = str->samples + index;
}
+
return index;
/* ERRORS */
{
guint64 min_offset;
gint64 min_byte_offset = -1;
- gint n;
+ guint i;
min_offset = desired_time;
/* for each stream, find the index of the sample in the segment
* and move back to the previous keyframe. */
- for (n = 0; n < qtdemux->n_streams; n++) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
QtDemuxStream *str;
guint32 index, kindex;
guint32 seg_idx;
QtDemuxSegment *seg;
gboolean empty_segment = FALSE;
- str = qtdemux->streams[n];
+ str = QTDEMUX_NTH_STREAM (qtdemux, i);
if (CUR_STREAM (str)->sparse && !use_sparse)
continue;
}
GST_OBJECT_UNLOCK (qtdemux);
+ qtdemux->segment_seqnum = seqnum;
/* BYTE seek event */
event = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, byte_cur,
stop_type, stop);
guint32 seqnum, GstSeekFlags flags)
{
gint64 desired_offset;
- gint n;
+ guint i;
desired_offset = segment->position;
/* and set all streams to the final position */
gst_flow_combiner_reset (qtdemux->flowcombiner);
qtdemux->segment_seqnum = seqnum;
- for (n = 0; n < qtdemux->n_streams; n++) {
- QtDemuxStream *stream = qtdemux->streams[n];
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
stream->time_position = desired_offset;
stream->accumulated_base = 0;
gboolean flush;
gboolean update;
GstSegment seeksegment;
- guint32 seqnum = 0;
+ guint32 seqnum = GST_SEQNUM_INVALID;
GstEvent *flush_event;
gboolean ret;
/* stop streaming, either by flushing or by pausing the task */
if (flush) {
flush_event = gst_event_new_flush_start ();
- if (seqnum)
+ if (seqnum != GST_SEQNUM_INVALID)
gst_event_set_seqnum (flush_event, seqnum);
/* unlock upstream pull_range */
gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (flush_event));
/* prepare for streaming again */
if (flush) {
flush_event = gst_event_new_flush_stop (TRUE);
- if (seqnum)
+ if (seqnum != GST_SEQNUM_INVALID)
gst_event_set_seqnum (flush_event, seqnum);
gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (flush_event));
if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
GstMessage *msg = gst_message_new_segment_start (GST_OBJECT_CAST (qtdemux),
qtdemux->segment.format, qtdemux->segment.position);
- if (seqnum)
+ if (seqnum != GST_SEQNUM_INVALID)
gst_message_set_seqnum (msg, seqnum);
gst_element_post_message (GST_ELEMENT_CAST (qtdemux), msg);
}
GST_DEBUG_OBJECT (qtdemux, "collecting all metadata for all streams");
/* Build complete index */
- for (i = 0; i < qtdemux->n_streams; i++) {
- QtDemuxStream *stream = qtdemux->streams[i];
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
- if (!qtdemux_parse_samples (qtdemux, stream, stream->n_samples - 1))
- goto parse_error;
+ if (!qtdemux_parse_samples (qtdemux, stream, stream->n_samples - 1)) {
+ GST_LOG_OBJECT (qtdemux,
+ "Building complete index of track-id %u for seeking failed!",
+ stream->track_id);
+ return FALSE;
+ }
}
- return TRUE;
- /* ERRORS */
- parse_error:
- {
- GST_LOG_OBJECT (qtdemux,
- "Building complete index of stream %u for seeking failed!", i);
- return FALSE;
- }
+ return TRUE;
}
static gboolean
#endif
guint32 seqnum = gst_event_get_seqnum (event);
+ qtdemux->received_seek = TRUE;
+
if (seqnum == qtdemux->segment_seqnum) {
GST_LOG_OBJECT (pad,
"Drop duplicated SEEK event seqnum %" G_GUINT32_FORMAT, seqnum);
} else if (gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (event))) {
GST_DEBUG_OBJECT (qtdemux, "Upstream successfully seeked");
res = TRUE;
- } else if (qtdemux->state == QTDEMUX_STATE_MOVIE && qtdemux->n_streams
+ } else if (qtdemux->state == QTDEMUX_STATE_MOVIE
+ && QTDEMUX_N_STREAMS (qtdemux)
&& !qtdemux->fragmented) {
res = gst_qtdemux_do_push_seek (qtdemux, pad, event);
} else {
}
}
- /* stream/index return sample that is min/max w.r.t. byte position,
- * time is min/max w.r.t. time of samples,
- * the latter need not be time of the former sample */
+ /* Find, for each track, the first sample in coding order that has a file offset >= @byte_pos.
+ *
+ * If @fw is false, the coding order is explored backwards.
+ *
+ * If @set is true, each stream will be moved to its matched sample, or EOS if no matching
+ * sample is found for that track.
+ *
+ * The stream and sample index of the sample with the minimum offset in the direction explored
+ * (see @fw) is returned in the output parameters @_stream and @_index respectively.
+ *
+ * @_time is set to the QTSAMPLE_PTS of the matched sample with the minimum QTSAMPLE_PTS in the
+ * direction explored, which may not always match the QTSAMPLE_PTS of the sample returned in
+ * @_stream and @_index. */
static void
gst_qtdemux_find_sample (GstQTDemux * qtdemux, gint64 byte_pos, gboolean fw,
gboolean set, QtDemuxStream ** _stream, gint * _index, gint64 * _time)
{
- gint i, n, index;
+ gint i, index;
gint64 time, min_time;
QtDemuxStream *stream;
+ gint iter;
min_time = -1;
stream = NULL;
index = -1;
- for (n = 0; n < qtdemux->n_streams; ++n) {
+ for (iter = 0; iter < QTDEMUX_N_STREAMS (qtdemux); iter++) {
QtDemuxStream *str;
gint inc;
gboolean set_sample;
- str = qtdemux->streams[n];
+ str = QTDEMUX_NTH_STREAM (qtdemux, iter);
set_sample = !set;
if (fw) {
*_index = index;
}
+ /* Copied from mpegtsbase code */
+ /* FIXME: replace this function when we add new util function for stream-id creation */
+ static gchar *
+ _get_upstream_id (GstQTDemux * demux)
+ {
+ gchar *upstream_id = gst_pad_get_stream_id (demux->sinkpad);
+
+ if (!upstream_id) {
+ /* Try to create one from the upstream URI, else use a random number */
+ GstQuery *query;
+ gchar *uri = NULL;
+
+ /* Try to generate one from the URI query and
+ * if it fails take a random number instead */
+ query = gst_query_new_uri ();
+ if (gst_element_query (GST_ELEMENT_CAST (demux), query)) {
+ gst_query_parse_uri (query, &uri);
+ }
+
+ if (uri) {
+ GChecksum *cs;
+
+ /* And then generate an SHA256 sum of the URI */
+ cs = g_checksum_new (G_CHECKSUM_SHA256);
+ g_checksum_update (cs, (const guchar *) uri, strlen (uri));
+ g_free (uri);
+ upstream_id = g_strdup (g_checksum_get_string (cs));
+ g_checksum_free (cs);
+ } else {
+ /* Just get some random number if the URI query fails */
+ GST_FIXME_OBJECT (demux, "Creating random stream-id, consider "
+ "implementing a deterministic way of creating a stream-id");
+ upstream_id =
+ g_strdup_printf ("%08x%08x%08x%08x", g_random_int (), g_random_int (),
+ g_random_int (), g_random_int ());
+ }
+
+ gst_query_unref (query);
+ }
+ return upstream_id;
+ }
+
static QtDemuxStream *
- _create_stream (void)
+ _create_stream (GstQTDemux * demux, guint32 track_id)
{
QtDemuxStream *stream;
+ gchar *upstream_id;
stream = g_new0 (QtDemuxStream, 1);
+ stream->demux = demux;
+ stream->track_id = track_id;
+ upstream_id = _get_upstream_id (demux);
+ stream->stream_id = g_strdup_printf ("%s/%03u", upstream_id, track_id);
+ g_free (upstream_id);
/* new streams always need a discont */
stream->discont = TRUE;
/* we enable clipping for raw audio/video streams */
stream->stream_tags = gst_tag_list_new_empty ();
gst_tag_list_set_scope (stream->stream_tags, GST_TAG_SCOPE_STREAM);
g_queue_init (&stream->protection_scheme_event_queue);
+ stream->ref_count = 1;
+ /* consistent default for push based mode */
+ gst_segment_init (&stream->segment, GST_FORMAT_TIME);
return stream;
}
demux->fragmented = TRUE;
demux->mss_mode = TRUE;
- if (demux->n_streams > 1) {
+ if (QTDEMUX_N_STREAMS (demux) > 1) {
/* can't do this, we can only renegotiate for another mss format */
return FALSE;
}
/* TODO update when stream changes during playback */
- if (demux->n_streams == 0) {
- stream = _create_stream ();
- demux->streams[demux->n_streams] = stream;
- demux->n_streams = 1;
+ if (QTDEMUX_N_STREAMS (demux) == 0) {
+ stream = _create_stream (demux, 1);
+ g_ptr_array_add (demux->active_streams, stream);
/* mss has no stsd/stsd entry, use id 0 as default */
stream->stsd_entries_length = 1;
stream->stsd_sample_description_id = stream->cur_stsd_entry_index = 0;
stream->stsd_entries = g_new0 (QtDemuxStreamStsdEntry, 1);
} else {
- stream = demux->streams[0];
+ stream = QTDEMUX_NTH_STREAM (demux, 0);
}
timescale_v = gst_structure_get_value (structure, "timescale");
static void
gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard)
{
- gint n;
+ gint i;
GST_DEBUG_OBJECT (qtdemux, "Resetting demux");
gst_pad_stop_task (qtdemux->sinkpad);
qtdemux->element_index = NULL;
#endif
qtdemux->major_brand = 0;
- if (qtdemux->pending_newsegment)
- gst_event_unref (qtdemux->pending_newsegment);
- qtdemux->pending_newsegment = NULL;
qtdemux->upstream_format_is_time = FALSE;
qtdemux->upstream_seekable = FALSE;
qtdemux->upstream_size = 0;
g_queue_foreach (&qtdemux->protection_event_queue, (GFunc) gst_event_unref,
NULL);
g_queue_clear (&qtdemux->protection_event_queue);
+
+ qtdemux->received_seek = FALSE;
+ qtdemux->first_moof_already_parsed = FALSE;
}
qtdemux->offset = 0;
gst_adapter_clear (qtdemux->adapter);
gst_segment_init (&qtdemux->segment, GST_FORMAT_TIME);
- qtdemux->segment_seqnum = 0;
+ qtdemux->need_segment = TRUE;
if (hard) {
- for (n = 0; n < qtdemux->n_streams; n++) {
- gst_qtdemux_stream_free (qtdemux, qtdemux->streams[n]);
- qtdemux->streams[n] = NULL;
- }
- qtdemux->n_streams = 0;
+ qtdemux->segment_seqnum = GST_SEQNUM_INVALID;
+ g_ptr_array_set_size (qtdemux->active_streams, 0);
+ g_ptr_array_set_size (qtdemux->old_streams, 0);
qtdemux->n_video_streams = 0;
qtdemux->n_audio_streams = 0;
qtdemux->n_sub_streams = 0;
gst_caps_replace (&qtdemux->media_caps, NULL);
qtdemux->timescale = 0;
qtdemux->got_moov = FALSE;
+ qtdemux->cenc_aux_info_offset = 0;
+ qtdemux->cenc_aux_info_sizes = NULL;
+ qtdemux->cenc_aux_sample_count = 0;
if (qtdemux->protection_system_ids) {
g_ptr_array_free (qtdemux->protection_system_ids, TRUE);
qtdemux->protection_system_ids = NULL;
}
+ qtdemux->streams_aware = GST_OBJECT_PARENT (qtdemux)
+ && GST_OBJECT_FLAG_IS_SET (GST_OBJECT_PARENT (qtdemux),
+ GST_BIN_FLAG_STREAMS_AWARE);
+
+ if (qtdemux->preferred_protection_system_id) {
+ g_free (qtdemux->preferred_protection_system_id);
+ qtdemux->preferred_protection_system_id = NULL;
+ }
} else if (qtdemux->mss_mode) {
gst_flow_combiner_reset (qtdemux->flowcombiner);
- for (n = 0; n < qtdemux->n_streams; n++)
- gst_qtdemux_stream_clear (qtdemux, qtdemux->streams[n]);
+ g_ptr_array_foreach (qtdemux->active_streams,
+ (GFunc) gst_qtdemux_stream_clear, NULL);
} else {
gst_flow_combiner_reset (qtdemux->flowcombiner);
- for (n = 0; n < qtdemux->n_streams; n++) {
- qtdemux->streams[n]->sent_eos = FALSE;
- qtdemux->streams[n]->time_position = 0;
- qtdemux->streams[n]->accumulated_base = 0;
- }
- if (!qtdemux->pending_newsegment) {
- qtdemux->pending_newsegment = gst_event_new_segment (&qtdemux->segment);
- if (qtdemux->segment_seqnum)
- gst_event_set_seqnum (qtdemux->pending_newsegment,
- qtdemux->segment_seqnum);
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ stream->sent_eos = FALSE;
+ stream->time_position = 0;
+ stream->accumulated_base = 0;
}
}
}
static void
gst_qtdemux_map_and_push_segments (GstQTDemux * qtdemux, GstSegment * segment)
{
- gint n, i;
+ gint i, iter;
- for (n = 0; n < qtdemux->n_streams; n++) {
- QtDemuxStream *stream = qtdemux->streams[n];
+ for (iter = 0; iter < QTDEMUX_N_STREAMS (qtdemux); iter++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, iter);
stream->time_position = segment->start;
* supported and are discarded when parsing the edts */
for (i = 0; i < stream->n_segments; i++) {
if (stream->segments[i].stop_time > segment->start) {
+ /* push the empty segment and move to the next one */
gst_qtdemux_activate_segment (qtdemux, stream, i,
stream->time_position);
if (QTSEGMENT_IS_EMPTY (&stream->segments[i])) {
- /* push the empty segment and move to the next one */
gst_qtdemux_send_gap_for_segment (qtdemux, stream, i,
stream->time_position);
+
+ /* accumulate previous segments */
+ if (GST_CLOCK_TIME_IS_VALID (stream->segment.stop))
+ stream->accumulated_base +=
+ (stream->segment.stop -
+ stream->segment.start) / ABS (stream->segment.rate);
continue;
}
}
}
+ static void
+ gst_qtdemux_stream_concat (GstQTDemux * qtdemux, GPtrArray * dest,
+ GPtrArray * src)
+ {
+ guint i;
+ guint len;
+
+ len = src->len;
+
+ if (len == 0)
+ return;
+
+ for (i = 0; i < len; i++) {
+ QtDemuxStream *stream = g_ptr_array_index (src, i);
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ GST_DEBUG_OBJECT (qtdemux, "Move stream %p (stream-id %s) to %p",
+ stream, GST_STR_NULL (stream->stream_id), dest);
+ #endif
+ g_ptr_array_add (dest, gst_qtdemux_stream_ref (stream));
+ }
+
+ g_ptr_array_set_size (src, 0);
+ }
+
static gboolean
gst_qtdemux_handle_sink_event (GstPad * sinkpad, GstObject * parent,
GstEvent * event)
GST_DEBUG_OBJECT (demux, "received newsegment %" GST_SEGMENT_FORMAT,
&segment);
- /* erase any previously set segment */
- gst_event_replace (&demux->pending_newsegment, NULL);
-
if (segment.format == GST_FORMAT_TIME) {
- GST_DEBUG_OBJECT (demux, "new pending_newsegment");
- gst_event_replace (&demux->pending_newsegment, event);
demux->upstream_format_is_time = TRUE;
+ demux->segment_seqnum = gst_event_get_seqnum (event);
} else {
GST_DEBUG_OBJECT (demux, "Not storing upstream newsegment, "
"not in time format");
/* chain will send initial newsegment after pads have been added */
- if (demux->state != QTDEMUX_STATE_MOVIE || !demux->n_streams) {
+ if (demux->state != QTDEMUX_STATE_MOVIE || !QTDEMUX_N_STREAMS (demux)) {
GST_DEBUG_OBJECT (demux, "still starting, eating event");
goto exit;
}
GST_DEBUG_OBJECT (demux, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);
/* map segment to internal qt segments and push on each stream */
- if (demux->n_streams) {
- if (demux->fragmented) {
- GstEvent *segment_event = gst_event_new_segment (&segment);
-
- gst_event_replace (&demux->pending_newsegment, NULL);
- gst_event_set_seqnum (segment_event, demux->segment_seqnum);
- gst_qtdemux_push_event (demux, segment_event);
- } else {
- gst_event_replace (&demux->pending_newsegment, NULL);
- gst_qtdemux_map_and_push_segments (demux, &segment);
- }
+ if (QTDEMUX_N_STREAMS (demux)) {
+ demux->need_segment = TRUE;
+ gst_qtdemux_check_send_pending_segment (demux);
}
/* clear leftover in current segment, if any */
gst_event_unref (event);
goto drop;
}
- break;
+ QTDEMUX_EXPOSE_LOCK (demux);
+ res = gst_pad_event_default (demux->sinkpad, parent, event);
+ QTDEMUX_EXPOSE_UNLOCK (demux);
+ goto drop;
}
case GST_EVENT_FLUSH_STOP:
{
if (!demux->pullbased) {
gint i;
gboolean has_valid_stream = FALSE;
- for (i = 0; i < demux->n_streams; i++) {
- if (demux->streams[i]->pad != NULL) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ if (QTDEMUX_NTH_STREAM (demux, i)->pad != NULL) {
has_valid_stream = TRUE;
break;
}
res = TRUE;
goto drop;
}
+ case GST_EVENT_STREAM_START:
+ {
+ res = TRUE;
+ gst_event_unref (event);
+
+ /* Drain all the buffers */
+ gst_qtdemux_process_adapter (demux, TRUE);
+ gst_qtdemux_reset (demux, FALSE);
+ /* We expect new moov box after new stream-start event */
+ if (demux->exposed) {
+ gst_qtdemux_stream_concat (demux,
+ demux->old_streams, demux->active_streams);
+ }
+
+ goto drop;
+ }
default:
break;
}
return res;
}
+ /* Sink-pad query handler. Answers BITRATE queries with a whole-file
+ * average (8 * upstream byte size / duration); every other query type
+ * is forwarded to the default pad query handler. */
+ static gboolean
+ gst_qtdemux_handle_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstQTDemux *demux = GST_QTDEMUX (parent);
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_BITRATE:
+ {
+ GstClockTime duration;
+
+ /* populate demux->upstream_size if not done yet */
+ gst_qtdemux_check_seekability (demux);
+
+ if (demux->upstream_size != -1
+ && gst_qtdemux_get_duration (demux, &duration)) {
+ /* average bitrate in bits per second, scaled without overflow */
+ guint bitrate =
+ gst_util_uint64_scale (8 * demux->upstream_size, GST_SECOND,
+ duration);
+
+ GST_LOG_OBJECT (demux, "bitrate query byte length: %" G_GUINT64_FORMAT
+ " duration %" GST_TIME_FORMAT " resulting a bitrate of %u",
+ demux->upstream_size, GST_TIME_ARGS (duration), bitrate);
+
+ /* TODO: better results based on ranges/index tables */
+ gst_query_set_bitrate (query, bitrate);
+ res = TRUE;
+ }
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, (GstObject *) demux, query);
+ break;
+ }
+
+ return res;
+ }
+
+
#if 0
static void
gst_qtdemux_set_index (GstElement * element, GstIndex * index)
}
static void
- gst_qtdemux_stream_flush_segments_data (GstQTDemux * qtdemux,
- QtDemuxStream * stream)
+ gst_qtdemux_stream_flush_segments_data (QtDemuxStream * stream)
{
g_free (stream->segments);
stream->segments = NULL;
}
static void
- gst_qtdemux_stream_flush_samples_data (GstQTDemux * qtdemux,
- QtDemuxStream * stream)
+ gst_qtdemux_stream_flush_samples_data (QtDemuxStream * stream)
{
g_free (stream->samples);
stream->samples = NULL;
}
static void
- gst_qtdemux_stream_clear (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ gst_qtdemux_stream_clear (QtDemuxStream * stream)
{
gint i;
if (stream->allocator)
entry->sparse = FALSE;
}
- gst_tag_list_unref (stream->stream_tags);
+ if (stream->stream_tags)
+ gst_tag_list_unref (stream->stream_tags);
+
stream->stream_tags = gst_tag_list_new_empty ();
gst_tag_list_set_scope (stream->stream_tags, GST_TAG_SCOPE_STREAM);
g_free (stream->redirect_uri);
g_queue_foreach (&stream->protection_scheme_event_queue,
(GFunc) gst_event_unref, NULL);
g_queue_clear (&stream->protection_scheme_event_queue);
- gst_qtdemux_stream_flush_segments_data (qtdemux, stream);
- gst_qtdemux_stream_flush_samples_data (qtdemux, stream);
+ gst_qtdemux_stream_flush_segments_data (stream);
+ gst_qtdemux_stream_flush_samples_data (stream);
}
static void
- gst_qtdemux_stream_free (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ gst_qtdemux_stream_reset (QtDemuxStream * stream)
{
gint i;
- gst_qtdemux_stream_clear (qtdemux, stream);
+ gst_qtdemux_stream_clear (stream);
for (i = 0; i < stream->stsd_entries_length; i++) {
QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[i];
if (entry->caps) {
entry->caps = NULL;
}
}
- gst_tag_list_unref (stream->stream_tags);
- if (stream->pad) {
- gst_element_remove_pad (GST_ELEMENT_CAST (qtdemux), stream->pad);
- gst_flow_combiner_remove_pad (qtdemux->flowcombiner, stream->pad);
- }
g_free (stream->stsd_entries);
- g_free (stream);
+ stream->stsd_entries = NULL;
+ stream->stsd_entries_length = 0;
}
- static void
- gst_qtdemux_remove_stream (GstQTDemux * qtdemux, int i)
+ static QtDemuxStream *
+ gst_qtdemux_stream_ref (QtDemuxStream * stream)
{
- g_assert (i >= 0 && i < qtdemux->n_streams && qtdemux->streams[i] != NULL);
+ g_atomic_int_add (&stream->ref_count, 1);
+
+ return stream;
+ }
- gst_qtdemux_stream_free (qtdemux, qtdemux->streams[i]);
- qtdemux->streams[i] = qtdemux->streams[qtdemux->n_streams - 1];
- qtdemux->streams[qtdemux->n_streams - 1] = NULL;
- qtdemux->n_streams--;
+ static void
+ gst_qtdemux_stream_unref (QtDemuxStream * stream)
+ {
+ if (g_atomic_int_dec_and_test (&stream->ref_count)) {
+ gst_qtdemux_stream_reset (stream);
+ gst_tag_list_unref (stream->stream_tags);
+ if (stream->pad) {
+ GstQTDemux *demux = stream->demux;
+ gst_element_remove_pad (GST_ELEMENT_CAST (demux), stream->pad);
+ gst_flow_combiner_remove_pad (demux->flowcombiner, stream->pad);
+ }
+ g_free (stream->stream_id);
+ g_free (stream);
+ }
}
static GstStateChangeReturn
GstStateChangeReturn result = GST_STATE_CHANGE_FAILURE;
switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_qtdemux_reset (qtdemux, TRUE);
break;
default:
break;
}
static void
+ gst_qtdemux_set_context (GstElement * element, GstContext * context)
+ {
+ GstQTDemux *qtdemux = GST_QTDEMUX (element);
+
+ g_return_if_fail (GST_IS_CONTEXT (context));
+
+ if (gst_context_has_context_type (context,
+ "drm-preferred-decryption-system-id")) {
+ const GstStructure *s;
+
+ s = gst_context_get_structure (context);
+ g_free (qtdemux->preferred_protection_system_id);
+ qtdemux->preferred_protection_system_id =
+ g_strdup (gst_structure_get_string (s, "decryption-system-id"));
+ GST_DEBUG_OBJECT (element, "set preferred decryption system to %s",
+ qtdemux->preferred_protection_system_id);
+ }
+
+ GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
+ }
+
+ static void
qtdemux_parse_ftyp (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
{
/* counts as header data */
}
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+/* Extract the integer child text of <param_name> from xml_str into *value.
+ * On any parse problem *value is left untouched (callers pre-set defaults). */
+static void
+_get_int_value_from_xml_string (GstQTDemux * qtdemux,
+ const char *xml_str, const char *param_name, int *value)
+{
+ char *value_start, *value_end, *endptr;
+ const short value_length_max = 12;
+ char init_view_ret[12];
+ int value_length = 0;
+ int i = 0;
+
+ value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+ if (!value_start) {
+ GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+ param_name);
+ return;
+ }
+
+ /* skip past the opening tag and any leading whitespace */
+ value_start += strlen (param_name);
+ while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+ value_start++;
+
+ value_end = strchr (value_start, '<');
+ if (!value_end) {
+ GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+ return;
+ }
+
+ /* trim trailing whitespace before the closing tag */
+ value_length = value_end - value_start;
+ while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+ || (value_start[value_length - 1] == '\t')))
+ value_length--;
+
+ /* validate: optional sign followed by digits only */
+ if (value_start[i] == '+' || value_start[i] == '-')
+ i++;
+ while (i < value_length) {
+ if (value_start[i] < '0' || value_start[i] > '9') {
+ GST_ERROR_OBJECT (qtdemux,
+ "error: incorrect value, integer was expected\n");
+ return;
+ }
+ i++;
+ }
+
+ if (value_length >= value_length_max || value_length < 1) {
+ GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+ return;
+ }
+
+ /* copy only the validated digits: copying value_length_max bytes (as the
+ * old code did) read past the value into the rest of the XML string */
+ strncpy (init_view_ret, value_start, value_length);
+ init_view_ret[value_length] = '\0';
+
+ *value = strtol (init_view_ret, &endptr, 10);
+ if (endptr == init_view_ret) {
+ GST_ERROR_OBJECT (qtdemux, "error: no digits were found\n");
+ return;
+ }
+
+ return;
+}
+
+/* Extract the string child text of <param_name> from xml_str into *value
+ * (newly allocated with the GLib allocator; free with g_free). On any parse
+ * problem *value is left untouched (callers pre-set defaults).
+ * NOTE(review): assumes *value does not already own a string — a repeated
+ * call for the same field would leak the previous allocation; confirm with
+ * the callers. */
+static void
+_get_string_value_from_xml_string (GstQTDemux * qtdemux,
+ const char *xml_str, const char *param_name, char **value)
+{
+ char *value_start, *value_end;
+ const short value_length_max = 256;
+ int value_length = 0;
+
+ value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+ if (!value_start) {
+ GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+ param_name);
+ return;
+ }
+
+ /* skip past the opening tag and any leading whitespace */
+ value_start += strlen (param_name);
+ while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+ value_start++;
+
+ value_end = strchr (value_start, '<');
+ if (!value_end) {
+ GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+ return;
+ }
+
+ /* trim trailing whitespace before the closing tag */
+ value_length = value_end - value_start;
+ while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+ || (value_start[value_length - 1] == '\t')))
+ value_length--;
+
+ if (value_length >= value_length_max || value_length < 1) {
+ GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+ return;
+ }
+
+ /* use g_strndup, not raw strndup: the rest of this element allocates with
+ * GLib and frees with g_free, so don't mix allocators */
+ *value = g_strndup (value_start, value_length);
+
+ return;
+}
+
+/* Extract the boolean child text of <param_name> from xml_str into *value.
+ * Any value that does not contain the literal "true" yields FALSE; on
+ * parse problems *value is left untouched (callers pre-set defaults). */
+static void
+_get_bool_value_from_xml_string (GstQTDemux * qtdemux,
+ const char *xml_str, const char *param_name, gboolean * value)
+{
+ char *value_start, *value_end;
+ int value_length = 0;
+
+ value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+ if (!value_start) {
+ GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+ param_name);
+ return;
+ }
+
+ /* skip past the opening tag and any leading whitespace */
+ value_start += strlen (param_name);
+ while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+ value_start++;
+
+ value_end = strchr (value_start, '<');
+ if (!value_end) {
+ GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+ return;
+ }
+
+ /* trim trailing whitespace before the closing tag */
+ value_length = value_end - value_start;
+ while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+ || (value_start[value_length - 1] == '\t')))
+ value_length--;
+
+ if (value_length < 1) {
+ GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+ return;
+ }
+
+ /* substring match: e.g. " true " within the value still counts as TRUE */
+ *value = g_strstr_len(value_start, value_length, "true") ? TRUE : FALSE;
+
+ return;
+}
+
+/* Scan the GSpherical XML blob for the known spatial-video elements and fill
+ * in qtdemux->spherical_metadata. The detailed fields are parsed only when
+ * the video declares itself both spherical and stitched; per-field parse
+ * failures simply leave that field at its previous value. */
+static void
+_parse_spatial_video_metadata_from_xml_string (GstQTDemux * qtdemux, const char *xmlStr)
+{
+ const char is_spherical_str[] = "<GSpherical:Spherical>";
+ const char is_stitched_str[] = "<GSpherical:Stitched>";
+ const char stitching_software_str[] = "<GSpherical:StitchingSoftware>";
+ const char projection_type_str[] = "<GSpherical:ProjectionType>";
+ const char stereo_mode_str[] = "<GSpherical:StereoMode>";
+ const char source_count_str[] = "<GSpherical:SourceCount>";
+ const char init_view_heading_str[] = "<GSpherical:InitialViewHeadingDegrees>";
+ const char init_view_pitch_str[] = "<GSpherical:InitialViewPitchDegrees>";
+ const char init_view_roll_str[] = "<GSpherical:InitialViewRollDegrees>";
+ const char timestamp_str[] = "<GSpherical:Timestamp>";
+ const char full_pano_width_str[] = "<GSpherical:FullPanoWidthPixels>";
+ const char full_pano_height_str[] = "<GSpherical:FullPanoHeightPixels>";
+ const char cropped_area_image_width_str[] =
+ "<GSpherical:CroppedAreaImageWidthPixels>";
+ const char cropped_area_image_height_str[] =
+ "<GSpherical:CroppedAreaImageHeightPixels>";
+ const char cropped_area_left_str[] = "<GSpherical:CroppedAreaLeftPixels>";
+ const char cropped_area_top_str[] = "<GSpherical:CroppedAreaTopPixels>";
+
+ QtDemuxSphericalMetadata * spherical_metadata = qtdemux->spherical_metadata;
+
+ /* NOTE(review): the casts assume is_spherical/is_stitched are int-sized
+ * fields compatible with gboolean — confirm in QtDemuxSphericalMetadata */
+ _get_bool_value_from_xml_string (qtdemux, xmlStr, is_spherical_str,
+ (gboolean *) & spherical_metadata->is_spherical);
+ _get_bool_value_from_xml_string (qtdemux, xmlStr, is_stitched_str,
+ (gboolean *) & spherical_metadata->is_stitched);
+
+ if (spherical_metadata->is_spherical && spherical_metadata->is_stitched) {
+ _get_string_value_from_xml_string (qtdemux, xmlStr,
+ stitching_software_str, &spherical_metadata->stitching_software);
+ _get_string_value_from_xml_string (qtdemux, xmlStr,
+ projection_type_str, &spherical_metadata->projection_type);
+ _get_string_value_from_xml_string (qtdemux, xmlStr, stereo_mode_str,
+ &spherical_metadata->stereo_mode);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, source_count_str,
+ &spherical_metadata->source_count);
+ _get_int_value_from_xml_string (qtdemux, xmlStr,
+ init_view_heading_str, &spherical_metadata->init_view_heading);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_pitch_str,
+ &spherical_metadata->init_view_pitch);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_roll_str,
+ &spherical_metadata->init_view_roll);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, timestamp_str,
+ &spherical_metadata->timestamp);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, full_pano_width_str,
+ &spherical_metadata->full_pano_width_pixels);
+ _get_int_value_from_xml_string (qtdemux, xmlStr,
+ full_pano_height_str, &spherical_metadata->full_pano_height_pixels);
+ _get_int_value_from_xml_string (qtdemux, xmlStr,
+ cropped_area_image_width_str,
+ &spherical_metadata->cropped_area_image_width);
+ _get_int_value_from_xml_string (qtdemux, xmlStr,
+ cropped_area_image_height_str,
+ &spherical_metadata->cropped_area_image_height);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_left_str,
+ &spherical_metadata->cropped_area_left);
+ _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_top_str,
+ &spherical_metadata->cropped_area_top);
+ }
+
+ return;
+}
+
+/* Register the Tizen spatial-media (GSpherical / ambisonic) tag names with
+ * the GStreamer tag registry so they can be carried in tag lists and posted
+ * on the bus. The final NULL argument is the merge function parameter of
+ * gst_tag_register. */
+static void
+gst_tag_register_spherical_tags (void) {
+ gst_tag_register ("is_spherical", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-spherical"),
+ _("Flag indicating if the video is a spherical video"),
+ NULL);
+ gst_tag_register ("is_stitched", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-stitched"),
+ _("Flag indicating if the video is stitched"),
+ NULL);
+ gst_tag_register ("stitching_software", GST_TAG_FLAG_META,
+ G_TYPE_STRING,
+ _("tag-stitching-software"),
+ _("Software used to stitch the spherical video"),
+ NULL);
+ gst_tag_register ("projection_type", GST_TAG_FLAG_META,
+ G_TYPE_STRING,
+ _("tag-projection-type"),
+ _("Projection type used in the video frames"),
+ NULL);
+ gst_tag_register ("stereo_mode", GST_TAG_FLAG_META,
+ G_TYPE_STRING,
+ _("tag-stereo-mode"),
+ _("Description of stereoscopic 3D layout"),
+ NULL);
+ gst_tag_register ("source_count", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-source-count"),
+ _("Number of cameras used to create the spherical video"),
+ NULL);
+ gst_tag_register ("init_view_heading", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-init-view-heading"),
+ _("The heading angle of the initial view in degrees"),
+ NULL);
+ gst_tag_register ("init_view_pitch", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-init-view-pitch"),
+ _("The pitch angle of the initial view in degrees"),
+ NULL);
+ gst_tag_register ("init_view_roll", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-init-view-roll"),
+ _("The roll angle of the initial view in degrees"),
+ NULL);
+ gst_tag_register ("timestamp", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-timestamp"),
+ _("Epoch timestamp of when the first frame in the video was recorded"),
+ NULL);
+ gst_tag_register ("full_pano_width_pixels", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-full-pano-width"),
+ _("Width of the encoded video frame in pixels"),
+ NULL);
+ gst_tag_register ("full_pano_height_pixels", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-full-pano-height"),
+ _("Height of the encoded video frame in pixels"),
+ NULL);
+ gst_tag_register ("cropped_area_image_width", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-cropped-area-image-width"),
+ _("Width of the video frame to display (e.g. cropping)"),
+ NULL);
+ gst_tag_register ("cropped_area_image_height", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-cropped-area-image-height"),
+ _("Height of the video frame to display (e.g. cropping)"),
+ NULL);
+ gst_tag_register ("cropped_area_left", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-cropped-area-left"),
+ _("Column where the left edge of the image was cropped from the"
+ " full sized panorama"),
+ NULL);
+ gst_tag_register ("cropped_area_top", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-cropped-area-top"),
+ _("Row where the top edge of the image was cropped from the"
+ " full sized panorama"),
+ NULL);
+ gst_tag_register ("ambisonic_type", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-ambisonic-type"),
+ _("Specifies the type of ambisonic audio represented"),
+ NULL);
+ gst_tag_register ("ambisonic_format", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-ambisonic-format"),
+ _("Specifies the ambisonic audio format"),
+ NULL);
+ gst_tag_register ("ambisonic_order", GST_TAG_FLAG_META,
+ G_TYPE_INT,
+ _("tag-ambisonic-order"),
+ _("Specifies the ambisonic audio channel order"),
+ NULL);
+
+ return;
+}
+
+/* Dump the collected spherical metadata to the debug log, then post it as a
+ * tag message on the bus. String fields are added to the tag list only when
+ * non-NULL; a copy of the list is handed to the message so the local
+ * reference can be dropped afterwards. */
+static void
+_send_spherical_metadata_msg_to_bus (GstQTDemux * qtdemux)
+{
+ GstTagList *taglist;
+ QtDemuxSphericalMetadata *spherical_metadata = qtdemux->spherical_metadata;
+
+ GST_DEBUG_OBJECT (qtdemux, "is_spherical = %d",
+ spherical_metadata->is_spherical);
+ GST_DEBUG_OBJECT (qtdemux, "is_stitched = %d",
+ spherical_metadata->is_stitched);
+ GST_DEBUG_OBJECT (qtdemux, "stitching_software = %s",
+ spherical_metadata->stitching_software);
+ GST_DEBUG_OBJECT (qtdemux, "projection_type = %s",
+ spherical_metadata->projection_type);
+ GST_DEBUG_OBJECT (qtdemux, "stereo_mode = %s",
+ spherical_metadata->stereo_mode);
+ GST_DEBUG_OBJECT (qtdemux, "source_count %d",
+ spherical_metadata->source_count);
+ GST_DEBUG_OBJECT (qtdemux, "init_view_heading = %d",
+ spherical_metadata->init_view_heading);
+ GST_DEBUG_OBJECT (qtdemux, "init_view_pitch = %d",
+ spherical_metadata->init_view_pitch);
+ GST_DEBUG_OBJECT (qtdemux, "init_view_roll = %d",
+ spherical_metadata->init_view_roll);
+ GST_DEBUG_OBJECT (qtdemux, "timestamp = %d", spherical_metadata->timestamp);
+ GST_DEBUG_OBJECT (qtdemux, "full_pano_width_pixels = %d",
+ spherical_metadata->full_pano_width_pixels);
+ GST_DEBUG_OBJECT (qtdemux, "full_pano_height_pixels = %d",
+ spherical_metadata->full_pano_height_pixels);
+ GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_width = %d",
+ spherical_metadata->cropped_area_image_width);
+ GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_height = %d",
+ spherical_metadata->cropped_area_image_height);
+ GST_DEBUG_OBJECT (qtdemux, "cropped_area_left = %d",
+ spherical_metadata->cropped_area_left);
+ GST_DEBUG_OBJECT (qtdemux, "cropped_area_top = %d",
+ spherical_metadata->cropped_area_top);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_type = %d",
+ spherical_metadata->ambisonic_type);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_order = %d",
+ spherical_metadata->ambisonic_order);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_format = %d",
+ spherical_metadata->ambisonic_format);
+
+ /* integer-valued tags first; they always have a defined value */
+ taglist = gst_tag_list_new_empty ();
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ "is_spherical", spherical_metadata->is_spherical,
+ "is_stitched", spherical_metadata->is_stitched,
+ "source_count", spherical_metadata->source_count,
+ "init_view_heading", spherical_metadata->init_view_heading,
+ "init_view_pitch", spherical_metadata->init_view_pitch,
+ "init_view_roll", spherical_metadata->init_view_roll,
+ "timestamp", spherical_metadata->timestamp,
+ "full_pano_width_pixels", spherical_metadata->full_pano_width_pixels,
+ "full_pano_height_pixels", spherical_metadata->full_pano_height_pixels,
+ "cropped_area_image_width", spherical_metadata->cropped_area_image_width,
+ "cropped_area_image_height", spherical_metadata->cropped_area_image_height,
+ "cropped_area_left", spherical_metadata->cropped_area_left,
+ "cropped_area_top", spherical_metadata->cropped_area_top,
+ "ambisonic_type", spherical_metadata->ambisonic_type,
+ "ambisonic_format", spherical_metadata->ambisonic_format,
+ "ambisonic_order", spherical_metadata->ambisonic_order,
+ NULL);
+
+ /* string tags may be unset (NULL): add each one only when present */
+ if (spherical_metadata->stitching_software)
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ "stitching_software", spherical_metadata->stitching_software,
+ NULL);
+ if (spherical_metadata->projection_type)
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ "projection_type", spherical_metadata->projection_type,
+ NULL);
+ if (spherical_metadata->stereo_mode)
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ "stereo_mode", spherical_metadata->stereo_mode,
+ NULL);
+
+ /* post a copy so our own reference can be released below */
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
+ gst_message_new_tag (GST_OBJECT_CAST (qtdemux),
+ gst_tag_list_copy (taglist)));
+
+ gst_tag_list_unref(taglist);
+
+ return;
+}
+
+/* Parse a Google Spatial-Audio (SA3D) box and map its fields onto
+ * qtdemux->spherical_metadata. All fields come from the (untrusted) file;
+ * only the supported RFC version / first-order-ambisonic layouts are
+ * translated, anything else is ignored. */
+static void
+qtdemux_parse_SA3D (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
+{
+ guint offset = 0;
+
+ guint8 version = 0;
+ guint8 ambisonic_type = 0;
+ guint32 ambisonic_order = 0;
+ guint8 ambisonic_channel_ordering = 0;
+ guint8 ambisonic_normalization = 0;
+ guint32 num_channels = 0;
+ guint32 channel_map[49] = { 0 }; /* Up to 6th order */
+
+ int i;
+
+ GST_DEBUG_OBJECT (qtdemux, "qtdemux_parse_SA3D");
+
+ qtdemux->header_size += length;
+ /* 64-bit atoms (32-bit size field == 0) carry an 8-byte extended size */
+ offset = (QT_UINT32 (buffer) == 0) ? 16 : 8;
+
+ if (length <= offset + 16) {
+ GST_DEBUG_OBJECT (qtdemux, "SA3D atom is too short, skipping");
+ return;
+ }
+
+ version = QT_UINT8 (buffer + offset);
+ ambisonic_type = QT_UINT8 (buffer + offset + 1);
+ ambisonic_order = QT_UINT32 (buffer + offset + 2);
+ ambisonic_channel_ordering = QT_UINT8 (buffer + offset + 6);
+ ambisonic_normalization = QT_UINT8 (buffer + offset + 7);
+ num_channels = QT_UINT32 (buffer + offset + 8);
+
+ /* num_channels is untrusted: without this check a bogus value overflows
+ * the 49-entry channel_map[] on the stack and reads past the atom */
+ if (num_channels > G_N_ELEMENTS (channel_map)
+ || (guint) length < offset + 12 + num_channels * 4) {
+ GST_WARNING_OBJECT (qtdemux,
+ "SA3D atom has bogus channel count %u, skipping", num_channels);
+ return;
+ }
+
+ for (i = 0; i < num_channels; ++i)
+ channel_map[i] = QT_UINT32 (buffer + offset + 12 + i * 4);
+
+ GST_DEBUG_OBJECT (qtdemux, "version: %d", version);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_type: %d", ambisonic_type);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_order: %u", ambisonic_order);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_channel_ordering: %d",
+ ambisonic_channel_ordering);
+ GST_DEBUG_OBJECT (qtdemux, "ambisonic_normalization: %d",
+ ambisonic_normalization);
+ GST_DEBUG_OBJECT (qtdemux, "num_channels: %u", num_channels);
+ for (i = 0; i < num_channels; ++i)
+ GST_DEBUG_OBJECT (qtdemux, "channel_map: %u", channel_map[i]);
+
+ if (version == RFC_AMBISONIC_SA3DBOX_VERSION_SUPPORTED) {
+ if (ambisonic_type == RFC_AMBISONIC_TYPE_PERIPHONIC)
+ qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_PERIPHONIC;
+
+ if (ambisonic_order == RFC_AMBISONIC_ORDER_FOA) {
+ if (num_channels == 4) {
+ qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_FOA;
+
+ /* ACN ordering + SN3D normalization + identity map => AmbiX */
+ if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_ACN)
+ && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_SN3D)
+ && (channel_map[0] == 0) && (channel_map[1] == 1)
+ && (channel_map[2] == 2) && (channel_map[3] == 3))
+ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMBIX;
+
+ /* FuMa ordering/normalization + WXYZ->WZXY map => traditional .amb */
+ if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_FUMA)
+ && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_FUMA)
+ && (channel_map[0] == 0) && (channel_map[1] == 3)
+ && (channel_map[2] == 1) && (channel_map[3] == 2))
+ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMB;
+ }
+ }
+ }
+
+ return;
+}
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
static void
+ qtdemux_update_default_sample_encryption_settings (GstQTDemux * qtdemux,
+ QtDemuxCencSampleSetInfo * info, guint32 is_encrypted, guint8 iv_size,
+ const guint8 * kid)
+ {
+ GstBuffer *kid_buf = gst_buffer_new_allocate (NULL, 16, NULL);
+ gst_buffer_fill (kid_buf, 0, kid, 16);
+ if (info->default_properties)
+ gst_structure_free (info->default_properties);
+ info->default_properties =
+ gst_structure_new ("application/x-cenc",
+ "iv_size", G_TYPE_UINT, iv_size,
+ "encrypted", G_TYPE_BOOLEAN, (is_encrypted == 1),
+ "kid", GST_TYPE_BUFFER, kid_buf, NULL);
+ GST_DEBUG_OBJECT (qtdemux, "default sample properties: "
+ "is_encrypted=%u, iv_size=%u", is_encrypted, iv_size);
+ gst_buffer_unref (kid_buf);
+ }
+
+ static gboolean
+ qtdemux_update_default_piff_encryption_settings (GstQTDemux * qtdemux,
+ QtDemuxCencSampleSetInfo * info, GstByteReader * br)
+ {
+ guint32 algorithm_id = 0;
+ const guint8 *kid;
+ gboolean is_encrypted = TRUE;
+ guint8 iv_size = 8;
+
+ if (!gst_byte_reader_get_uint24_le (br, &algorithm_id)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting box's algorithm ID field");
+ return FALSE;
+ }
+
+ algorithm_id >>= 8;
+ if (algorithm_id == 0) {
+ is_encrypted = FALSE;
+ } else if (algorithm_id == 1) {
+ GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CTR encrypted stream");
+ } else if (algorithm_id == 2) {
+ GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CBC encrypted stream");
+ }
+
+ if (!gst_byte_reader_get_uint8 (br, &iv_size))
+ return FALSE;
+
+ if (!gst_byte_reader_get_data (br, 16, &kid))
+ return FALSE;
+
+ qtdemux_update_default_sample_encryption_settings (qtdemux, info,
+ is_encrypted, iv_size, kid);
+ gst_structure_set (info->default_properties, "piff_algorithm_id",
+ G_TYPE_UINT, algorithm_id, NULL);
+ return TRUE;
+ }
+
+
+ static void
qtdemux_parse_piff (GstQTDemux * qtdemux, const guint8 * buffer, gint length,
guint offset)
{
guint8 version;
guint32 flags = 0;
guint i;
- guint8 iv_size = 8;
+ guint iv_size = 8;
QtDemuxStream *stream;
GstStructure *structure;
QtDemuxCencSampleSetInfo *ss_info = NULL;
const gchar *system_id;
gboolean uses_sub_sample_encryption = FALSE;
+ guint32 sample_count;
- if (qtdemux->n_streams == 0)
+ if (QTDEMUX_N_STREAMS (qtdemux) == 0)
return;
- stream = qtdemux->streams[0];
+ stream = QTDEMUX_NTH_STREAM (qtdemux, 0);
structure = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
if (!gst_structure_has_name (structure, "application/x-cenc")) {
stream->protection_scheme_info = g_new0 (QtDemuxCencSampleSetInfo, 1);
ss_info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+ if (!ss_info->default_properties) {
+ ss_info->default_properties =
+ gst_structure_new ("application/x-cenc",
+ "iv_size", G_TYPE_UINT, iv_size, "encrypted", G_TYPE_BOOLEAN, TRUE,
+ NULL);
- if (ss_info->default_properties)
- gst_structure_free (ss_info->default_properties);
-
- ss_info->default_properties =
- gst_structure_new ("application/x-cenc",
- "iv_size", G_TYPE_UINT, iv_size, "encrypted", G_TYPE_BOOLEAN, TRUE, NULL);
+ }
if (ss_info->crypto_info) {
GST_LOG_OBJECT (qtdemux, "unreffing existing crypto_info");
}
if ((flags & 0x000001)) {
- guint32 algorithm_id = 0;
- const guint8 *kid;
- GstBuffer *kid_buf;
- gboolean is_encrypted = TRUE;
-
- if (!gst_byte_reader_get_uint24_le (&br, &algorithm_id)) {
- GST_ERROR_OBJECT (qtdemux, "Error getting box's algorithm ID field");
- return;
- }
-
- algorithm_id >>= 8;
- if (algorithm_id == 0) {
- is_encrypted = FALSE;
- } else if (algorithm_id == 1) {
- /* FIXME: maybe store this in properties? */
- GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CTR encrypted stream");
- } else if (algorithm_id == 2) {
- /* FIXME: maybe store this in properties? */
- GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CBC encrypted stream");
- }
-
- if (!gst_byte_reader_get_uint8 (&br, &iv_size))
- return;
-
- if (!gst_byte_reader_get_data (&br, 16, &kid))
+ if (!qtdemux_update_default_piff_encryption_settings (qtdemux, ss_info,
+ &br))
return;
-
- kid_buf = gst_buffer_new_allocate (NULL, 16, NULL);
- gst_buffer_fill (kid_buf, 0, kid, 16);
- if (ss_info->default_properties)
- gst_structure_free (ss_info->default_properties);
- ss_info->default_properties =
- gst_structure_new ("application/x-cenc",
- "iv_size", G_TYPE_UINT, iv_size,
- "encrypted", G_TYPE_BOOLEAN, is_encrypted,
- "kid", GST_TYPE_BUFFER, kid_buf, NULL);
- GST_DEBUG_OBJECT (qtdemux, "default sample properties: "
- "is_encrypted=%u, iv_size=%u", is_encrypted, iv_size);
- gst_buffer_unref (kid_buf);
} else if ((flags & 0x000002)) {
uses_sub_sample_encryption = TRUE;
}
- if (!gst_byte_reader_get_uint32_be (&br, &qtdemux->cenc_aux_sample_count)) {
+ if (!gst_structure_get_uint (ss_info->default_properties, "iv_size",
+ &iv_size)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting encryption IV size field");
+ return;
+ }
+
+ if (!gst_byte_reader_get_uint32_be (&br, &sample_count)) {
GST_ERROR_OBJECT (qtdemux, "Error getting box's sample count field");
return;
}
ss_info->crypto_info =
- g_ptr_array_new_full (qtdemux->cenc_aux_sample_count,
+ g_ptr_array_new_full (sample_count,
(GDestroyNotify) qtdemux_gst_structure_free);
- for (i = 0; i < qtdemux->cenc_aux_sample_count; ++i) {
+ for (i = 0; i < sample_count; ++i) {
GstStructure *properties;
guint8 *data;
GstBuffer *buf;
properties = qtdemux_get_cenc_sample_properties (qtdemux, stream, i);
if (properties == NULL) {
GST_ERROR_OBJECT (qtdemux, "failed to get properties for sample %u", i);
+ qtdemux->cenc_aux_sample_count = i;
return;
}
if (!gst_byte_reader_dup_data (&br, iv_size, &data)) {
GST_ERROR_OBJECT (qtdemux, "IV data not present for sample %u", i);
gst_structure_free (properties);
+ qtdemux->cenc_aux_sample_count = i;
return;
}
buf = gst_buffer_new_wrapped (data, iv_size);
if (uses_sub_sample_encryption) {
guint16 n_subsamples;
+ const GValue *kid_buf_value;
if (!gst_byte_reader_get_uint16_be (&br, &n_subsamples)
|| n_subsamples == 0) {
GST_ERROR_OBJECT (qtdemux,
"failed to get subsample count for sample %u", i);
gst_structure_free (properties);
+ qtdemux->cenc_aux_sample_count = i;
return;
}
GST_LOG_OBJECT (qtdemux, "subsample count: %u", n_subsamples);
GST_ERROR_OBJECT (qtdemux, "failed to get subsample data for sample %u",
i);
gst_structure_free (properties);
+ qtdemux->cenc_aux_sample_count = i;
return;
}
buf = gst_buffer_new_wrapped (data, n_subsamples * 6);
+
+ kid_buf_value =
+ gst_structure_get_value (ss_info->default_properties, "kid");
+
gst_structure_set (properties,
"subsample_count", G_TYPE_UINT, n_subsamples,
"subsamples", GST_TYPE_BUFFER, buf, NULL);
+ gst_structure_set_value (properties, "kid", kid_buf_value);
gst_buffer_unref (buf);
} else {
gst_structure_set (properties, "subsample_count", G_TYPE_UINT, 0, NULL);
g_ptr_array_add (ss_info->crypto_info, properties);
}
+
+ qtdemux->cenc_aux_sample_count = sample_count;
}
static void
0xa2, 0x44, 0x6c, 0x42, 0x7c, 0x64, 0x8d, 0xf4
};
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ static const guint8 spherical_uuid[] = {
+ 0xff, 0xcc, 0x82, 0x63, 0xf8, 0x55, 0x4a, 0x93,
+ 0x88, 0x14, 0x58, 0x7a, 0x02, 0x52, 0x1f, 0xdd
+ };
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
guint offset;
/* counts as header data */
return;
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (memcmp (buffer + offset, spherical_uuid, 16) == 0) {
+ const char *contents;
+
+ GST_DEBUG_OBJECT (qtdemux, "spherical uuid was found");
+ contents = (char *) (buffer + offset + 16);
+ GST_DEBUG_OBJECT (qtdemux, "contents: %s\n", contents);
+
+ if (qtdemux->spherical_metadata)
+ _parse_spatial_video_metadata_from_xml_string (qtdemux, contents);
+
+ return;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
if (memcmp (buffer + offset, xmp_uuid, 16) == 0) {
GstBuffer *buf;
GstTagList *taglist;
qtdemux->segment.stop = fixeddur;
}
}
- for (i = 0; i < qtdemux->n_streams; i++) {
- QtDemuxStream *stream = qtdemux->streams[i];
- if (stream) {
- movdur = GSTTIME_TO_QTSTREAMTIME (stream, duration);
- if (movdur > stream->duration) {
- GST_DEBUG_OBJECT (qtdemux,
- "Updating stream #%d duration to %" GST_TIME_FORMAT, i,
- GST_TIME_ARGS (duration));
- stream->duration = movdur;
- if (stream->dummy_segment) {
- /* Update all dummy values to new duration */
- stream->segments[0].stop_time = duration;
- stream->segments[0].duration = duration;
- stream->segments[0].media_stop = duration;
-
- /* let downstream know we possibly have a new stop time */
- if (stream->segment_index != -1) {
- GstClockTime pos;
-
- if (qtdemux->segment.rate >= 0) {
- pos = stream->segment.start;
- } else {
- pos = stream->segment.stop;
- }
- gst_qtdemux_stream_update_segment (qtdemux, stream,
- stream->segment_index, pos, NULL, NULL);
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ movdur = GSTTIME_TO_QTSTREAMTIME (stream, duration);
+ if (movdur > stream->duration) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "Updating stream #%d duration to %" GST_TIME_FORMAT, i,
+ GST_TIME_ARGS (duration));
+ stream->duration = movdur;
+ /* internal duration tracking state has been updated above, so */
+ /* preserve an open-ended dummy segment rather than repeatedly updating
+ * it and spamming downstream accordingly with segment events */
+ if (stream->dummy_segment &&
+ GST_CLOCK_TIME_IS_VALID (stream->segments[0].duration)) {
+ /* Update all dummy values to new duration */
+ stream->segments[0].stop_time = duration;
+ stream->segments[0].duration = duration;
+ stream->segments[0].media_stop = duration;
+
+ /* let downstream know we possibly have a new stop time */
+ if (stream->segment_index != -1) {
+ GstClockTime pos;
+
+ if (qtdemux->segment.rate >= 0) {
+ pos = stream->segment.start;
+ } else {
+ pos = stream->segment.stop;
}
+
+ gst_qtdemux_stream_update_segment (qtdemux, stream,
+ stream->segment_index, pos, NULL, NULL);
}
}
}
gboolean ismv = FALSE;
gint64 initial_offset;
- GST_LOG_OBJECT (qtdemux, "parsing trun stream %d; "
+ GST_LOG_OBJECT (qtdemux, "parsing trun track-id %d; "
"default dur %d, size %d, flags 0x%x, base offset %" G_GINT64_FORMAT ", "
"decode ts %" G_GINT64_FORMAT, stream->track_id, d_sample_duration,
d_sample_size, d_sample_flags, *base_offset, decode_ts);
return NULL;
}
- /* try to get it fast and simple */
- if (G_LIKELY (id <= qtdemux->n_streams)) {
- stream = qtdemux->streams[id - 1];
- if (G_LIKELY (stream->track_id == id))
- return stream;
- }
-
- /* linear search otherwise */
- for (i = 0; i < qtdemux->n_streams; i++) {
- stream = qtdemux->streams[i];
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
if (stream->track_id == id)
return stream;
}
if (qtdemux->mss_mode) {
/* mss should have only 1 stream anyway */
- return qtdemux->streams[0];
+ return QTDEMUX_NTH_STREAM (qtdemux, 0);
}
return NULL;
/* Push an event containing the pssh box onto the queues of all streams. */
event = gst_event_new_protection (sysid_string, pssh,
(parent_box_type == FOURCC_moov) ? "isobmff/moov" : "isobmff/moof");
- for (i = 0; i < qtdemux->n_streams; ++i) {
- g_queue_push_tail (&qtdemux->streams[i]->protection_scheme_event_queue,
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ GST_TRACE_OBJECT (qtdemux,
+ "adding protection event for stream %s and system %s",
+ stream->stream_id, sysid_string);
+ g_queue_push_tail (&stream->protection_scheme_event_queue,
gst_event_ref (event));
}
g_free (sysid_string);
guint32 ds_size = 0, ds_duration = 0, ds_flags = 0;
gint64 base_offset, running_offset;
guint32 frag_num;
+ GstClockTime min_dts = GST_CLOCK_TIME_NONE;
/* NOTE @stream ignored */
if (G_UNLIKELY (base_offset < -1))
goto lost_offset;
- if (qtdemux->upstream_format_is_time)
- gst_qtdemux_stream_flush_samples_data (qtdemux, stream);
+ min_dts = MIN (min_dts, QTSTREAMTIME_TO_GSTTIME (stream, decode_time));
+
+ if (!qtdemux->pullbased) {
+ /* Sample tables can grow enough to be problematic if the system memory
+ * is very low (e.g. embedded devices) and the videos very long
+ * (~8 MiB/hour for 25-30 fps video + typical AAC audio frames).
+ * Fortunately, we can easily discard them for each new fragment when
+ * we know qtdemux will not receive seeks outside of the current fragment.
+ * adaptivedemux honors this assumption.
+ * This optimization is also useful for applications that use qtdemux as
+ * a push-based simple demuxer, like Media Source Extensions. */
+ gst_qtdemux_stream_flush_samples_data (stream);
+ }
/* initialise moof sample data */
stream->n_samples_moof = 0;
pssh_node = qtdemux_tree_get_sibling_by_type (pssh_node, FOURCC_pssh);
}
+ if (!qtdemux->upstream_format_is_time && !qtdemux->first_moof_already_parsed
+ && !qtdemux->received_seek && GST_CLOCK_TIME_IS_VALID (min_dts)
+ && min_dts != 0) {
+ /* Unless the user has explicitly requested another seek, perform an
+ * internal seek to the time specified in the tfdt.
+ *
+ * This way if the user opens a file where the first tfdt is 1 hour
+ * into the presentation, they will not have to wait 1 hour for run
+ * time to catch up and actual playback to start. */
+ gint i;
+
+ GST_DEBUG_OBJECT (qtdemux, "First fragment has a non-zero tfdt, "
+ "performing an internal seek to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min_dts));
+
+ qtdemux->segment.start = min_dts;
+ qtdemux->segment.time = qtdemux->segment.position = min_dts;
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ stream->time_position = min_dts;
+ }
+
+ /* Before this code was run a segment was already sent when the moov was
+ * parsed... which is OK -- some apps (mostly tests) expect a segment to
+ * be emitted after a moov, and we can emit a second segment anyway for
+ * special cases like this. */
+ qtdemux->need_segment = TRUE;
+ }
+
+ qtdemux->first_moof_already_parsed = TRUE;
+
g_node_destroy (moof_node);
return TRUE;
break;
case FOURCC_mdat:
case FOURCC_free:
+ case FOURCC_skip:
case FOURCC_wide:
case FOURCC_PICT:
case FOURCC_pnot:
beach:
if (ret == GST_FLOW_EOS && (qtdemux->got_moov || qtdemux->media_caps)) {
/* digested all data, show what we have */
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (qtdemux->spherical_metadata)
+ _send_spherical_metadata_msg_to_bus (qtdemux);
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
qtdemux_prepare_streams (qtdemux);
+ QTDEMUX_EXPOSE_LOCK (qtdemux);
ret = qtdemux_expose_streams (qtdemux);
+ QTDEMUX_EXPOSE_UNLOCK (qtdemux);
qtdemux->state = QTDEMUX_STATE_MOVIE;
GST_DEBUG_OBJECT (qtdemux, "switching state to STATE_MOVIE (%d)",
static GstFlowReturn
gst_qtdemux_seek_to_previous_keyframe (GstQTDemux * qtdemux)
{
- guint8 n = 0;
guint32 seg_idx = 0, k_index = 0;
guint32 ref_seg_idx, ref_k_index;
GstClockTime k_pos = 0, last_stop = 0;
QtDemuxStream *ref_str = NULL;
guint64 seg_media_start_mov; /* segment media start time in mov format */
guint64 target_ts;
+ gint i;
/* Now we choose an arbitrary stream, get the previous keyframe timestamp
* and finally align all the other streams on that timestamp with their
* respective keyframes */
- for (n = 0; n < qtdemux->n_streams; n++) {
- QtDemuxStream *str = qtdemux->streams[n];
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *str = QTDEMUX_NTH_STREAM (qtdemux, i);
/* No candidate yet, take the first stream */
if (!ref_str) {
seg_media_start_mov = seg->trak_media_start;
GST_LOG_OBJECT (qtdemux, "keyframe index %u ts %" G_GUINT64_FORMAT
- " seg start %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT "\n",
+ " seg start %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT,
k_index, target_ts, seg_media_start_mov,
GST_TIME_ARGS (seg->media_start));
ref_k_index = k_index;
/* Align them all on this */
- for (n = 0; n < qtdemux->n_streams; n++) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
guint32 index = 0;
GstClockTime seg_time = 0;
- QtDemuxStream *str = qtdemux->streams[n];
+ QtDemuxStream *str = QTDEMUX_NTH_STREAM (qtdemux, i);
/* aligning reference stream again might lead to backing up to yet another
* keyframe (due to timestamp rounding issues),
seg_idx = ref_seg_idx;
seg = &str->segments[seg_idx];
k_index = ref_k_index;
- GST_DEBUG_OBJECT (qtdemux, "reference stream %d segment %d, "
- "sample at index %d", n, ref_str->segment_index, k_index);
+ GST_DEBUG_OBJECT (qtdemux, "reference track-id %u segment %d, "
+ "sample at index %d", str->track_id, ref_str->segment_index, k_index);
} else {
seg_idx = gst_qtdemux_find_segment (qtdemux, str, k_pos);
GST_DEBUG_OBJECT (qtdemux,
- "stream %d align segment %d for keyframe pos %" GST_TIME_FORMAT, n,
- seg_idx, GST_TIME_ARGS (k_pos));
+ "track-id %u align segment %d for keyframe pos %" GST_TIME_FORMAT,
+ str->track_id, seg_idx, GST_TIME_ARGS (k_pos));
/* get segment and time in the segment */
seg = &str->segments[seg_idx];
/* get the index of the sample with media time */
index = gst_qtdemux_find_index_linear (qtdemux, str, seg_time);
GST_DEBUG_OBJECT (qtdemux,
- "stream %d sample for %" GST_TIME_FORMAT " at %u", n,
+ "track-id %u sample for %" GST_TIME_FORMAT " at %u", str->track_id,
GST_TIME_ARGS (seg_time), index);
/* find previous keyframe */
/* Now seek back in time */
gst_qtdemux_move_stream (qtdemux, str, k_index);
- GST_DEBUG_OBJECT (qtdemux, "stream %d keyframe at %u, time position %"
- GST_TIME_FORMAT " playing from sample %u to %u", n, k_index,
+ GST_DEBUG_OBJECT (qtdemux, "track-id %u keyframe at %u, time position %"
+ GST_TIME_FORMAT " playing from sample %u to %u", str->track_id, k_index,
GST_TIME_ARGS (str->time_position), str->from_sample, str->to_sample);
}
/* now prepare and send the segment */
if (stream->pad) {
event = gst_event_new_segment (&stream->segment);
- if (qtdemux->segment_seqnum) {
+ if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID) {
gst_event_set_seqnum (event, qtdemux->segment_seqnum);
}
gst_pad_push_event (stream->pad, event);
* (FIXME: doesn't seem to work so well with ismv and wmv, as no parser; the
* tfra entries tells us which trun/sample the key unit is in, but we don't
* make use of this additional information at the moment) */
- if (qtdemux->fragmented) {
+ if (qtdemux->fragmented && !qtdemux->fragmented_seek_pending) {
stream->to_sample = G_MAXUINT32;
return TRUE;
+ } else {
+ /* well, it will be taken care of below */
+ qtdemux->fragmented_seek_pending = FALSE;
+ /* FIXME ideally the do_fragmented_seek can be done right here,
+ * rather than at loop level
+ * (which might even allow handling edit lists in a fragmented file) */
}
/* We don't need to look for a sample in push-based */
if (G_UNLIKELY (stream->segment_index != seg_idx))
gst_qtdemux_activate_segment (qtdemux, stream, seg_idx, time_position);
- if (G_UNLIKELY (QTSEGMENT_IS_EMPTY (&stream->segments[stream->
- segment_index]))) {
+ if (G_UNLIKELY (QTSEGMENT_IS_EMPTY (&stream->
+ segments[stream->segment_index]))) {
QtDemuxSegment *seg = &stream->segments[stream->segment_index];
GST_LOG_OBJECT (qtdemux, "Empty segment activated,"
{
gint i;
- if (demux->n_streams <= 1)
+ if (QTDEMUX_N_STREAMS (demux) <= 1)
return;
- for (i = 0; i < demux->n_streams; i++) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
QtDemuxStream *stream;
GstClockTime end_time;
- stream = demux->streams[i];
+ stream = QTDEMUX_NTH_STREAM (demux, i);
if (!stream->pad)
continue;
GST_PAD_NAME (stream->pad));
stream->sent_eos = TRUE;
event = gst_event_new_eos ();
- if (demux->segment_seqnum)
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
gst_event_set_seqnum (event, demux->segment_seqnum);
gst_pad_push_event (stream->pad, event);
}
return buffer;
}
- /* the input buffer metadata must be writable,
- * but time/duration etc not yet set and need not be preserved */
- static GstBuffer *
- gst_qtdemux_process_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
- GstBuffer * buf)
+ /* Expand CEA-608 closed-caption byte pairs into SMPTE S334-1 Annex A
+  * triplets: each output triplet is a field/offset marker byte (0x80 for
+  * field 1, 0x00 for field 2; line offset always 0 -- see FIXME below)
+  * followed by the original two caption bytes.  Returns a newly-allocated
+  * buffer owned by the caller; its size (ccpair_size / 2 * 3) is stored
+  * in @res. */
+ static guint8 *
+ convert_to_s334_1a (const guint8 * ccpair, guint8 ccpair_size, guint field,
+ gsize * res)
+ {
+ guint8 *storage;
+ gsize i;
+
+ /* We are converting from pairs to triplets */
+ *res = ccpair_size / 2 * 3;
+ storage = g_malloc (*res);
+ for (i = 0; i * 2 < ccpair_size; i += 1) {
+ /* FIXME: Use line offset 0 as we simply can't know here */
+ if (field == 1)
+ storage[i * 3] = 0x80 | 0x00;
+ else
+ storage[i * 3] = 0x00 | 0x00;
+ storage[i * 3 + 1] = ccpair[i * 2];
+ storage[i * 3 + 2] = ccpair[i * 2 + 1];
+ }
+
+ return storage;
+ }
+
+ /* Extract closed-caption payload from a clcp sample.  For c608 tracks the
+  * sample may contain [cdat] (field 1) and/or [cdt2] (field 2) atoms, which
+  * are converted to S334-1 Annex A triplets and concatenated (cdat first);
+  * for c708 tracks the [ccdp] payload is copied verbatim.  Returns a
+  * newly-allocated buffer (size in @cclen) owned by the caller, or NULL on
+  * malformed/unknown data. */
+ static guint8 *
+ extract_cc_from_data (QtDemuxStream * stream, const guint8 * data, gsize size,
+ gsize * cclen)
{
- GstMapInfo map;
- guint nsize = 0;
- gchar *str;
+ guint8 *res = NULL;
+ guint32 atom_length, fourcc;
+ QtDemuxStreamStsdEntry *stsd_entry;
- /* not many cases for now */
- if (G_UNLIKELY (CUR_STREAM (stream)->fourcc == FOURCC_mp4s)) {
+ GST_MEMDUMP ("caption atom", data, size);
+
+ /* There might be multiple atoms */
+
+ *cclen = 0;
+ /* need at least an atom header (32-bit size + fourcc) */
+ if (size < 8)
+ goto invalid_cdat;
+ atom_length = QT_UINT32 (data);
+ fourcc = QT_FOURCC (data + 4);
+ /* atom_length == 8 would mean an empty payload */
+ if (G_UNLIKELY (atom_length > size || atom_length == 8))
+ goto invalid_cdat;
+
+ /* NOTE(review): leftover debug trace? consider removing */
+ GST_DEBUG_OBJECT (stream->pad, "here");
+
+ /* Check if we have something compatible */
+ stsd_entry = CUR_STREAM (stream);
+ switch (stsd_entry->fourcc) {
+ case FOURCC_c608:{
+ guint8 *cdat = NULL, *cdt2 = NULL;
+ gsize cdat_size = 0, cdt2_size = 0;
+ /* Should be cdat or cdt2 */
+ if (fourcc != FOURCC_cdat && fourcc != FOURCC_cdt2) {
+ GST_WARNING_OBJECT (stream->pad,
+ "Unknown data atom (%" GST_FOURCC_FORMAT ") for CEA608",
+ GST_FOURCC_ARGS (fourcc));
+ goto invalid_cdat;
+ }
+
+ /* Convert to S334-1 Annex A byte triplets */
+ if (fourcc == FOURCC_cdat)
+ cdat = convert_to_s334_1a (data + 8, atom_length - 8, 1, &cdat_size);
+ else
+ cdt2 = convert_to_s334_1a (data + 8, atom_length - 8, 2, &cdt2_size);
+ GST_DEBUG_OBJECT (stream->pad, "size:%" G_GSIZE_FORMAT " atom_length:%u",
+ size, atom_length);
+
+ /* Check for another atom? (one cdat and one cdt2 may coexist) */
+ if (size > atom_length + 8) {
+ guint32 new_atom_length = QT_UINT32 (data + atom_length);
+ if (size >= atom_length + new_atom_length) {
+ fourcc = QT_FOURCC (data + atom_length + 4);
+ if (fourcc == FOURCC_cdat) {
+ if (cdat == NULL)
+ cdat =
+ convert_to_s334_1a (data + atom_length + 8,
+ new_atom_length - 8, 1, &cdat_size);
+ else
+ GST_WARNING_OBJECT (stream->pad,
+ "Got multiple [cdat] atoms in a c608 sample. This is unsupported for now. Please file a bug");
+ } else {
+ if (cdt2 == NULL)
+ cdt2 =
+ convert_to_s334_1a (data + atom_length + 8,
+ new_atom_length - 8, 2, &cdt2_size);
+ else
+ GST_WARNING_OBJECT (stream->pad,
+ "Got multiple [cdt2] atoms in a c608 sample. This is unsupported for now. Please file a bug");
+ }
+ }
+ }
+
+ /* concatenate field-1 then field-2 triplets into one output buffer */
+ *cclen = cdat_size + cdt2_size;
+ res = g_malloc (*cclen);
+ if (cdat_size)
+ memcpy (res, cdat, cdat_size);
+ if (cdt2_size)
+ memcpy (res + cdat_size, cdt2, cdt2_size);
+ g_free (cdat);
+ g_free (cdt2);
+ }
+ break;
+ case FOURCC_c708:
+ if (fourcc != FOURCC_ccdp) {
+ GST_WARNING_OBJECT (stream->pad,
+ "Unknown data atom (%" GST_FOURCC_FORMAT ") for CEA708",
+ GST_FOURCC_ARGS (fourcc));
+ goto invalid_cdat;
+ }
+ *cclen = atom_length - 8;
+ /* NOTE(review): g_memdup() takes a guint length and is deprecated in
+  * GLib >= 2.68 in favour of g_memdup2() (overflow-safe) -- consider
+  * updating when the GLib requirement allows */
+ res = g_memdup (data + 8, *cclen);
+ break;
+ default:
+ /* Keep this here in case other closed caption formats are added */
+ g_assert_not_reached ();
+ break;
+ }
+
+ GST_MEMDUMP ("Output", res, *cclen);
+ return res;
+
+ /* Errors */
+ invalid_cdat:
+ GST_WARNING ("[cdat] atom is too small or invalid");
+ return NULL;
+ }
+
+ /* the input buffer metadata must be writable,
+ * but time/duration etc not yet set and need not be preserved */
+ static GstBuffer *
+ gst_qtdemux_process_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstBuffer * buf)
+ {
+ GstMapInfo map;
+ guint nsize = 0;
+ gchar *str;
+
+ /* not many cases for now */
+ if (G_UNLIKELY (CUR_STREAM (stream)->fourcc == FOURCC_mp4s)) {
/* send a one time dvd clut event */
if (stream->pending_event && stream->pad)
gst_pad_push_event (stream->pad, stream->pending_event);
if (G_UNLIKELY (stream->subtype != FOURCC_text
&& stream->subtype != FOURCC_sbtl &&
- stream->subtype != FOURCC_subp)) {
+ stream->subtype != FOURCC_subp && stream->subtype != FOURCC_clcp)) {
return buf;
}
return buf;
}
+ if (stream->subtype == FOURCC_clcp) {
+ guint8 *cc;
+ gsize cclen = 0;
+ /* For closed caption, we need to extract the information from the
+ * [cdat],[cdt2] or [ccdp] atom */
+ cc = extract_cc_from_data (stream, map.data, map.size, &cclen);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ if (cc) {
+ buf = _gst_buffer_new_wrapped (cc, cclen, g_free);
+ } else {
+ /* Conversion failed or there's nothing */
+ buf = NULL;
+ }
+ return buf;
+ }
+
nsize = GST_READ_UINT16_BE (map.data);
nsize = MIN (nsize, map.size - 2);
return buf;
}
+ /* Finalise @buf and push it on @stream's source pad.  Takes ownership of
+  * @buf.  Clips the buffer against the stream segment when needed, manages
+  * the DISCONT flag, flushes queued protection (pssh) events, attaches the
+  * per-sample CENC crypto metadata, realigns the buffer memory if the
+  * stream requires it, and records the end position (pts + duration) in
+  * the stream segment for later GAP-event decisions.  Returns the flow
+  * result of gst_pad_push() (GST_FLOW_OK if the buffer was clipped away). */
+ static GstFlowReturn
+ gst_qtdemux_push_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstBuffer * buf)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstClockTime pts, duration;
+
+ if (stream->need_clip)
+ buf = gst_qtdemux_clip_buffer (qtdemux, stream, buf);
+
+ /* clipping may have consumed the buffer entirely */
+ if (G_UNLIKELY (buf == NULL))
+ goto exit;
+
+ if (G_UNLIKELY (stream->discont)) {
+ GST_LOG_OBJECT (qtdemux, "marking discont buffer");
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ GST_LOG_OBJECT (qtdemux,
+ "Pushing buffer with dts %" GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT
+ ", duration %" GST_TIME_FORMAT " on pad %s",
+ GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
+ GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_PAD_NAME (stream->pad));
+
+ if (stream->protected && stream->protection_scheme_type == FOURCC_cenc) {
+ GstStructure *crypto_info;
+ QtDemuxCencSampleSetInfo *info =
+ (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+ gint index;
+ GstEvent *event;
+
+ /* deliver any pending protection-system events before the first
+  * protected buffer */
+ while ((event = g_queue_pop_head (&stream->protection_scheme_event_queue))) {
+ GST_TRACE_OBJECT (stream->pad, "pushing protection event: %"
+ GST_PTR_FORMAT, event);
+ gst_pad_push_event (stream->pad, event);
+ }
+
+ if (info->crypto_info == NULL) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "cenc metadata hasn't been parsed yet, pushing buffer as if it wasn't encrypted");
+ } else {
+ /* The end of the crypto_info array matches our n_samples position,
+ * so count backward from there */
+ index = stream->sample_index - stream->n_samples + info->crypto_info->len;
+ if (G_LIKELY (index >= 0 && index < info->crypto_info->len)) {
+ /* steal structure from array */
+ crypto_info = g_ptr_array_index (info->crypto_info, index);
+ g_ptr_array_index (info->crypto_info, index) = NULL;
+ GST_LOG_OBJECT (qtdemux, "attaching cenc metadata [%u/%u]", index,
+ info->crypto_info->len);
+ if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info))
+ GST_ERROR_OBJECT (qtdemux,
+ "failed to attach cenc metadata to buffer");
+ } else {
+ GST_INFO_OBJECT (qtdemux, "No crypto info with index %d and sample %d",
+ index, stream->sample_index);
+ }
+ }
+ }
+
+ if (stream->alignment > 1)
+ buf = gst_qtdemux_align_buffer (qtdemux, buf, stream->alignment);
+
+ /* read the timestamps before gst_pad_push() takes our reference away */
+ pts = GST_BUFFER_PTS (buf);
+ duration = GST_BUFFER_DURATION (buf);
+
+ ret = gst_pad_push (stream->pad, buf);
+
+ if (GST_CLOCK_TIME_IS_VALID (pts) && GST_CLOCK_TIME_IS_VALID (duration)) {
+ /* mark position in stream, we'll need this to know when to send GAP event */
+ stream->segment.position = pts + duration;
+ }
+
+ exit:
+
+ return ret;
+ }
+
+ /* Push @buf downstream, first splitting CEA-608 caption buffers when
+  * required.  For c608 streams flagged need_split, the incoming buffer
+  * (S334-1 Annex A triplets, field flagged by bit 0x80 of the first byte)
+  * is redistributed into one output buffer per video frame -- each with at
+  * most one field-1 and one field-2 triplet, padding a field with an empty
+  * triplet when it has no data -- timed from the buffer PTS at the video
+  * frame rate.  All other buffers are forwarded unchanged.  Takes
+  * ownership of @buf. */
+ static GstFlowReturn
+ gst_qtdemux_split_and_push_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstBuffer * buf)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ if (stream->subtype == FOURCC_clcp
+ && CUR_STREAM (stream)->fourcc == FOURCC_c608 && stream->need_split) {
+ GstMapInfo map;
+ guint n_output_buffers, n_field1 = 0, n_field2 = 0;
+ guint n_triplets, i;
+ guint field1_off = 0, field2_off = 0;
+
+ /* We have to split CEA608 buffers so that each outgoing buffer contains
+ * one byte pair per field according to the framerate of the video track.
+ *
+ * If there is only a single byte pair per field we don't have to do
+ * anything
+ */
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ /* count how many triplets belong to each field */
+ n_triplets = map.size / 3;
+ for (i = 0; i < n_triplets; i++) {
+ if (map.data[3 * i] & 0x80)
+ n_field1++;
+ else
+ n_field2++;
+ }
+
+ g_assert (n_field1 || n_field2);
+
+ /* If there's more than 1 frame we have to split, otherwise we can just
+ * pass through */
+ if (n_field1 > 1 || n_field2 > 1) {
+ /* one output buffer per video frame covered by this buffer's duration */
+ n_output_buffers =
+ gst_util_uint64_scale (GST_BUFFER_DURATION (buf),
+ CUR_STREAM (stream)->fps_n, GST_SECOND * CUR_STREAM (stream)->fps_d);
+
+ for (i = 0; i < n_output_buffers; i++) {
+ GstBuffer *outbuf =
+ gst_buffer_new_and_alloc ((n_field1 ? 3 : 0) + (n_field2 ? 3 : 0));
+ GstMapInfo outmap;
+ guint8 *outptr;
+
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+ outptr = outmap.data;
+
+ if (n_field1) {
+ gboolean found = FALSE;
+
+ /* take the next field-1 triplet (marker bit 0x80 set) */
+ while (map.data + field1_off < map.data + map.size) {
+ if (map.data[field1_off] & 0x80) {
+ memcpy (outptr, &map.data[field1_off], 3);
+ field1_off += 3;
+ found = TRUE;
+ break;
+ }
+ field1_off += 3;
+ }
+
+ if (!found) {
+ /* no data left for this field: emit an empty field-1 triplet */
+ const guint8 empty[] = { 0x80, 0x80, 0x80 };
+
+ memcpy (outptr, empty, 3);
+ }
+
+ outptr += 3;
+ }
+
+ if (n_field2) {
+ gboolean found = FALSE;
+
+ /* take the next field-2 triplet (marker bit 0x80 clear) */
+ while (map.data + field2_off < map.data + map.size) {
+ if ((map.data[field2_off] & 0x80) == 0) {
+ memcpy (outptr, &map.data[field2_off], 3);
+ field2_off += 3;
+ found = TRUE;
+ break;
+ }
+ field2_off += 3;
+ }
+
+ if (!found) {
+ /* no data left for this field: emit an empty field-2 triplet */
+ const guint8 empty[] = { 0x00, 0x80, 0x80 };
+
+ memcpy (outptr, empty, 3);
+ }
+
+ outptr += 3;
+ }
+
+ gst_buffer_unmap (outbuf, &outmap);
+
+ /* spread the output buffers over the original duration, one frame
+  * apart at the video frame rate */
+ GST_BUFFER_PTS (outbuf) =
+ GST_BUFFER_PTS (buf) + gst_util_uint64_scale (i,
+ GST_SECOND * CUR_STREAM (stream)->fps_d,
+ CUR_STREAM (stream)->fps_n);
+ GST_BUFFER_DURATION (outbuf) =
+ gst_util_uint64_scale (GST_SECOND, CUR_STREAM (stream)->fps_d,
+ CUR_STREAM (stream)->fps_n);
+ GST_BUFFER_OFFSET (outbuf) = -1;
+ GST_BUFFER_OFFSET_END (outbuf) = -1;
+
+ ret = gst_qtdemux_push_buffer (qtdemux, stream, outbuf);
+
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED)
+ break;
+ }
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ } else {
+ gst_buffer_unmap (buf, &map);
+ ret = gst_qtdemux_push_buffer (qtdemux, stream, buf);
+ }
+ } else {
+ ret = gst_qtdemux_push_buffer (qtdemux, stream, buf);
+ }
+
+ return ret;
+ }
+
/* Sets a buffer's attributes properly and pushes it downstream.
* Also checks for additional actions and custom processing that may
* need to be done first.
GST_BUFFER_OFFSET (buf) = -1;
GST_BUFFER_OFFSET_END (buf) = -1;
+ if (!keyframe) {
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ stream->on_keyframe = FALSE;
+ } else {
+ stream->on_keyframe = TRUE;
+ }
+
if (G_UNLIKELY (CUR_STREAM (stream)->rgb8_palette))
gst_buffer_append_memory (buf,
gst_memory_ref (CUR_STREAM (stream)->rgb8_palette));
}
#endif
- if (stream->need_clip)
- buf = gst_qtdemux_clip_buffer (qtdemux, stream, buf);
-
- if (G_UNLIKELY (buf == NULL))
- goto exit;
-
- if (G_UNLIKELY (stream->discont)) {
- GST_LOG_OBJECT (qtdemux, "marking discont buffer");
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
- stream->discont = FALSE;
- } else {
- GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
- }
-
- if (!keyframe) {
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
- stream->on_keyframe = FALSE;
- } else {
- stream->on_keyframe = TRUE;
- }
-
-
- GST_LOG_OBJECT (qtdemux,
- "Pushing buffer with dts %" GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT
- ", duration %" GST_TIME_FORMAT " on pad %s", GST_TIME_ARGS (dts),
- GST_TIME_ARGS (pts), GST_TIME_ARGS (duration),
- GST_PAD_NAME (stream->pad));
-
- if (stream->protected && stream->protection_scheme_type == FOURCC_cenc) {
- GstStructure *crypto_info;
- QtDemuxCencSampleSetInfo *info =
- (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
- gint index;
- GstEvent *event;
-
- while ((event = g_queue_pop_head (&stream->protection_scheme_event_queue))) {
- gst_pad_push_event (stream->pad, event);
- }
-
- if (info->crypto_info == NULL) {
- GST_DEBUG_OBJECT (qtdemux, "cenc metadata hasn't been parsed yet");
- gst_buffer_unref (buf);
- goto exit;
- }
-
- /* The end of the crypto_info array matches our n_samples position,
- * so count backward from there */
- index = stream->sample_index - stream->n_samples + info->crypto_info->len;
- if (G_LIKELY (index >= 0 && index < info->crypto_info->len)) {
- /* steal structure from array */
- crypto_info = g_ptr_array_index (info->crypto_info, index);
- g_ptr_array_index (info->crypto_info, index) = NULL;
- GST_LOG_OBJECT (qtdemux, "attaching cenc metadata [%u/%u]", index,
- info->crypto_info->len);
- if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info))
- GST_ERROR_OBJECT (qtdemux, "failed to attach cenc metadata to buffer");
- } else {
- GST_INFO_OBJECT (qtdemux, "No crypto info with index %d and sample %d",
- index, stream->sample_index);
- }
- }
-
- if (stream->alignment > 1)
- buf = gst_qtdemux_align_buffer (qtdemux, buf, stream->alignment);
-
- ret = gst_pad_push (stream->pad, buf);
-
- if (GST_CLOCK_TIME_IS_VALID (pts) && GST_CLOCK_TIME_IS_VALID (duration)) {
- /* mark position in stream, we'll need this to know when to send GAP event */
- stream->segment.position = pts + duration;
- }
+ ret = gst_qtdemux_split_and_push_buffer (qtdemux, stream, buf);
exit:
return ret;
gst_qtdemux_do_fragmented_seek (GstQTDemux * qtdemux)
{
const QtDemuxRandomAccessEntry *best_entry = NULL;
- guint i;
+ gint i;
GST_OBJECT_LOCK (qtdemux);
- g_assert (qtdemux->n_streams > 0);
+ g_assert (QTDEMUX_N_STREAMS (qtdemux) > 0);
- for (i = 0; i < qtdemux->n_streams; i++) {
+ /* first see if we can determine where to go to using mfra,
+ * before we start clearing things */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
const QtDemuxRandomAccessEntry *entry;
QtDemuxStream *stream;
gboolean is_audio_or_video;
- stream = qtdemux->streams[i];
-
- g_free (stream->samples);
- stream->samples = NULL;
- stream->n_samples = 0;
- stream->stbl_index = -1; /* no samples have yet been parsed */
- stream->sample_index = -1;
-
- if (stream->protection_scheme_info) {
- /* Clear out any old cenc crypto info entries as we'll move to a new moof */
- if (stream->protection_scheme_type == FOURCC_cenc) {
- QtDemuxCencSampleSetInfo *info =
- (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
- if (info->crypto_info) {
- g_ptr_array_free (info->crypto_info, TRUE);
- info->crypto_info = NULL;
- }
- }
- }
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
if (stream->ra_entries == NULL)
continue;
best_entry = entry;
}
+ /* no luck, will handle seek otherwise */
if (best_entry == NULL) {
GST_OBJECT_UNLOCK (qtdemux);
return FALSE;
}
+ /* ok, now we can prepare for processing as of located moof */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream;
+
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ g_free (stream->samples);
+ stream->samples = NULL;
+ stream->n_samples = 0;
+ stream->stbl_index = -1; /* no samples have yet been parsed */
+ stream->sample_index = -1;
+
+ if (stream->protection_scheme_info) {
+ /* Clear out any old cenc crypto info entries as we'll move to a new moof */
+ if (stream->protection_scheme_type == FOURCC_cenc) {
+ QtDemuxCencSampleSetInfo *info =
+ (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+ if (info->crypto_info) {
+ g_ptr_array_free (info->crypto_info, TRUE);
+ info->crypto_info = NULL;
+ }
+ }
+ }
+ }
+
GST_INFO_OBJECT (qtdemux, "seek to %" GST_TIME_FORMAT ", best fragment "
"moof offset: %" G_GUINT64_FORMAT ", ts %" GST_TIME_FORMAT,
- GST_TIME_ARGS (qtdemux->streams[0]->time_position),
+ GST_TIME_ARGS (QTDEMUX_NTH_STREAM (qtdemux, 0)->time_position),
best_entry->moof_offset, GST_TIME_ARGS (best_entry->ts));
qtdemux->moof_offset = best_entry->moof_offset;
{
GstFlowReturn ret = GST_FLOW_OK;
GstBuffer *buf = NULL;
- QtDemuxStream *stream;
+ QtDemuxStream *stream, *target_stream = NULL;
GstClockTime min_time;
guint64 offset = 0;
GstClockTime dts = GST_CLOCK_TIME_NONE;
guint sample_size = 0;
gboolean empty = 0;
guint size;
- gint index;
gint i;
- gst_qtdemux_push_pending_newsegment (qtdemux);
-
if (qtdemux->fragmented_seek_pending) {
GST_INFO_OBJECT (qtdemux, "pending fragmented seek");
- gst_qtdemux_do_fragmented_seek (qtdemux);
- GST_INFO_OBJECT (qtdemux, "fragmented seek done!");
- qtdemux->fragmented_seek_pending = FALSE;
+ if (gst_qtdemux_do_fragmented_seek (qtdemux)) {
+ GST_INFO_OBJECT (qtdemux, "fragmented seek done!");
+ qtdemux->fragmented_seek_pending = FALSE;
+ } else {
+ GST_INFO_OBJECT (qtdemux, "fragmented seek still pending");
+ }
}
/* Figure out the next stream sample to output, min_time is expressed in
* global time and runs over the edit list segments. */
min_time = G_MAXUINT64;
- index = -1;
- for (i = 0; i < qtdemux->n_streams; i++) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
GstClockTime position;
- stream = qtdemux->streams[i];
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
position = stream->time_position;
/* position of -1 is EOS */
if (position != GST_CLOCK_TIME_NONE && position < min_time) {
min_time = position;
- index = i;
+ target_stream = stream;
}
}
/* all are EOS */
- if (G_UNLIKELY (index == -1)) {
+ if (G_UNLIKELY (target_stream == NULL)) {
GST_DEBUG_OBJECT (qtdemux, "all streams are EOS");
goto eos;
}
&& ((qtdemux->segment.rate >= 0 && qtdemux->segment.stop <= min_time)
|| (qtdemux->segment.rate < 0
&& qtdemux->segment.start > min_time))
- && qtdemux->streams[index]->on_keyframe)) {
+ && target_stream->on_keyframe)) {
GST_DEBUG_OBJECT (qtdemux, "we reached the end of our segment.");
- qtdemux->streams[index]->time_position = GST_CLOCK_TIME_NONE;
+ target_stream->time_position = GST_CLOCK_TIME_NONE;
goto eos_stream;
}
/* gap events for subtitle streams */
- for (i = 0; i < qtdemux->n_streams; i++) {
- stream = qtdemux->streams[i];
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
if (stream->pad && (stream->subtype == FOURCC_subp
|| stream->subtype == FOURCC_text
|| stream->subtype == FOURCC_sbtl)) {
}
}
- stream = qtdemux->streams[index];
+ stream = target_stream;
/* fetch info for the current sample of this stream */
if (G_UNLIKELY (!gst_qtdemux_prepare_current_sample (qtdemux, stream, &empty,
&offset, &sample_size, &dts, &pts, &duration, &keyframe)))
gst_qtdemux_stream_check_and_change_stsd_index (qtdemux, stream);
if (stream->new_caps) {
gst_qtdemux_configure_stream (qtdemux, stream);
- qtdemux_do_allocation (qtdemux, stream);
+ qtdemux_do_allocation (stream, qtdemux);
}
/* If we're doing a keyframe-only trickmode, only push keyframes on video streams */
- if (G_UNLIKELY (qtdemux->
- segment.flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS)) {
+ if (G_UNLIKELY (qtdemux->segment.
+ flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS)) {
if (stream->subtype == FOURCC_vide && !keyframe) {
- GST_LOG_OBJECT (qtdemux, "Skipping non-keyframe on stream %d", index);
+ GST_LOG_OBJECT (qtdemux, "Skipping non-keyframe on track-id %u",
+ stream->track_id);
goto next;
}
}
GST_DEBUG_OBJECT (qtdemux,
- "pushing from stream %d, empty %d offset %" G_GUINT64_FORMAT
+ "pushing from track-id %u, empty %d offset %" G_GUINT64_FORMAT
", size %d, dts=%" GST_TIME_FORMAT ", pts=%" GST_TIME_FORMAT
- ", duration %" GST_TIME_FORMAT, index, empty, offset, sample_size,
- GST_TIME_ARGS (dts), GST_TIME_ARGS (pts), GST_TIME_ARGS (duration));
+ ", duration %" GST_TIME_FORMAT, stream->track_id, empty, offset,
+ sample_size, GST_TIME_ARGS (dts), GST_TIME_ARGS (pts),
+ GST_TIME_ARGS (duration));
if (G_UNLIKELY (empty)) {
- /* empty segment, push a gap and move to the next one */
- gst_pad_push_event (stream->pad, gst_event_new_gap (pts, duration));
+ /* empty segment, push a gap if there's a second or more
+ * difference and move to the next one */
+ if ((pts + duration - stream->segment.position) >= GST_SECOND)
+ gst_pad_push_event (stream->pad, gst_event_new_gap (pts, duration));
stream->segment.position = pts + duration;
goto next;
}
/* fatal errors need special actions */
/* check EOS */
if (ret == GST_FLOW_EOS) {
- if (qtdemux->n_streams == 0) {
+ if (QTDEMUX_N_STREAMS (qtdemux) == 0) {
/* we have no streams, post an error */
gst_qtdemux_post_no_playable_stream_error (qtdemux);
}
message = gst_message_new_segment_done (GST_OBJECT_CAST (qtdemux),
GST_FORMAT_TIME, stop);
event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
- if (qtdemux->segment_seqnum) {
+ if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID) {
gst_message_set_seqnum (message, qtdemux->segment_seqnum);
gst_event_set_seqnum (event, qtdemux->segment_seqnum);
}
GST_FORMAT_TIME, qtdemux->segment.start);
event = gst_event_new_segment_done (GST_FORMAT_TIME,
qtdemux->segment.start);
- if (qtdemux->segment_seqnum) {
+ if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID) {
gst_message_set_seqnum (message, qtdemux->segment_seqnum);
gst_event_set_seqnum (event, qtdemux->segment_seqnum);
}
GST_LOG_OBJECT (qtdemux, "Sending EOS at end of segment");
event = gst_event_new_eos ();
- if (qtdemux->segment_seqnum)
+ if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID)
gst_event_set_seqnum (event, qtdemux->segment_seqnum);
gst_qtdemux_push_event (qtdemux, event);
}
has_next_entry (GstQTDemux * demux)
{
QtDemuxStream *stream;
- int i;
+ gint i;
GST_DEBUG_OBJECT (demux, "Checking if there are samples not played yet");
- for (i = 0; i < demux->n_streams; i++) {
- stream = demux->streams[i];
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ stream = QTDEMUX_NTH_STREAM (demux, i);
if (stream->sample_index == -1) {
stream->sample_index = 0;
}
if (stream->sample_index >= stream->n_samples) {
- GST_LOG_OBJECT (demux, "stream %d samples exhausted", i);
+ GST_LOG_OBJECT (demux, "track-id %u samples exhausted", stream->track_id);
continue;
}
GST_DEBUG_OBJECT (demux, "Found a sample");
static guint64
next_entry_size (GstQTDemux * demux)
{
- QtDemuxStream *stream;
- int i;
- int smallidx = -1;
+ QtDemuxStream *stream, *target_stream = NULL;
guint64 smalloffs = (guint64) - 1;
QtDemuxSample *sample;
+ gint i;
GST_LOG_OBJECT (demux, "Finding entry at offset %" G_GUINT64_FORMAT,
demux->offset);
- for (i = 0; i < demux->n_streams; i++) {
- stream = demux->streams[i];
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ stream = QTDEMUX_NTH_STREAM (demux, i);
if (stream->sample_index == -1) {
stream->sample_index = 0;
}
if (stream->sample_index >= stream->n_samples) {
- GST_LOG_OBJECT (demux, "stream %d samples exhausted", i);
+ GST_LOG_OBJECT (demux, "track-id %u samples exhausted", stream->track_id);
continue;
}
sample = &stream->samples[stream->sample_index];
GST_LOG_OBJECT (demux,
- "Checking Stream %d (sample_index:%d / offset:%" G_GUINT64_FORMAT
- " / size:%" G_GUINT32_FORMAT ")", i, stream->sample_index,
- sample->offset, sample->size);
+ "Checking track-id %u (sample_index:%d / offset:%" G_GUINT64_FORMAT
+ " / size:%" G_GUINT32_FORMAT ")", stream->track_id,
+ stream->sample_index, sample->offset, sample->size);
if (((smalloffs == -1)
|| (sample->offset < smalloffs)) && (sample->size)) {
- smallidx = i;
smalloffs = sample->offset;
+ target_stream = stream;
}
}
- GST_LOG_OBJECT (demux,
- "stream %d offset %" G_GUINT64_FORMAT " demux->offset :%"
- G_GUINT64_FORMAT, smallidx, smalloffs, demux->offset);
-
- if (smallidx == -1)
+ if (!target_stream)
return -1;
- stream = demux->streams[smallidx];
+ GST_LOG_OBJECT (demux,
+ "track-id %u offset %" G_GUINT64_FORMAT " demux->offset :%"
+ G_GUINT64_FORMAT, target_stream->track_id, smalloffs, demux->offset);
+
+ stream = target_stream;
sample = &stream->samples[stream->sample_index];
if (sample->offset >= demux->offset) {
/* store seqnum to drop flush events, they don't need to reach downstream */
demux->offset_seek_seqnum = gst_event_get_seqnum (event);
res = gst_pad_push_event (demux->sinkpad, event);
- demux->offset_seek_seqnum = 0;
+ demux->offset_seek_seqnum = GST_SEQNUM_INVALID;
return res;
}
demux->todrop -= bytes;
}
+ /* PUSH-MODE only: Send a segment, if not done already. */
static void
gst_qtdemux_check_send_pending_segment (GstQTDemux * demux)
{
- if (G_UNLIKELY (demux->pending_newsegment)) {
+ if (G_UNLIKELY (demux->need_segment)) {
gint i;
- gst_qtdemux_push_pending_newsegment (demux);
+ if (!demux->upstream_format_is_time) {
+ gst_qtdemux_map_and_push_segments (demux, &demux->segment);
+ } else {
+ GstEvent *segment_event;
+ segment_event = gst_event_new_segment (&demux->segment);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (segment_event, demux->segment_seqnum);
+ gst_qtdemux_push_event (demux, segment_event);
+ }
+
+ demux->need_segment = FALSE;
+
/* clear to send tags on all streams */
- for (i = 0; i < demux->n_streams; i++) {
- QtDemuxStream *stream;
- stream = demux->streams[i];
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (demux, i);
gst_qtdemux_push_tags (demux, stream);
if (CUR_STREAM (stream)->sparse) {
GST_INFO_OBJECT (demux, "Sending gap event on stream %d", i);
}
}
+ /* Used for push mode only. */
static void
gst_qtdemux_send_gap_for_segment (GstQTDemux * demux,
QtDemuxStream * stream, gint segment_index, GstClockTime pos)
{
GstClockTime ts, dur;
- GstEvent *gap;
ts = pos;
dur =
stream->segments[segment_index].duration - (pos -
stream->segments[segment_index].time);
- gap = gst_event_new_gap (ts, dur);
stream->time_position += dur;
- GST_DEBUG_OBJECT (stream->pad, "Pushing gap for empty "
- "segment: %" GST_PTR_FORMAT, gap);
- gst_pad_push_event (stream->pad, gap);
- }
-
- static void
- gst_qtdemux_stream_send_initial_gap_segments (GstQTDemux * demux,
- QtDemuxStream * stream)
- {
- gint i;
+ /* Only gaps with a duration of at least one second are propagated.
+ * Same workaround as in pull mode.
+ * (See 2e45926a96ec5298c6ef29bf912e5e6a06dc3e0e) */
+ if (dur >= GST_SECOND) {
+ GstEvent *gap;
+ gap = gst_event_new_gap (ts, dur);
- /* Push any initial gap segments before proceeding to the
- * 'real' data */
- for (i = 0; i < stream->n_segments; i++) {
- gst_qtdemux_activate_segment (demux, stream, i, stream->time_position);
-
- if (QTSEGMENT_IS_EMPTY (&stream->segments[i])) {
- gst_qtdemux_send_gap_for_segment (demux, stream, i,
- stream->time_position);
- } else {
- /* Only support empty segment at the beginning followed by
- * one non-empty segment, this was checked when parsing the
- * edts atom, arriving here is unexpected */
- g_assert (i + 1 == stream->n_segments);
- break;
- }
+ GST_DEBUG_OBJECT (stream->pad, "Pushing gap for empty "
+ "segment: %" GST_PTR_FORMAT, gap);
+ gst_pad_push_event (stream->pad, gap);
}
}
GST_DEBUG_OBJECT (demux, "Got DISCONT, marking all streams as DISCONT");
- for (i = 0; i < demux->n_streams; i++) {
- demux->streams[i]->discont = TRUE;
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ QTDEMUX_NTH_STREAM (demux, i)->discont = TRUE;
}
/* Check if we can land back on our feet in the case where upstream is
* in the case of trick-mode DASH for example) */
if (demux->upstream_format_is_time
&& GST_BUFFER_OFFSET (inbuf) != GST_BUFFER_OFFSET_NONE) {
- gint i;
- for (i = 0; i < demux->n_streams; i++) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
guint32 res;
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (demux, i);
GST_LOG_OBJECT (demux,
- "Stream #%d , checking if offset %" G_GUINT64_FORMAT
- " is a sample start", i, GST_BUFFER_OFFSET (inbuf));
+ "track-id #%u , checking if offset %" G_GUINT64_FORMAT
+ " is a sample start", stream->track_id, GST_BUFFER_OFFSET (inbuf));
res =
gst_qtdemux_find_index_for_given_media_offset_linear (demux,
- demux->streams[i], GST_BUFFER_OFFSET (inbuf));
+ stream, GST_BUFFER_OFFSET (inbuf));
if (res != -1) {
- QtDemuxSample *sample = &demux->streams[i]->samples[res];
+ QtDemuxSample *sample = &stream->samples[res];
GST_LOG_OBJECT (demux,
- "Checking if sample %d from stream %d is valid (offset:%"
- G_GUINT64_FORMAT " size:%" G_GUINT32_FORMAT ")", res, i,
- sample->offset, sample->size);
+ "Checking if sample %d from track-id %u is valid (offset:%"
+ G_GUINT64_FORMAT " size:%" G_GUINT32_FORMAT ")", res,
+ stream->track_id, sample->offset, sample->size);
if (sample->offset == GST_BUFFER_OFFSET (inbuf)) {
GST_LOG_OBJECT (demux,
"new buffer corresponds to a valid sample : %" G_GUINT32_FORMAT,
/* We can go back to standard playback mode */
demux->state = QTDEMUX_STATE_MOVIE;
/* Remember which sample this stream is at */
- demux->streams[i]->sample_index = res;
+ stream->sample_index = res;
/* Finally update all push-based values to the expected values */
- demux->neededbytes = demux->streams[i]->samples[res].size;
+ demux->neededbytes = stream->samples[res].size;
demux->offset = GST_BUFFER_OFFSET (inbuf);
demux->mdatleft =
demux->mdatsize - demux->offset + demux->mdatoffset;
* previously received one. */
if (!is_gap_input && demux->fragmented && demux->segment.rate < 0) {
gst_qtdemux_process_adapter (demux, TRUE);
- for (i = 0; i < demux->n_streams; i++)
- gst_qtdemux_stream_flush_samples_data (demux, demux->streams[i]);
+ g_ptr_array_foreach (demux->active_streams,
+ (GFunc) gst_qtdemux_stream_flush_samples_data, NULL);
}
}
}
if (fourcc == FOURCC_mdat) {
gint next_entry = next_entry_size (demux);
- if (demux->n_streams > 0 && (next_entry != -1 || !demux->fragmented)) {
+ if (QTDEMUX_N_STREAMS (demux) > 0 && (next_entry != -1
+ || !demux->fragmented)) {
/* we have the headers, start playback */
demux->state = QTDEMUX_STATE_MOVIE;
demux->neededbytes = next_entry;
extract_initial_length_and_fourcc (data, demux->neededbytes, NULL,
&fourcc);
if (fourcc == FOURCC_moov) {
- gint n;
-
/* in usual fragmented setup we could try to scan for more
* and end up at the the moov (after mdat) again */
- if (demux->got_moov && demux->n_streams > 0 &&
+ if (demux->got_moov && QTDEMUX_N_STREAMS (demux) > 0 &&
(!demux->fragmented
|| demux->last_moov_offset == demux->offset)) {
GST_DEBUG_OBJECT (demux,
if (demux->moov_node)
g_node_destroy (demux->moov_node);
demux->moov_node = NULL;
- } else {
- /* prepare newsegment to send when streaming actually starts */
- if (!demux->pending_newsegment) {
- demux->pending_newsegment =
- gst_event_new_segment (&demux->segment);
- if (demux->segment_seqnum)
- gst_event_set_seqnum (demux->pending_newsegment,
- demux->segment_seqnum);
- }
}
demux->last_moov_offset = demux->offset;
+ /* Update streams with new moov */
+ gst_qtdemux_stream_concat (demux,
+ demux->old_streams, demux->active_streams);
+
qtdemux_parse_moov (demux, data, demux->neededbytes);
qtdemux_node_dump (demux, demux->moov_node);
qtdemux_parse_tree (demux);
qtdemux_prepare_streams (demux);
- if (!demux->got_moov)
- qtdemux_expose_streams (demux);
- else {
-
- for (n = 0; n < demux->n_streams; n++) {
- QtDemuxStream *stream = demux->streams[n];
-
- gst_qtdemux_configure_stream (demux, stream);
- }
- }
+ QTDEMUX_EXPOSE_LOCK (demux);
+ qtdemux_expose_streams (demux);
+ QTDEMUX_EXPOSE_UNLOCK (demux);
demux->got_moov = TRUE;
- gst_qtdemux_check_send_pending_segment (demux);
- /* fragmented streams headers shouldn't contain edts atoms */
- if (!demux->fragmented) {
- for (n = 0; n < demux->n_streams; n++) {
- gst_qtdemux_stream_send_initial_gap_segments (demux,
- demux->streams[n]);
- }
- }
+ gst_qtdemux_check_send_pending_segment (demux);
if (demux->moov_node_compressed) {
g_node_destroy (demux->moov_node_compressed);
ret = GST_FLOW_ERROR;
goto done;
}
+
/* in MSS we need to expose the pads after the first moof as we won't get a moov */
if (demux->mss_mode && !demux->exposed) {
- if (!demux->pending_newsegment) {
- GST_DEBUG_OBJECT (demux, "new pending_newsegment");
- demux->pending_newsegment =
- gst_event_new_segment (&demux->segment);
- if (demux->segment_seqnum)
- gst_event_set_seqnum (demux->pending_newsegment,
- demux->segment_seqnum);
- }
+ QTDEMUX_EXPOSE_LOCK (demux);
qtdemux_expose_streams (demux);
+ QTDEMUX_EXPOSE_UNLOCK (demux);
}
+
+ gst_qtdemux_check_send_pending_segment (demux);
} else {
GST_DEBUG_OBJECT (demux, "Discarding [moof]");
}
case FOURCC_styp:
/* [styp] is like a [ftyp], but in fragment header. We ignore it for now
* FALLTHROUGH */
+ case FOURCC_skip:
case FOURCC_free:
- /* [free] is a padding atom */
+ /* [free] and [skip] are padding atoms */
GST_DEBUG_OBJECT (demux,
"Skipping fourcc while parsing header : %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (fourcc));
gst_adapter_unmap (demux->adapter);
data = NULL;
- if (demux->mdatbuffer && demux->n_streams) {
+ if (demux->mdatbuffer && QTDEMUX_N_STREAMS (demux)) {
gsize remaining_data_size = 0;
/* the mdat was before the header */
GST_DEBUG_OBJECT (demux, "We have n_streams:%d and mdatbuffer:%p",
- demux->n_streams, demux->mdatbuffer);
+ QTDEMUX_N_STREAMS (demux), demux->mdatbuffer);
/* restore our adapter/offset view of things with upstream;
* put preceding buffered data ahead of current moov data.
* This should also handle evil mdat, moov, mdat cases and alike */
case QTDEMUX_STATE_MOVIE:{
QtDemuxStream *stream = NULL;
QtDemuxSample *sample;
- int i = -1;
GstClockTime dts, pts, duration;
gboolean keyframe;
+ gint i;
GST_DEBUG_OBJECT (demux,
"BEGIN // in MOVIE for offset %" G_GUINT64_FORMAT, demux->offset);
GST_DEBUG_OBJECT (demux, "parsing cenc auxiliary info");
data = gst_adapter_map (demux->adapter, demux->todrop);
gst_byte_reader_init (&br, data + 8, demux->todrop);
- if (!qtdemux_parse_cenc_aux_info (demux, demux->streams[0], &br,
+ if (!qtdemux_parse_cenc_aux_info (demux,
+ QTDEMUX_NTH_STREAM (demux, 0), &br,
demux->cenc_aux_info_sizes, demux->cenc_aux_sample_count)) {
GST_ERROR_OBJECT (demux, "failed to parse cenc auxiliary info");
ret = GST_FLOW_ERROR;
gst_qtdemux_check_send_pending_segment (demux);
/* Figure out which stream this packet belongs to */
- for (i = 0; i < demux->n_streams; i++) {
- stream = demux->streams[i];
- if (stream->sample_index >= stream->n_samples)
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ stream = QTDEMUX_NTH_STREAM (demux, i);
+ if (stream->sample_index >= stream->n_samples) {
+ /* reset to be checked below G_UNLIKELY (stream == NULL) */
+ stream = NULL;
continue;
+ }
GST_LOG_OBJECT (demux,
- "Checking stream %d (sample_index:%d / offset:%" G_GUINT64_FORMAT
- " / size:%d)", i, stream->sample_index,
+ "Checking track-id %u (sample_index:%d / offset:%"
+ G_GUINT64_FORMAT " / size:%d)", stream->track_id,
+ stream->sample_index,
stream->samples[stream->sample_index].offset,
stream->samples[stream->sample_index].size);
break;
}
- if (G_UNLIKELY (stream == NULL || i == demux->n_streams))
+ if (G_UNLIKELY (stream == NULL))
goto unknown_stream;
gst_qtdemux_stream_check_and_change_stsd_index (demux, stream);
/* check for segment end */
if (G_UNLIKELY (demux->segment.stop != -1
- && demux->segment.stop <= pts && stream->on_keyframe)) {
+ && demux->segment.stop <= pts && stream->on_keyframe)
+ && !(demux->upstream_format_is_time && demux->segment.rate < 0)) {
GST_DEBUG_OBJECT (demux, "we reached the end of our segment.");
stream->time_position = GST_CLOCK_TIME_NONE; /* this means EOS */
/* check if all streams are eos */
ret = GST_FLOW_EOS;
- for (i = 0; i < demux->n_streams; i++) {
- if (!STREAM_IS_EOS (demux->streams[i])) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ if (!STREAM_IS_EOS (QTDEMUX_NTH_STREAM (demux, i))) {
ret = GST_FLOW_OK;
break;
}
case FOURCC_meta:
{
GST_DEBUG_OBJECT (qtdemux, "parsing meta atom");
- qtdemux_parse_container (qtdemux, node, buffer + 12, end);
+
+ /* You are reading this correctly. QTFF specifies that the
+ * metadata atom is a short atom, whereas ISO BMFF specifies
+ * it's a full atom. But since so many people are doing things
+ * differently, we actually peek into the atom to see which
+ * variant it is */
+ if (length < 16) {
+ GST_LOG_OBJECT (qtdemux, "skipping small %" GST_FOURCC_FORMAT " box",
+ GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+ if (QT_FOURCC (buffer + 12) == FOURCC_hdlr) {
+ /* Variant 1: What QTFF specifies. 'meta' is a short header which
+ * starts with a 'hdlr' atom */
+ qtdemux_parse_container (qtdemux, node, buffer + 8, end);
+ } else if (QT_UINT32 (buffer + 8) == 0x00000000) {
+ /* Variant 2: What ISO BMFF specifies. 'meta' is a _full_ atom
+ * with version/flags both set to zero */
+ qtdemux_parse_container (qtdemux, node, buffer + 12, end);
+ } else
+ GST_WARNING_OBJECT (qtdemux, "Unknown 'meta' atom format");
break;
}
case FOURCC_mp4s:
qtdemux_parse_container (qtdemux, node, buffer + 36, end);
break;
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ case FOURCC_SA3D:
+ {
+ qtdemux_parse_SA3D (qtdemux, buffer, end - buffer);
+ break;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
default:
if (!strcmp (type->name, "unknown"))
GST_MEMDUMP ("Unknown tag", buffer + 4, end - buffer - 4);
}
static void
- qtdemux_do_allocation (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ qtdemux_do_allocation (QtDemuxStream * stream, GstQTDemux * qtdemux)
{
/* FIXME: This can only reliably work if demuxers have a
* separate streaming thread per srcpad. This should be
}
static gboolean
+ pad_query (const GValue * item, GValue * value, gpointer user_data)
+ {
+ GstPad *pad = g_value_get_object (item);
+ GstQuery *query = user_data;
+ gboolean res;
+
+ res = gst_pad_peer_query (pad, query);
+
+ if (res) {
+ g_value_set_boolean (value, TRUE);
+ return FALSE;
+ }
+
+ GST_INFO_OBJECT (pad, "pad peer query failed");
+ return TRUE;
+ }
+
+ static gboolean
+ gst_qtdemux_run_query (GstElement * element, GstQuery * query,
+ GstPadDirection direction)
+ {
+ GstIterator *it;
+ GstIteratorFoldFunction func = pad_query;
+ GValue res = { 0, };
+
+ g_value_init (&res, G_TYPE_BOOLEAN);
+ g_value_set_boolean (&res, FALSE);
+
+ /* Ask neighbor */
+ if (direction == GST_PAD_SRC)
+ it = gst_element_iterate_src_pads (element);
+ else
+ it = gst_element_iterate_sink_pads (element);
+
+ while (gst_iterator_fold (it, func, &res, query) == GST_ITERATOR_RESYNC)
+ gst_iterator_resync (it);
+
+ gst_iterator_free (it);
+
+ return g_value_get_boolean (&res);
+ }
+
+ static void
+ gst_qtdemux_request_protection_context (GstQTDemux * qtdemux,
+ QtDemuxStream * stream)
+ {
+ GstQuery *query;
+ GstContext *ctxt;
+ GstElement *element = GST_ELEMENT (qtdemux);
+ GstStructure *st;
+ gchar **filtered_sys_ids;
+ GValue event_list = G_VALUE_INIT;
+ GList *walk;
+
+ /* 1. Check if we already have the context. */
+ if (qtdemux->preferred_protection_system_id != NULL) {
+ GST_LOG_OBJECT (element,
+ "already have the protection context, no need to request it again");
+ return;
+ }
+
+ g_ptr_array_add (qtdemux->protection_system_ids, NULL);
+ filtered_sys_ids = gst_protection_filter_systems_by_available_decryptors (
+ (const gchar **) qtdemux->protection_system_ids->pdata);
+
+ g_ptr_array_remove_index (qtdemux->protection_system_ids,
+ qtdemux->protection_system_ids->len - 1);
+ GST_TRACE_OBJECT (qtdemux, "detected %u protection systems, we have "
+ "decryptors for %u of them, running context request",
+ qtdemux->protection_system_ids->len,
+ filtered_sys_ids ? g_strv_length (filtered_sys_ids) : 0);
+
+
+ if (stream->protection_scheme_event_queue.length) {
+ GST_TRACE_OBJECT (qtdemux, "using stream event queue, length %u",
+ stream->protection_scheme_event_queue.length);
+ walk = stream->protection_scheme_event_queue.tail;
+ } else {
+ GST_TRACE_OBJECT (qtdemux, "using demuxer event queue, length %u",
+ qtdemux->protection_event_queue.length);
+ walk = qtdemux->protection_event_queue.tail;
+ }
+
+ g_value_init (&event_list, GST_TYPE_LIST);
+ for (; walk; walk = g_list_previous (walk)) {
+ GValue *event_value = g_new0 (GValue, 1);
+ g_value_init (event_value, GST_TYPE_EVENT);
+ g_value_set_boxed (event_value, walk->data);
+ gst_value_list_append_and_take_value (&event_list, event_value);
+ }
+
+ /* 2a) Query downstream with GST_QUERY_CONTEXT for the context and
+ * check if downstream already has a context of the specific type
+ * 2b) Query upstream as above.
+ */
+ query = gst_query_new_context ("drm-preferred-decryption-system-id");
+ st = gst_query_writable_structure (query);
+ gst_structure_set (st, "track-id", G_TYPE_UINT, stream->track_id,
+ "available-stream-encryption-systems", G_TYPE_STRV, filtered_sys_ids,
+ NULL);
+ gst_structure_set_value (st, "stream-encryption-events", &event_list);
+ if (gst_qtdemux_run_query (element, query, GST_PAD_SRC)) {
+ gst_query_parse_context (query, &ctxt);
+ GST_INFO_OBJECT (element, "found context (%p) in downstream query", ctxt);
+ gst_element_set_context (element, ctxt);
+ } else if (gst_qtdemux_run_query (element, query, GST_PAD_SINK)) {
+ gst_query_parse_context (query, &ctxt);
+ GST_INFO_OBJECT (element, "found context (%p) in upstream query", ctxt);
+ gst_element_set_context (element, ctxt);
+ } else {
+ /* 3) Post a GST_MESSAGE_NEED_CONTEXT message on the bus with
+ * the required context type and afterwards check if a
+ * usable context was set now as in 1). The message could
+ * be handled by the parent bins of the element and the
+ * application.
+ */
+ GstMessage *msg;
+
+ GST_INFO_OBJECT (element, "posting need context message");
+ msg = gst_message_new_need_context (GST_OBJECT_CAST (element),
+ "drm-preferred-decryption-system-id");
+ st = (GstStructure *) gst_message_get_structure (msg);
+ gst_structure_set (st, "track-id", G_TYPE_UINT, stream->track_id,
+ "available-stream-encryption-systems", G_TYPE_STRV, filtered_sys_ids,
+ NULL);
+
+ gst_structure_set_value (st, "stream-encryption-events", &event_list);
+ gst_element_post_message (element, msg);
+ }
+
+ g_strfreev (filtered_sys_ids);
+ g_value_unset (&event_list);
+ gst_query_unref (query);
+ }
+
+ static gboolean
gst_qtdemux_configure_protected_caps (GstQTDemux * qtdemux,
QtDemuxStream * stream)
{
GstStructure *s;
- const gchar *selected_system;
+ const gchar *selected_system = NULL;
g_return_val_if_fail (qtdemux != NULL, FALSE);
g_return_val_if_fail (stream != NULL, FALSE);
FALSE);
if (stream->protection_scheme_type != FOURCC_cenc) {
- GST_ERROR_OBJECT (qtdemux, "unsupported protection scheme");
+ GST_ERROR_OBJECT (qtdemux,
+ "unsupported protection scheme: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->protection_scheme_type));
return FALSE;
}
if (qtdemux->protection_system_ids == NULL) {
"cenc protection system information has been found");
return FALSE;
}
- g_ptr_array_add (qtdemux->protection_system_ids, NULL);
- selected_system = gst_protection_select_system ((const gchar **)
- qtdemux->protection_system_ids->pdata);
- g_ptr_array_remove_index (qtdemux->protection_system_ids,
- qtdemux->protection_system_ids->len - 1);
+
+ gst_qtdemux_request_protection_context (qtdemux, stream);
+ if (qtdemux->preferred_protection_system_id != NULL) {
+ const gchar *preferred_system_array[] =
+ { qtdemux->preferred_protection_system_id, NULL };
+
+ selected_system = gst_protection_select_system (preferred_system_array);
+
+ if (selected_system) {
+ GST_TRACE_OBJECT (qtdemux, "selected preferred system %s",
+ qtdemux->preferred_protection_system_id);
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "could not select preferred system %s "
+ "because there is no available decryptor",
+ qtdemux->preferred_protection_system_id);
+ }
+ }
+
+ if (!selected_system) {
+ g_ptr_array_add (qtdemux->protection_system_ids, NULL);
+ selected_system = gst_protection_select_system ((const gchar **)
+ qtdemux->protection_system_ids->pdata);
+ g_ptr_array_remove_index (qtdemux->protection_system_ids,
+ qtdemux->protection_system_ids->len - 1);
+ }
+
if (!selected_system) {
GST_ERROR_OBJECT (qtdemux, "stream is protected, but no "
"suitable decryptor element has been found");
return FALSE;
}
+ GST_DEBUG_OBJECT (qtdemux, "selected protection system is %s",
+ selected_system);
+
s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
if (!gst_structure_has_name (s, "application/x-cenc")) {
gst_structure_set (s,
}
static gboolean
- gst_qtdemux_configure_stream (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ gst_qtdemux_guess_framerate (GstQTDemux * qtdemux, QtDemuxStream * stream)
{
- if (stream->subtype == FOURCC_vide) {
- /* fps is calculated base on the duration of the average framerate since
- * qt does not have a fixed framerate. */
- gboolean fps_available = TRUE;
+ /* fps is calculated base on the duration of the average framerate since
+ * qt does not have a fixed framerate. */
+ gboolean fps_available = TRUE;
+ guint32 first_duration = 0;
- if ((stream->n_samples == 1) && (stream->first_duration == 0)) {
- /* still frame */
- CUR_STREAM (stream)->fps_n = 0;
+ if (stream->n_samples > 0)
+ first_duration = stream->samples[0].duration;
+
+ if ((stream->n_samples == 1 && first_duration == 0)
+ || (qtdemux->fragmented && stream->n_samples_moof == 1)) {
+ /* still frame */
+ CUR_STREAM (stream)->fps_n = 0;
+ CUR_STREAM (stream)->fps_d = 1;
+ } else {
+ if (stream->duration == 0 || stream->n_samples < 2) {
+ CUR_STREAM (stream)->fps_n = stream->timescale;
CUR_STREAM (stream)->fps_d = 1;
+ fps_available = FALSE;
} else {
- if (stream->duration == 0 || stream->n_samples < 2) {
- CUR_STREAM (stream)->fps_n = stream->timescale;
- CUR_STREAM (stream)->fps_d = 1;
- fps_available = FALSE;
+ GstClockTime avg_duration;
+ guint64 duration;
+ guint32 n_samples;
+
+ /* duration and n_samples can be updated for fragmented format
+ * so, framerate of fragmented format is calculated using data in a moof */
+ if (qtdemux->fragmented && stream->n_samples_moof > 0
+ && stream->duration_moof > 0) {
+ n_samples = stream->n_samples_moof;
+ duration = stream->duration_moof;
} else {
- GstClockTime avg_duration;
- guint64 duration;
- guint32 n_samples;
-
- /* duration and n_samples can be updated for fragmented format
- * so, framerate of fragmented format is calculated using data in a moof */
- if (qtdemux->fragmented && stream->n_samples_moof > 0
- && stream->duration_moof > 0) {
- n_samples = stream->n_samples_moof;
- duration = stream->duration_moof;
- } else {
- n_samples = stream->n_samples;
- duration = stream->duration;
- }
+ n_samples = stream->n_samples;
+ duration = stream->duration;
+ }
- /* Calculate a framerate, ignoring the first sample which is sometimes truncated */
- /* stream->duration is guint64, timescale, n_samples are guint32 */
- avg_duration =
- gst_util_uint64_scale_round (duration -
- stream->first_duration, GST_SECOND,
- (guint64) (stream->timescale) * (n_samples - 1));
+ /* Calculate a framerate, ignoring the first sample which is sometimes truncated */
+ /* stream->duration is guint64, timescale, n_samples are guint32 */
+ avg_duration =
+ gst_util_uint64_scale_round (duration -
+ first_duration, GST_SECOND,
+ (guint64) (stream->timescale) * (n_samples - 1));
- GST_LOG_OBJECT (qtdemux,
- "Calculating avg sample duration based on stream (or moof) duration %"
- G_GUINT64_FORMAT
- " minus first sample %u, leaving %d samples gives %"
- GST_TIME_FORMAT, duration, stream->first_duration,
- n_samples - 1, GST_TIME_ARGS (avg_duration));
+ GST_LOG_OBJECT (qtdemux,
+ "Calculating avg sample duration based on stream (or moof) duration %"
+ G_GUINT64_FORMAT
+ " minus first sample %u, leaving %d samples gives %"
+ GST_TIME_FORMAT, duration, first_duration,
+ n_samples - 1, GST_TIME_ARGS (avg_duration));
- gst_video_guess_framerate (avg_duration, &CUR_STREAM (stream)->fps_n,
- &CUR_STREAM (stream)->fps_d);
+ fps_available =
+ gst_video_guess_framerate (avg_duration,
+ &CUR_STREAM (stream)->fps_n, &CUR_STREAM (stream)->fps_d);
- GST_DEBUG_OBJECT (qtdemux,
- "Calculating framerate, timescale %u gave fps_n %d fps_d %d",
- stream->timescale, CUR_STREAM (stream)->fps_n,
- CUR_STREAM (stream)->fps_d);
- }
+ GST_DEBUG_OBJECT (qtdemux,
+ "Calculating framerate, timescale %u gave fps_n %d fps_d %d",
+ stream->timescale, CUR_STREAM (stream)->fps_n,
+ CUR_STREAM (stream)->fps_d);
}
+ }
+
+ return fps_available;
+ }
+
+ static gboolean
+ gst_qtdemux_configure_stream (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ {
+ if (stream->subtype == FOURCC_vide) {
+ gboolean fps_available = gst_qtdemux_guess_framerate (qtdemux, stream);
if (CUR_STREAM (stream)->caps) {
CUR_STREAM (stream)->caps =
gst_caps_make_writable (CUR_STREAM (stream)->caps);
- gst_caps_set_simple (CUR_STREAM (stream)->caps,
- "width", G_TYPE_INT, CUR_STREAM (stream)->width,
- "height", G_TYPE_INT, CUR_STREAM (stream)->height, NULL);
+ if (CUR_STREAM (stream)->width && CUR_STREAM (stream)->height)
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "width", G_TYPE_INT, CUR_STREAM (stream)->width,
+ "height", G_TYPE_INT, CUR_STREAM (stream)->height, NULL);
/* set framerate if calculated framerate is reliable */
if (fps_available) {
}
}
+ else if (stream->subtype == FOURCC_clcp && CUR_STREAM (stream)->caps) {
+ const GstStructure *s;
+ QtDemuxStream *fps_stream = NULL;
+ gboolean fps_available = FALSE;
+
+ /* CEA608 closed caption tracks are a bit special in that each sample
+ * can contain CCs for multiple frames, and CCs can be omitted and have to
+ * be inferred from the duration of the sample then.
+ *
+ * As such we take the framerate from the (first) video track here for
+ * CEA608 as there must be one CC byte pair for every video frame
+ * according to the spec.
+ *
+ * For CEA708 all is fine and there is one sample per frame.
+ */
+
+ s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
+ if (gst_structure_has_name (s, "closedcaption/x-cea-608")) {
+ gint i;
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *tmp = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ if (tmp->subtype == FOURCC_vide) {
+ fps_stream = tmp;
+ break;
+ }
+ }
+
+ if (fps_stream) {
+ fps_available = gst_qtdemux_guess_framerate (qtdemux, fps_stream);
+ CUR_STREAM (stream)->fps_n = CUR_STREAM (fps_stream)->fps_n;
+ CUR_STREAM (stream)->fps_d = CUR_STREAM (fps_stream)->fps_d;
+ }
+ } else {
+ fps_available = gst_qtdemux_guess_framerate (qtdemux, stream);
+ fps_stream = stream;
+ }
+
+ CUR_STREAM (stream)->caps =
+ gst_caps_make_writable (CUR_STREAM (stream)->caps);
+
+ /* set framerate if calculated framerate is reliable */
+ if (fps_available) {
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "framerate", GST_TYPE_FRACTION, CUR_STREAM (stream)->fps_n,
+ CUR_STREAM (stream)->fps_d, NULL);
+ }
+ }
+
if (stream->pad) {
GstCaps *prev_caps = NULL;
GST_DEBUG_OBJECT (qtdemux, "setting caps %" GST_PTR_FORMAT,
CUR_STREAM (stream)->caps);
if (stream->new_stream) {
- gchar *stream_id;
GstEvent *event;
GstStreamFlags stream_flags = GST_STREAM_FLAG_NONE;
}
stream->new_stream = FALSE;
- stream_id =
- gst_pad_create_stream_id_printf (stream->pad,
- GST_ELEMENT_CAST (qtdemux), "%03u", stream->track_id);
- event = gst_event_new_stream_start (stream_id);
+ event = gst_event_new_stream_start (stream->stream_id);
if (qtdemux->have_group_id)
gst_event_set_group_id (event, qtdemux->group_id);
if (stream->disabled)
}
gst_event_set_stream_flags (event, stream_flags);
gst_pad_push_event (stream->pad, event);
- g_free (stream_id);
}
prev_caps = gst_pad_get_current_caps (stream->pad);
QtDemuxStream * stream, GstTagList * list)
{
gboolean ret = TRUE;
- /* consistent default for push based mode */
- gst_segment_init (&stream->segment, GST_FORMAT_TIME);
if (stream->subtype == FOURCC_vide) {
gchar *name = g_strdup_printf ("video_%u", qtdemux->n_video_streams);
} else if (stream->subtype == FOURCC_strm) {
GST_DEBUG_OBJECT (qtdemux, "stream type, not creating pad");
} else if (stream->subtype == FOURCC_subp || stream->subtype == FOURCC_text
- || stream->subtype == FOURCC_sbtl || stream->subtype == FOURCC_subt) {
+ || stream->subtype == FOURCC_sbtl || stream->subtype == FOURCC_subt
+ || stream->subtype == FOURCC_clcp) {
gchar *name = g_strdup_printf ("subtitle_%u", qtdemux->n_sub_streams);
stream->pad =
gst_byte_reader_skip_unchecked (&stream->ctts, 4);
offset = gst_byte_reader_get_int32_be_unchecked (&stream->ctts);
+ /* HACK: if sample_offset is larger than 2 * duration, ignore the box.
+ * slightly inaccurate PTS could be more usable than corrupted one */
+ if (G_UNLIKELY ((ABS (offset) / 2) > stream->duration)) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Ignore corrupted ctts, sample_offset %" G_GINT32_FORMAT
+ " larger than duration %" G_GUINT64_FORMAT,
+ offset, stream->duration);
+
+ stream->cslg_shift = 0;
+ stream->ctts_present = FALSE;
+ return TRUE;
+ }
if (offset < cslg_least)
cslg_least = offset;
if ((edts = qtdemux_tree_get_child_by_type (trak, FOURCC_edts))) {
GNode *elst;
gint n_segments;
- gint i, count, entry_size;
+ gint segment_number, entry_size;
guint64 time;
GstClockTime stime;
const guint8 *buffer;
/* segments always start from 0 */
time = 0;
stime = 0;
- count = 0;
buffer += 16;
- for (i = 0; i < n_segments; i++) {
+ for (segment_number = 0; segment_number < n_segments; segment_number++) {
guint64 duration;
guint64 media_time;
- gboolean time_valid = TRUE;
+ gboolean empty_edit = FALSE;
QtDemuxSegment *segment;
guint32 rate_int;
GstClockTime media_start = GST_CLOCK_TIME_NONE;
media_time = QT_UINT64 (buffer + 8);
duration = QT_UINT64 (buffer);
if (media_time == G_MAXUINT64)
- time_valid = FALSE;
+ empty_edit = TRUE;
} else {
media_time = QT_UINT32 (buffer + 4);
duration = QT_UINT32 (buffer);
if (media_time == G_MAXUINT32)
- time_valid = FALSE;
+ empty_edit = TRUE;
}
- if (time_valid)
+ if (!empty_edit)
media_start = QTSTREAMTIME_TO_GSTTIME (stream, media_time);
- segment = &stream->segments[count++];
+ segment = &stream->segments[segment_number];
/* time and duration expressed in global timescale */
segment->time = stime;
- /* add non scaled values so we don't cause roundoff errors */
- if (duration || media_start == GST_CLOCK_TIME_NONE) {
+ if (duration != 0 || empty_edit) {
+ /* edge case: empty edits with duration=zero are treated here.
+ * (files should not have these anyway). */
+
+ /* add non scaled values so we don't cause roundoff errors */
time += duration;
stime = QTTIME_TO_GSTTIME (qtdemux, time);
segment->duration = stime - segment->time;
} else {
/* zero duration does not imply media_start == media_stop
- * but, only specify media_start.*/
- stime = QTTIME_TO_GSTTIME (qtdemux, qtdemux->duration);
- if (GST_CLOCK_TIME_IS_VALID (stime) && time_valid
- && stime >= media_start) {
- segment->duration = stime - media_start;
- } else {
- segment->duration = GST_CLOCK_TIME_NONE;
- }
+ * but, only specify media_start. The edit ends with the track. */
+ stime = segment->duration = GST_CLOCK_TIME_NONE;
+ /* Don't allow more edits after this one. */
+ n_segments = segment_number + 1;
}
segment->stop_time = stime;
segment->trak_media_start = media_time;
/* media_time expressed in stream timescale */
- if (time_valid) {
+ if (!empty_edit) {
segment->media_start = media_start;
- segment->media_stop = segment->media_start + segment->duration;
+ segment->media_stop = GST_CLOCK_TIME_IS_VALID (segment->duration)
+ ? segment->media_start + segment->duration : GST_CLOCK_TIME_NONE;
media_segments_count++;
} else {
segment->media_start = GST_CLOCK_TIME_NONE;
", duration %" GST_TIME_FORMAT ", media_start %" GST_TIME_FORMAT
" (%" G_GUINT64_FORMAT ") , media_stop %" GST_TIME_FORMAT
" stop_time %" GST_TIME_FORMAT " rate %g, (%d) timescale %u",
- i, GST_TIME_ARGS (segment->time),
+ segment_number, GST_TIME_ARGS (segment->time),
GST_TIME_ARGS (segment->duration),
GST_TIME_ARGS (segment->media_start), media_time,
GST_TIME_ARGS (segment->media_stop),
GST_TIME_ARGS (segment->stop_time), segment->rate, rate_int,
stream->timescale);
- if (segment->stop_time > qtdemux->segment.stop) {
+ if (segment->stop_time > qtdemux->segment.stop &&
+ !qtdemux->upstream_format_is_time) {
GST_WARNING_OBJECT (qtdemux, "Segment %d "
" extends to %" GST_TIME_FORMAT
- " past the end of the file duration %" GST_TIME_FORMAT
- " it will be truncated", i, GST_TIME_ARGS (segment->stop_time),
+ " past the end of the declared movie duration %" GST_TIME_FORMAT
+ " movie segment will be extended", segment_number,
+ GST_TIME_ARGS (segment->stop_time),
GST_TIME_ARGS (qtdemux->segment.stop));
- qtdemux->segment.stop = segment->stop_time;
+ qtdemux->segment.stop = qtdemux->segment.duration = segment->stop_time;
}
buffer += entry_size;
}
- GST_DEBUG_OBJECT (qtdemux, "found %d segments", count);
- stream->n_segments = count;
+ GST_DEBUG_OBJECT (qtdemux, "found %d segments", n_segments);
+ stream->n_segments = n_segments;
if (media_segments_count != 1)
allow_pushbased_edts = FALSE;
}
GNode *frma;
GNode *schm;
GNode *schi;
+ QtDemuxCencSampleSetInfo *info;
+ GNode *tenc;
+ const guint8 *tenc_data;
g_return_val_if_fail (qtdemux != NULL, FALSE);
g_return_val_if_fail (stream != NULL, FALSE);
GST_DEBUG_OBJECT (qtdemux, "sinf box does not contain schi box");
return FALSE;
}
+ if (stream->protection_scheme_type != FOURCC_cenc &&
+ stream->protection_scheme_type != FOURCC_piff) {
+ GST_ERROR_OBJECT (qtdemux,
+ "Invalid protection_scheme_type: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->protection_scheme_type));
+ return FALSE;
+ }
+
+ if (G_UNLIKELY (!stream->protection_scheme_info))
+ stream->protection_scheme_info =
+ g_malloc0 (sizeof (QtDemuxCencSampleSetInfo));
+
+ info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+
if (stream->protection_scheme_type == FOURCC_cenc) {
- QtDemuxCencSampleSetInfo *info;
- GNode *tenc;
- const guint8 *tenc_data;
- guint32 isEncrypted;
+ guint32 is_encrypted;
guint8 iv_size;
const guint8 *default_kid;
- GstBuffer *kid_buf;
-
- if (G_UNLIKELY (!stream->protection_scheme_info))
- stream->protection_scheme_info =
- g_malloc0 (sizeof (QtDemuxCencSampleSetInfo));
-
- info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
tenc = qtdemux_tree_get_child_by_type (schi, FOURCC_tenc);
if (!tenc) {
return FALSE;
}
tenc_data = (const guint8 *) tenc->data + 12;
- isEncrypted = QT_UINT24 (tenc_data);
+ is_encrypted = QT_UINT24 (tenc_data);
iv_size = QT_UINT8 (tenc_data + 3);
default_kid = (tenc_data + 4);
- kid_buf = gst_buffer_new_allocate (NULL, 16, NULL);
- gst_buffer_fill (kid_buf, 0, default_kid, 16);
- if (info->default_properties)
- gst_structure_free (info->default_properties);
- info->default_properties =
- gst_structure_new ("application/x-cenc",
- "iv_size", G_TYPE_UINT, iv_size,
- "encrypted", G_TYPE_BOOLEAN, (isEncrypted == 1),
- "kid", GST_TYPE_BUFFER, kid_buf, NULL);
- GST_DEBUG_OBJECT (qtdemux, "default sample properties: "
- "is_encrypted=%u, iv_size=%u", isEncrypted, iv_size);
- gst_buffer_unref (kid_buf);
+ qtdemux_update_default_sample_encryption_settings (qtdemux, info,
+ is_encrypted, iv_size, default_kid);
+ } else if (stream->protection_scheme_type == FOURCC_piff) {
+ GstByteReader br;
+ static const guint8 piff_track_encryption_uuid[] = {
+ 0x89, 0x74, 0xdb, 0xce, 0x7b, 0xe7, 0x4c, 0x51,
+ 0x84, 0xf9, 0x71, 0x48, 0xf9, 0x88, 0x25, 0x54
+ };
+
+ tenc = qtdemux_tree_get_child_by_type (schi, FOURCC_uuid);
+ if (!tenc) {
+ GST_ERROR_OBJECT (qtdemux, "schi box does not contain tenc box, "
+ "which is mandatory for Common Encryption");
+ return FALSE;
+ }
+
+ tenc_data = (const guint8 *) tenc->data + 8;
+ if (memcmp (tenc_data, piff_track_encryption_uuid, 16) != 0) {
+ gchar *box_uuid = qtdemux_uuid_bytes_to_string (tenc_data);
+ GST_ERROR_OBJECT (qtdemux,
+ "Unsupported track encryption box with uuid: %s", box_uuid);
+ g_free (box_uuid);
+ return FALSE;
+ }
+ tenc_data = (const guint8 *) tenc->data + 16 + 12;
+ gst_byte_reader_init (&br, tenc_data, 20);
+ if (!qtdemux_update_default_piff_encryption_settings (qtdemux, info, &br)) {
+ GST_ERROR_OBJECT (qtdemux, "PIFF track box parsing error");
+ return FALSE;
+ }
+ stream->protection_scheme_type = FOURCC_cenc;
}
+
return TRUE;
}
+ static gint
+ qtdemux_track_id_compare_func (QtDemuxStream ** stream1,
+ QtDemuxStream ** stream2)
+ {
+ return (gint) (*stream1)->track_id - (gint) (*stream2)->track_id;
+ }
+
/* parse the traks.
* With each track we associate a new QtDemuxStream that contains all the info
* about the trak.
GNode *stsd;
GNode *mp4a;
GNode *mp4v;
- GNode *wave;
GNode *esds;
- GNode *pasp;
- GNode *colr;
GNode *tref;
GNode *udta;
GNode *svmi;
- GNode *fiel;
QtDemuxStream *stream = NULL;
- gboolean new_stream = FALSE;
- gchar *codec = NULL;
const guint8 *stsd_data;
const guint8 *stsd_entry_data;
guint remaining_stsd_len;
guint32 tkhd_flags = 0;
guint8 tkhd_version = 0;
guint32 w = 0, h = 0;
- guint32 fourcc;
guint value_size, stsd_len, len;
guint32 track_id;
guint32 dummy;
!gst_byte_reader_get_uint32_be (&tkhd, &track_id))
goto corrupt_file;
- if (!qtdemux->got_moov) {
- if (qtdemux_find_stream (qtdemux, track_id))
- goto existing_stream;
- stream = _create_stream ();
- stream->track_id = track_id;
- new_stream = TRUE;
- } else {
- stream = qtdemux_find_stream (qtdemux, track_id);
- if (!stream) {
- GST_WARNING_OBJECT (qtdemux, "Stream not found, going to ignore it");
- goto skip_track;
- }
+ /* Check if current moov has duplicated track_id */
+ if (qtdemux_find_stream (qtdemux, track_id))
+ goto existing_stream;
- stream->stream_tags = gst_tag_list_make_writable (stream->stream_tags);
+ stream = _create_stream (qtdemux, track_id);
+ stream->stream_tags = gst_tag_list_make_writable (stream->stream_tags);
- /* flush samples data from this track from previous moov */
- gst_qtdemux_stream_flush_segments_data (qtdemux, stream);
- gst_qtdemux_stream_flush_samples_data (qtdemux, stream);
- }
/* need defaults for fragments */
qtdemux_parse_trex (qtdemux, stream, &dummy, &dummy, &dummy);
version = QT_UINT32 ((guint8 *) mdhd->data + 8);
GST_LOG_OBJECT (qtdemux, "track version/flags: %08x", version);
if (version == 0x01000000) {
- if (len < 38)
+ if (len < 42)
goto corrupt_file;
stream->timescale = QT_UINT32 ((guint8 *) mdhd->data + 28);
stream->duration = QT_UINT64 ((guint8 *) mdhd->data + 32);
- lang_code = QT_UINT16 ((guint8 *) mdhd->data + 36);
+ lang_code = QT_UINT16 ((guint8 *) mdhd->data + 40);
} else {
if (len < 30)
goto corrupt_file;
"found, assuming preview image or something; skipping track",
stream->duration, stream->timescale, qtdemux->duration,
qtdemux->timescale);
- if (new_stream)
- gst_qtdemux_stream_free (qtdemux, stream);
+ gst_qtdemux_stream_unref (stream);
return TRUE;
}
}
if (stsd_len < 24) {
/* .. but skip stream with empty stsd produced by some Vivotek cameras */
if (stream->subtype == FOURCC_vivo) {
- if (new_stream)
- gst_qtdemux_stream_free (qtdemux, stream);
+ gst_qtdemux_stream_unref (stream);
return TRUE;
} else {
goto corrupt_file;
stsd_entry_data = stsd_data + 16;
remaining_stsd_len = stsd_len - 16;
for (stsd_index = 0; stsd_index < stsd_entry_count; stsd_index++) {
+ guint32 fourcc;
+ gchar *codec = NULL;
QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[stsd_index];
/* and that entry should fit within stsd */
}
if (stream->subtype == FOURCC_vide) {
+ GNode *colr;
+ GNode *fiel;
+ GNode *pasp;
gboolean gray;
gint depth, palette_size, palette_count;
guint32 *palette_data = NULL;
fiel = NULL;
/* pick 'the' stsd child */
mp4v = qtdemux_tree_get_child_by_index (stsd, stsd_index);
- if (!stream->protected) {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != fourcc) {
+ // We should skip parsing the stsd for non-protected streams if
+ // the entry doesn't match the fourcc, since they don't change
+ // format. However, for protected streams we can have partial
+ // encryption, where parts of the stream are encrypted and parts
+ // not. For both parts of such streams, we should ensure the
+ // esds overrides are parsed for both from the stsd.
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != fourcc) {
+ if (stream->protected && QTDEMUX_TREE_NODE_FOURCC (mp4v) != FOURCC_encv)
mp4v = NULL;
- }
- } else {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != FOURCC_encv) {
+ else if (!stream->protected)
mp4v = NULL;
- }
}
if (mp4v) {
/* parse, if found */
GstBuffer *buf;
- GST_DEBUG_OBJECT (qtdemux, "found avcC codec_data in stsd");
+ GST_DEBUG_OBJECT (qtdemux, "found hvcC codec_data in stsd");
/* First 4 bytes are the length of the atom, the next 4 bytes
* are the fourcc, the next 1 byte is the version, and the
}
break;
}
+ case FOURCC_av01:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *av1_data = stsd_entry_data + 0x56;
+
+ /* find av1C */
+ while (len >= 0x8) {
+ gint size;
+
+ if (QT_UINT32 (av1_data) <= len)
+ size = QT_UINT32 (av1_data) - 0x8;
+ else
+ size = len - 0x8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (av1_data + 0x4)) {
+ case FOURCC_av1C:
+ {
+ /* parse, if found */
+ GstBuffer *buf;
+ guint8 pres_delay_field;
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "found av1C codec_data in stsd of size %d", size);
+
+ /* not enough data, just ignore and hope for the best */
+ if (size < 5)
+ break;
+
+ /* Content is:
+ * 4 bytes: atom length
+ * 4 bytes: fourcc
+ * 1 byte: version
+ * 3 bytes: flags
+ * 3 bits: reserved
+ * 1 bits: initial_presentation_delay_present
+ * 4 bits: initial_presentation_delay (if present else reserved
+ * rest: OBUs.
+ */
+
+ if (av1_data[9] != 0) {
+ GST_WARNING ("Unknown version %d of av1C box", av1_data[9]);
+ break;
+ }
+
+ /* We skip initial_presentation_delay* for now */
+ pres_delay_field = *(av1_data + 12);
+ if (pres_delay_field & (1 << 5)) {
+ gst_caps_set_simple (entry->caps,
+ "presentation-delay", G_TYPE_INT,
+ (gint) (pres_delay_field & 0x0F) + 1, NULL);
+ }
+ if (size > 5) {
+ buf = gst_buffer_new_and_alloc (size - 5);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+ gst_buffer_fill (buf, 0, av1_data + 13, size - 5);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ len -= size + 8;
+ av1_data += size + 8;
+ }
+
+ break;
+ }
default:
break;
}
GST_FOURCC_ARGS (fourcc), entry->caps);
} else if (stream->subtype == FOURCC_soun) {
+ GNode *wave;
int version, samplesize;
guint16 compression_id;
gboolean amrwb = FALSE;
case FOURCC_twos:
case FOURCC_sowt:
case FOURCC_raw_:
+ case FOURCC_lpcm:
break;
default:
{
}
mp4a = qtdemux_tree_get_child_by_index (stsd, stsd_index);
- if (!stream->protected) {
- } else {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != FOURCC_encv) {
- mp4v = NULL;
- }
- }
- if (stream->protected && fourcc == FOURCC_mp4a) {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != FOURCC_enca) {
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != fourcc) {
+ if (stream->protected && QTDEMUX_TREE_NODE_FOURCC (mp4a) != FOURCC_enca)
mp4a = NULL;
- }
- } else {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != FOURCC_mp4a) {
+ else if (!stream->protected)
mp4a = NULL;
- }
}
wave = NULL;
}
break;
}
+ case FOURCC_lpcm:
+ /* Fully handled elsewhere */
+ break;
default:
GST_INFO_OBJECT (qtdemux,
"unhandled type %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc));
}
entry->sampled = TRUE;
} else if (stream->subtype == FOURCC_subp || stream->subtype == FOURCC_text
- || stream->subtype == FOURCC_sbtl || stream->subtype == FOURCC_subt) {
+ || stream->subtype == FOURCC_sbtl || stream->subtype == FOURCC_subt
+ || stream->subtype == FOURCC_clcp) {
entry->sampled = TRUE;
entry->sparse = TRUE;
qtdemux_parse_udta (qtdemux, stream->stream_tags, udta);
}
- /* now we are ready to add the stream */
- if (qtdemux->n_streams >= GST_QTDEMUX_MAX_STREAMS)
- goto too_many_streams;
-
- if (!qtdemux->got_moov) {
- qtdemux->streams[qtdemux->n_streams] = stream;
- qtdemux->n_streams++;
- GST_DEBUG_OBJECT (qtdemux, "n_streams is now %d", qtdemux->n_streams);
- }
+ /* Insert and sort new stream in track-id order.
+ * This will help in comparing old/new streams during stream update check */
+ g_ptr_array_add (qtdemux->active_streams, stream);
+ g_ptr_array_sort (qtdemux->active_streams,
+ (GCompareFunc) qtdemux_track_id_compare_func);
+ GST_DEBUG_OBJECT (qtdemux, "n_streams is now %d",
+ QTDEMUX_N_STREAMS (qtdemux));
return TRUE;
/* ERRORS */
- skip_track:
- {
- GST_INFO_OBJECT (qtdemux, "skip disabled track");
- if (new_stream)
- gst_qtdemux_stream_free (qtdemux, stream);
- return TRUE;
- }
corrupt_file:
{
GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
(_("This file is corrupt and cannot be played.")), (NULL));
- if (new_stream)
- gst_qtdemux_stream_free (qtdemux, stream);
+ if (stream)
+ gst_qtdemux_stream_unref (stream);
return FALSE;
}
error_encrypted:
{
GST_ELEMENT_ERROR (qtdemux, STREAM, DECRYPT, (NULL), (NULL));
- if (new_stream)
- gst_qtdemux_stream_free (qtdemux, stream);
+ gst_qtdemux_stream_unref (stream);
return FALSE;
}
samples_failed:
/* we posted an error already */
/* free stbl sub-atoms */
gst_qtdemux_stbl_free (stream);
- if (new_stream)
- gst_qtdemux_stream_free (qtdemux, stream);
+ gst_qtdemux_stream_unref (stream);
return FALSE;
}
existing_stream:
{
GST_INFO_OBJECT (qtdemux, "stream with track id %i already exists",
track_id);
- if (new_stream)
- gst_qtdemux_stream_free (qtdemux, stream);
return TRUE;
}
unknown_stream:
{
GST_INFO_OBJECT (qtdemux, "unknown subtype %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (stream->subtype));
- if (new_stream)
- gst_qtdemux_stream_free (qtdemux, stream);
- return TRUE;
- }
- too_many_streams:
- {
- GST_ELEMENT_WARNING (qtdemux, STREAM, DEMUX,
- (_("This file contains too many streams. Only playing first %d"),
- GST_QTDEMUX_MAX_STREAMS), (NULL));
+ gst_qtdemux_stream_unref (stream);
return TRUE;
}
}
QtDemuxStream *stream = NULL;
gint64 size, sys_bitrate, sum_bitrate = 0;
GstClockTime duration;
- gint i;
guint bitrate;
+ gint i;
if (qtdemux->fragmented)
return;
return;
}
- for (i = 0; i < qtdemux->n_streams; i++) {
- switch (qtdemux->streams[i]->subtype) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *str = QTDEMUX_NTH_STREAM (qtdemux, i);
+ switch (str->subtype) {
case FOURCC_soun:
case FOURCC_vide:
GST_DEBUG_OBJECT (qtdemux, "checking bitrate for %" GST_PTR_FORMAT,
- CUR_STREAM (qtdemux->streams[i])->caps);
+ CUR_STREAM (str)->caps);
/* retrieve bitrate, prefer avg then max */
bitrate = 0;
- if (qtdemux->streams[i]->stream_tags) {
- gst_tag_list_get_uint (qtdemux->streams[i]->stream_tags,
- GST_TAG_MAXIMUM_BITRATE, &bitrate);
- GST_DEBUG_OBJECT (qtdemux, "max-bitrate: %u", bitrate);
- gst_tag_list_get_uint (qtdemux->streams[i]->stream_tags,
- GST_TAG_NOMINAL_BITRATE, &bitrate);
- GST_DEBUG_OBJECT (qtdemux, "nominal-bitrate: %u", bitrate);
- gst_tag_list_get_uint (qtdemux->streams[i]->stream_tags,
- GST_TAG_BITRATE, &bitrate);
- GST_DEBUG_OBJECT (qtdemux, "bitrate: %u", bitrate);
+ if (str->stream_tags) {
+ if (gst_tag_list_get_uint (str->stream_tags,
+ GST_TAG_MAXIMUM_BITRATE, &bitrate))
+ GST_DEBUG_OBJECT (qtdemux, "max-bitrate: %u", bitrate);
+ if (gst_tag_list_get_uint (str->stream_tags,
+ GST_TAG_NOMINAL_BITRATE, &bitrate))
+ GST_DEBUG_OBJECT (qtdemux, "nominal-bitrate: %u", bitrate);
+ if (gst_tag_list_get_uint (str->stream_tags,
+ GST_TAG_BITRATE, &bitrate))
+ GST_DEBUG_OBJECT (qtdemux, "bitrate: %u", bitrate);
}
if (bitrate)
sum_bitrate += bitrate;
">1 stream with unknown bitrate - bailing");
return;
} else
- stream = qtdemux->streams[i];
+ stream = str;
}
default:
static GstFlowReturn
qtdemux_prepare_streams (GstQTDemux * qtdemux)
{
- gint i;
GstFlowReturn ret = GST_FLOW_OK;
+ gint i;
GST_DEBUG_OBJECT (qtdemux, "prepare streams");
- for (i = 0; ret == GST_FLOW_OK && i < qtdemux->n_streams; i++) {
- QtDemuxStream *stream = qtdemux->streams[i];
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
guint32 sample_num = 0;
- GST_DEBUG_OBJECT (qtdemux, "stream %d, id %d, fourcc %" GST_FOURCC_FORMAT,
- i, stream->track_id, GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));
+ GST_DEBUG_OBJECT (qtdemux, "track-id %u, fourcc %" GST_FOURCC_FORMAT,
+ stream->track_id, GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));
if (qtdemux->fragmented) {
/* need all moov samples first */
* in push mode, we'll just have to deal with it */
if (G_UNLIKELY (qtdemux->pullbased && !stream->n_samples)) {
GST_DEBUG_OBJECT (qtdemux, "no samples for stream; discarding");
- gst_qtdemux_remove_stream (qtdemux, i);
+ g_ptr_array_remove_index (qtdemux->active_streams, i);
+ i--;
+ continue;
+ } else if (stream->track_id == qtdemux->chapters_track_id &&
+ (stream->subtype == FOURCC_text || stream->subtype == FOURCC_sbtl)) {
+ /* TODO - parse chapters track and expose it as GstToc; For now just ignore it
+ so that it doesn't look like a subtitle track */
+ g_ptr_array_remove_index (qtdemux->active_streams, i);
i--;
continue;
}
break;
++sample_num;
}
- if (stream->n_samples > 0 && stream->stbl_index >= 0) {
- stream->first_duration = stream->samples[0].duration;
- GST_LOG_OBJECT (qtdemux, "stream %d first duration %u",
- stream->track_id, stream->first_duration);
- }
}
return ret;
}
+ static gboolean
+ _stream_equal_func (const QtDemuxStream * stream, const gchar * stream_id)
+ {
+ return g_strcmp0 (stream->stream_id, stream_id) == 0;
+ }
+
+ static gboolean
+ qtdemux_is_streams_update (GstQTDemux * qtdemux)
+ {
+ gint i;
+
+ /* Different length, updated */
+ if (QTDEMUX_N_STREAMS (qtdemux) != qtdemux->old_streams->len)
+ return TRUE;
+
+ /* streams in list are sorted in track-id order */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ /* Different stream-id, updated */
+ if (g_strcmp0 (QTDEMUX_NTH_STREAM (qtdemux, i)->stream_id,
+ QTDEMUX_NTH_OLD_STREAM (qtdemux, i)->stream_id))
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static gboolean
+ qtdemux_reuse_and_configure_stream (GstQTDemux * qtdemux,
+ QtDemuxStream * oldstream, QtDemuxStream * newstream)
+ {
+ /* Connect old stream's srcpad to new stream */
+ newstream->pad = oldstream->pad;
+ oldstream->pad = NULL;
+
+ /* unset new_stream to prevent stream-start event */
+ newstream->new_stream = FALSE;
+
+ return gst_qtdemux_configure_stream (qtdemux, newstream);
+ }
+
+ /* g_ptr_array_find_with_equal_func is available since 2.54,
+ * replacement until we can depend unconditionally on the real one in GLib */
+ #if !GLIB_CHECK_VERSION(2,54,0)
+ #define g_ptr_array_find_with_equal_func qtdemux_ptr_array_find_with_equal_func
+ static gboolean
+ qtdemux_ptr_array_find_with_equal_func (GPtrArray * haystack,
+ gconstpointer needle, GEqualFunc equal_func, guint * index_)
+ {
+ guint i;
+
+ g_return_val_if_fail (haystack != NULL, FALSE);
+
+ if (equal_func == NULL)
+ equal_func = g_direct_equal;
+
+ for (i = 0; i < haystack->len; i++) {
+ if (equal_func (g_ptr_array_index (haystack, i), needle)) {
+ if (index_ != NULL)
+ *index_ = i;
+ return TRUE;
+ }
+ }
+
+ return FALSE;
+ }
+ #endif
+
+ static gboolean
+ qtdemux_update_streams (GstQTDemux * qtdemux)
+ {
+ gint i;
+ g_assert (qtdemux->streams_aware);
+
+ /* At below, figure out which stream in active_streams has identical stream-id
+ * with that of in old_streams. If there is matching stream-id,
+ * corresponding newstream will not be exposed again,
+ * but demux will reuse srcpad of matched old stream
+ *
+ * active_streams : newly created streams from the latest moov
+ * old_streams : existing streams (belong to previous moov)
+ */
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ QtDemuxStream *oldstream = NULL;
+ guint target;
+
+ GST_DEBUG_OBJECT (qtdemux, "track-id %u, fourcc %" GST_FOURCC_FORMAT,
+ stream->track_id, GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));
+
+ if (g_ptr_array_find_with_equal_func (qtdemux->old_streams,
+ stream->stream_id, (GEqualFunc) _stream_equal_func, &target)) {
+ oldstream = QTDEMUX_NTH_OLD_STREAM (qtdemux, target);
+
+ /* null pad stream cannot be reused */
+ if (oldstream->pad == NULL)
+ oldstream = NULL;
+ }
+
+ if (oldstream) {
+ GST_DEBUG_OBJECT (qtdemux, "Reuse track-id %d", oldstream->track_id);
+
+ if (!qtdemux_reuse_and_configure_stream (qtdemux, oldstream, stream))
+ return FALSE;
+
+ /* we don't need to preserve order of old streams */
+ g_ptr_array_remove_fast (qtdemux->old_streams, oldstream);
+ } else {
+ GstTagList *list;
+
+ /* now we have all info and can expose */
+ list = stream->stream_tags;
+ stream->stream_tags = NULL;
+ if (!gst_qtdemux_add_stream (qtdemux, stream, list))
+ return FALSE;
+ }
+ }
+
+ return TRUE;
+ }
+
+ /* Must be called with expose lock */
static GstFlowReturn
qtdemux_expose_streams (GstQTDemux * qtdemux)
{
gint i;
- GSList *oldpads = NULL;
- GSList *iter;
GST_DEBUG_OBJECT (qtdemux, "exposing streams");
- for (i = 0; i < qtdemux->n_streams; i++) {
- QtDemuxStream *stream = qtdemux->streams[i];
- GstPad *oldpad = stream->pad;
- GstTagList *list;
+ if (!qtdemux_is_streams_update (qtdemux)) {
+ GST_DEBUG_OBJECT (qtdemux, "Reuse all streams");
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *new_stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ QtDemuxStream *old_stream = QTDEMUX_NTH_OLD_STREAM (qtdemux, i);
+ if (!qtdemux_reuse_and_configure_stream (qtdemux, old_stream, new_stream))
+ return GST_FLOW_ERROR;
+ }
- GST_DEBUG_OBJECT (qtdemux, "stream %d, id %d, fourcc %" GST_FOURCC_FORMAT,
- i, stream->track_id, GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));
+ g_ptr_array_set_size (qtdemux->old_streams, 0);
+ qtdemux->need_segment = TRUE;
- if ((stream->subtype == FOURCC_text || stream->subtype == FOURCC_sbtl) &&
- stream->track_id == qtdemux->chapters_track_id) {
- /* TODO - parse chapters track and expose it as GstToc; For now just ignore it
- so that it doesn't look like a subtitle track */
- gst_qtdemux_remove_stream (qtdemux, i);
- i--;
- continue;
- }
+ return GST_FLOW_OK;
+ }
- /* now we have all info and can expose */
- list = stream->stream_tags;
- stream->stream_tags = NULL;
- if (oldpad)
- oldpads = g_slist_prepend (oldpads, oldpad);
- if (!gst_qtdemux_add_stream (qtdemux, stream, list))
+ if (qtdemux->streams_aware) {
+ if (!qtdemux_update_streams (qtdemux))
return GST_FLOW_ERROR;
+ } else {
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ GstTagList *list;
+
+ /* now we have all info and can expose */
+ list = stream->stream_tags;
+ stream->stream_tags = NULL;
+ if (!gst_qtdemux_add_stream (qtdemux, stream, list))
+ return GST_FLOW_ERROR;
+
+ }
}
gst_qtdemux_guess_bitrate (qtdemux);
gst_element_no_more_pads (GST_ELEMENT_CAST (qtdemux));
- for (iter = oldpads; iter; iter = g_slist_next (iter)) {
- GstPad *oldpad = iter->data;
- GstEvent *event;
+ /* Any stream still left in old_streams is no longer used */
+ for (i = 0; i < qtdemux->old_streams->len; i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_OLD_STREAM (qtdemux, i);
- event = gst_event_new_eos ();
- if (qtdemux->segment_seqnum)
- gst_event_set_seqnum (event, qtdemux->segment_seqnum);
+ if (stream->pad) {
+ GstEvent *event;
+
+ event = gst_event_new_eos ();
+ if (qtdemux->segment_seqnum)
+ gst_event_set_seqnum (event, qtdemux->segment_seqnum);
- gst_pad_push_event (oldpad, event);
- gst_pad_set_active (oldpad, FALSE);
- gst_element_remove_pad (GST_ELEMENT (qtdemux), oldpad);
- gst_flow_combiner_remove_pad (qtdemux->flowcombiner, oldpad);
- gst_object_unref (oldpad);
+ gst_pad_push_event (stream->pad, event);
+ }
}
+ g_ptr_array_set_size (qtdemux->old_streams, 0);
+
/* check if we should post a redirect in case there is a single trak
* and it is a redirecting trak */
- if (qtdemux->n_streams == 1 && qtdemux->streams[0]->redirect_uri != NULL) {
+ if (QTDEMUX_N_STREAMS (qtdemux) == 1 &&
+ QTDEMUX_NTH_STREAM (qtdemux, 0)->redirect_uri != NULL) {
GstMessage *m;
GST_INFO_OBJECT (qtdemux, "Issuing a redirect due to a single track with "
"an external content");
m = gst_message_new_element (GST_OBJECT_CAST (qtdemux),
gst_structure_new ("redirect",
- "new-location", G_TYPE_STRING, qtdemux->streams[0]->redirect_uri,
- NULL));
+ "new-location", G_TYPE_STRING,
+ QTDEMUX_NTH_STREAM (qtdemux, 0)->redirect_uri, NULL));
gst_element_post_message (GST_ELEMENT_CAST (qtdemux), m);
qtdemux->posted_redirect = TRUE;
}
- for (i = 0; i < qtdemux->n_streams; i++) {
- QtDemuxStream *stream = qtdemux->streams[i];
+ g_ptr_array_foreach (qtdemux->active_streams,
+ (GFunc) qtdemux_do_allocation, qtdemux);
- qtdemux_do_allocation (qtdemux, stream);
- }
+ qtdemux->need_segment = TRUE;
qtdemux->exposed = TRUE;
return GST_FLOW_OK;
const char *tag, const char *dummy, GNode * node)
{
GNode *data;
+ GstDateTime *datetime = NULL;
char *s;
int len;
int type;
s = g_strndup ((char *) data->data + 16, len - 16);
GST_DEBUG_OBJECT (qtdemux, "adding date '%s'", s);
+ datetime = gst_date_time_new_from_iso8601_string (s);
+ if (datetime != NULL) {
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_DATE_TIME,
+ datetime, NULL);
+ gst_date_time_unref (datetime);
+ }
+
ret = sscanf (s, "%u-%u-%u", &y, &m, &d);
if (ret >= 1 && y > 1500 && y < 3000) {
GDate *date;
GNode *trak;
GNode *udta;
GNode *mvex;
- GstClockTime duration;
GNode *pssh;
guint64 creation_time;
GstDateTime *datetime = NULL;
qtdemux_parse_mehd (qtdemux, &mehd_data);
}
- /* set duration in the segment info */
- gst_qtdemux_get_duration (qtdemux, &duration);
- if (duration) {
+ /* Update the movie segment duration, unless it was directly given to us
+ * by upstream. Otherwise leave it as is, as we don't want to mangle the
+ * duration provided by upstream that may come e.g. from a MPD file. */
+ if (!qtdemux->upstream_format_is_time) {
+ GstClockTime duration;
+ /* set duration in the segment info */
+ gst_qtdemux_get_duration (qtdemux, &duration);
qtdemux->segment.duration = duration;
/* also do not exceed duration; stop is set that way post seek anyway,
* and segment activation falls back to duration,
return len;
}
+ static GList *
+ parse_xiph_stream_headers (GstQTDemux * qtdemux, gpointer codec_data,
+ gsize codec_data_size)
+ {
+ GList *list = NULL;
+ guint8 *p = codec_data;
+ gint i, offset, num_packets;
+ guint *length, last;
+
+ GST_MEMDUMP_OBJECT (qtdemux, "xiph codec data", codec_data, codec_data_size);
+
+ if (codec_data == NULL || codec_data_size == 0)
+ goto error;
+
+ /* start of the stream and vorbis audio or theora video, need to
+ * send the codec_priv data as first three packets */
+ num_packets = p[0] + 1;
+ GST_DEBUG_OBJECT (qtdemux,
+ "%u stream headers, total length=%" G_GSIZE_FORMAT " bytes",
+ (guint) num_packets, codec_data_size);
+
+ /* Let's put some limits; we don't think there even is a xiph codec
+ * with more than 3-4 headers */
+ if (G_UNLIKELY (num_packets > 16)) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Unlikely number of xiph headers, most likely not valid");
+ goto error;
+ }
+
+ length = g_alloca (num_packets * sizeof (guint));
+ last = 0;
+ offset = 1;
+
+ /* first packets, read length values */
+ for (i = 0; i < num_packets - 1; i++) {
+ length[i] = 0;
+ while (offset < codec_data_size) {
+ length[i] += p[offset];
+ if (p[offset++] != 0xff)
+ break;
+ }
+ last += length[i];
+ }
+ if (offset + last > codec_data_size)
+ goto error;
+
+ /* last packet is the remaining size */
+ length[i] = codec_data_size - offset - last;
+
+ for (i = 0; i < num_packets; i++) {
+ GstBuffer *hdr;
+
+ GST_DEBUG_OBJECT (qtdemux, "buffer %d: %u bytes", i, (guint) length[i]);
+
+ if (offset + length[i] > codec_data_size)
+ goto error;
+
+ hdr = gst_buffer_new_wrapped (g_memdup (p + offset, length[i]), length[i]);
+ list = g_list_append (list, hdr);
+
+ offset += length[i];
+ }
+
+ return list;
+
+ /* ERRORS */
+ error:
+ {
+ if (list != NULL)
+ g_list_free_full (list, (GDestroyNotify) gst_buffer_unref);
+ return NULL;
+ }
+
+ }
+
/* this can change the codec originally present in @list */
static void
gst_qtdemux_handle_esds (GstQTDemux * qtdemux, QtDemuxStream * stream,
guint8 *data_ptr = NULL;
int data_len = 0;
guint8 object_type_id = 0;
+ guint8 stream_type = 0;
const char *codec_name = NULL;
GstCaps *caps = NULL;
switch (tag) {
case ES_DESCRIPTOR_TAG:
- GST_DEBUG_OBJECT (qtdemux, "ID %04x", QT_UINT16 (ptr));
- GST_DEBUG_OBJECT (qtdemux, "priority %04x", QT_UINT8 (ptr + 2));
+ GST_DEBUG_OBJECT (qtdemux, "ID 0x%04x", QT_UINT16 (ptr));
+ GST_DEBUG_OBJECT (qtdemux, "priority 0x%04x", QT_UINT8 (ptr + 2));
ptr += 3;
break;
case DECODER_CONFIG_DESC_TAG:{
guint max_bitrate, avg_bitrate;
object_type_id = QT_UINT8 (ptr);
+ stream_type = QT_UINT8 (ptr + 1) >> 2;
max_bitrate = QT_UINT32 (ptr + 5);
avg_bitrate = QT_UINT32 (ptr + 9);
GST_DEBUG_OBJECT (qtdemux, "object_type_id %02x", object_type_id);
- GST_DEBUG_OBJECT (qtdemux, "stream_type %02x", QT_UINT8 (ptr + 1));
+ GST_DEBUG_OBJECT (qtdemux, "stream_type %02x", stream_type);
GST_DEBUG_OBJECT (qtdemux, "buffer_size_db %02x", QT_UINT24 (ptr + 2));
GST_DEBUG_OBJECT (qtdemux, "max bitrate %u", max_bitrate);
GST_DEBUG_OBJECT (qtdemux, "avg bitrate %u", avg_bitrate);
case 0x69: /* MPEG-2 BC audio */
case 0x6B: /* MPEG-1 audio */
caps = gst_caps_new_simple ("audio/mpeg",
- "mpegversion", G_TYPE_INT, 1, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+ "mpegversion", G_TYPE_INT, 1, NULL);
codec_name = "MPEG-1 audio";
break;
case 0x6A: /* MPEG-1 */
caps = gst_caps_new_simple ("audio/x-dts",
"framed", G_TYPE_BOOLEAN, TRUE, NULL);
break;
+ case 0xDD:
+ if (stream_type == 0x05 && data_ptr) {
+ GList *headers =
+ parse_xiph_stream_headers (qtdemux, data_ptr, data_len);
+ if (headers) {
+ GList *tmp;
+ GValue arr_val = G_VALUE_INIT;
+ GValue buf_val = G_VALUE_INIT;
+ GstStructure *s;
+
+ /* Let's assume it's vorbis if it's an audio stream of type 0xdd and we have codec data that extracts properly */
+ codec_name = "Vorbis";
+ caps = gst_caps_new_empty_simple ("audio/x-vorbis");
+ g_value_init (&arr_val, GST_TYPE_ARRAY);
+ g_value_init (&buf_val, GST_TYPE_BUFFER);
+ for (tmp = headers; tmp; tmp = tmp->next) {
+ g_value_set_boxed (&buf_val, (GstBuffer *) tmp->data);
+ gst_value_array_append_value (&arr_val, &buf_val);
+ }
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_take_value (s, "streamheader", &arr_val);
+ g_value_unset (&buf_val);
+ g_list_free (headers);
+
+ data_ptr = NULL;
+ data_len = 0;
+ }
+ }
+ break;
case 0xE1: /* QCELP */
/* QCELP, the codec_data is a riff tag (little endian) with
* more info (http://ftp.3gpp2.org/TSGC/Working/2003/2003-05-SanDiego/TSG-C-2003-05-San%20Diego/WG1/SWG12/C12-20030512-006%20=%20C12-20030217-015_Draft_Baseline%20Text%20of%20FFMS_R2.doc). */
GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
switch (fourcc) {
- case GST_MAKE_FOURCC ('p', 'n', 'g', ' '):
+ case FOURCC_png:
_codec ("PNG still images");
caps = gst_caps_new_empty_simple ("image/png");
break;
caps = gst_caps_from_string ("video/x-dnxhd");
break;
case FOURCC_VP80:
+ case FOURCC_vp08:
_codec ("On2 VP8");
caps = gst_caps_from_string ("video/x-vp8");
break;
+ case FOURCC_vp09:
+ _codec ("Google VP9");
+ caps = gst_caps_from_string ("video/x-vp9");
+ break;
case FOURCC_apcs:
_codec ("Apple ProRes LT");
caps =
caps = gst_caps_new_simple ("video/x-wmv",
"wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
break;
+ case FOURCC_av01:
+ _codec ("AV1");
+ caps = gst_caps_new_empty_simple ("video/x-av1");
+ break;
case GST_MAKE_FOURCC ('k', 'p', 'c', 'd'):
default:
{
_codec ("Opus");
caps = gst_caps_new_empty_simple ("audio/x-opus");
break;
- case GST_MAKE_FOURCC ('l', 'p', 'c', 'm'):
+ case FOURCC_lpcm:
{
guint32 flags = 0;
guint32 depth = 0;
};
_codec ("Raw LPCM audio");
- if (data && len >= 56) {
- depth = QT_UINT32 (data + 40);
- flags = QT_UINT32 (data + 44);
- width = QT_UINT32 (data + 48) * 8 / entry->n_channels;
+ if (data && len >= 36) {
+ depth = QT_UINT32 (data + 24);
+ flags = QT_UINT32 (data + 28);
+ width = QT_UINT32 (data + 32) * 8 / entry->n_channels;
}
if ((flags & FLAG_IS_FLOAT) == 0) {
if (depth == 0)
depth = 16;
if (width == 0)
width = 16;
+ if ((flags & FLAG_IS_ALIGNED_HIGH))
+ depth = width;
+
format = gst_audio_format_build_integer ((flags & FLAG_IS_SIGNED) ?
TRUE : FALSE, (flags & FLAG_IS_BIG_ENDIAN) ?
G_BIG_ENDIAN : G_LITTLE_ENDIAN, width, depth);
caps = gst_caps_new_simple ("audio/x-raw",
- "format", G_TYPE_STRING, gst_audio_format_to_string (format),
- "layout", G_TYPE_STRING, (flags & FLAG_IS_NON_INTERLEAVED) ?
- "non-interleaved" : "interleaved", NULL);
+ "format", G_TYPE_STRING,
+ format !=
+ GST_AUDIO_FORMAT_UNKNOWN ? gst_audio_format_to_string (format) :
+ "UNKNOWN", "layout", G_TYPE_STRING,
+ (flags & FLAG_IS_NON_INTERLEAVED) ? "non-interleaved" :
+ "interleaved", NULL);
stream->alignment = GST_ROUND_UP_8 (depth);
stream->alignment = round_up_pow2 (stream->alignment);
} else {
_codec ("XML subtitles");
caps = gst_caps_new_empty_simple ("application/ttml+xml");
break;
+ case FOURCC_c608:
+ _codec ("CEA 608 Closed Caption");
+ caps =
+ gst_caps_new_simple ("closedcaption/x-cea-608", "format",
+ G_TYPE_STRING, "s334-1a", NULL);
+ stream->need_process = TRUE;
+ stream->need_split = TRUE;
+ break;
+ case FOURCC_c708:
+ _codec ("CEA 708 Closed Caption");
+ caps =
+ gst_caps_new_simple ("closedcaption/x-cea-708", "format",
+ G_TYPE_STRING, "cdp", NULL);
+ stream->need_process = TRUE;
+ break;
+
default:
{
caps = _get_unknown_codec_name ("text", fourcc);
G_BEGIN_DECLS
- GST_DEBUG_CATEGORY_EXTERN (qtdemux_debug);
- #define GST_CAT_DEFAULT qtdemux_debug
-
#define GST_TYPE_QTDEMUX \
(gst_qtdemux_get_type())
#define GST_QTDEMUX(obj) \
#define GST_QT_DEMUX_PRIVATE_TAG "private-qt-tag"
#define GST_QT_DEMUX_CLASSIFICATION_TAG "classification"
- #define GST_QTDEMUX_MAX_STREAMS 32
-
typedef struct _GstQTDemux GstQTDemux;
typedef struct _GstQTDemuxClass GstQTDemuxClass;
typedef struct _QtDemuxStream QtDemuxStream;
QTDEMUX_STATE_BUFFER_MDAT /* Buffering the mdat atom */
};
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+typedef struct _QtDemuxSphericalMetadata QtDemuxSphericalMetadata;
+#endif
+
struct _GstQTDemux {
GstElement element;
gboolean posted_redirect;
- QtDemuxStream *streams[GST_QTDEMUX_MAX_STREAMS];
- gint n_streams;
+ /* Protect pad exposing from flush event */
+ GMutex expose_lock;
+
+ /* list of QtDemuxStream */
+ GPtrArray *active_streams;
+ GPtrArray *old_streams;
+
gint n_video_streams;
gint n_audio_streams;
gint n_sub_streams;
/* configured playback region */
GstSegment segment;
- /* The SEGMENT_EVENT from upstream *OR* generated from segment (above) */
- GstEvent *pending_newsegment;
+ /* PUSH-BASED only: If the initial segment event, or a segment consequence of
+ * a seek or incoming TIME segment from upstream needs to be pushed. This
+ * variable is used instead of pushing the event directly because at that
+ * point we may not have yet emitted the srcpads. */
+ gboolean need_segment;
guint32 segment_seqnum;
guint64 cenc_aux_info_offset;
guint8 *cenc_aux_info_sizes;
guint32 cenc_aux_sample_count;
+ gchar *preferred_protection_system_id;
+ /* Whether the parent bin is streams-aware, meaning we can
+ * add/remove streams at any point in time */
+ gboolean streams_aware;
/*
* ALL VARIABLES BELOW ARE ONLY USED IN PUSH-BASED MODE
* header start.
* Note : This is not computed from the GST_BUFFER_OFFSET field */
guint64 fragment_start_offset;
- #endif
+
+ /* These two fields are used to perform an implicit seek when a fragmented
+ * file whose first tfdt is not zero. This way if the first fragment starts
+ * at 1 hour, the user does not have to wait 1 hour or perform a manual seek
+ * for the image to move and the sound to play.
+ *
+ * This implicit seek is only done if the first parsed fragment has a non-zero
+ * decode base time and a seek has not been received previously, hence these
+ * fields. */
+ gboolean received_seek;
+ gboolean first_moof_already_parsed;
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ QtDemuxSphericalMetadata *spherical_metadata;
+#endif
};
struct _GstQTDemuxClass {
#include <string.h>
#include <glib/gprintf.h>
+ #include <gst/base/base.h>
+
/* For AVI compatibility mode
and for fourcc stuff */
#include <gst/riff/riff-read.h>
PROP_0,
PROP_METADATA,
PROP_STREAMINFO,
- PROP_MAX_GAP_TIME
+ PROP_MAX_GAP_TIME,
+ PROP_MAX_BACKTRACK_DISTANCE
};
- #define DEFAULT_MAX_GAP_TIME (2 * GST_SECOND)
- #define INVALID_DATA_THRESHOLD (2 * 1024 * 1024)
+ #define DEFAULT_MAX_GAP_TIME (2 * GST_SECOND)
+ #define DEFAULT_MAX_BACKTRACK_DISTANCE 30
+ #define INVALID_DATA_THRESHOLD (2 * 1024 * 1024)
static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
static gboolean gst_matroska_demux_handle_sink_event (GstPad * pad,
GstObject * parent, GstEvent * event);
+ static gboolean gst_matroska_demux_handle_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
static GstFlowReturn gst_matroska_demux_chain (GstPad * pad,
GstObject * object, GstBuffer * buffer);
static GstCaps
* gst_matroska_demux_subtitle_caps (GstMatroskaTrackSubtitleContext *
subtitlecontext, const gchar * codec_id, gpointer data, guint size);
+ static const gchar *gst_matroska_track_encryption_algorithm_name (gint val);
+ static const gchar *gst_matroska_track_encryption_cipher_mode_name (gint val);
+ static const gchar *gst_matroska_track_encoding_scope_name (gint val);
/* stream methods */
static void gst_matroska_demux_reset (GstElement * element);
"gaps longer than this (0 = disabled).", 0, G_MAXUINT64,
DEFAULT_MAX_GAP_TIME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MAX_BACKTRACK_DISTANCE,
+ g_param_spec_uint ("max-backtrack-distance",
+ "Maximum backtrack distance",
+ "Maximum backtrack distance in seconds when seeking without "
+ "an index in pull mode and searching for a keyframe "
+ "(0 = disable backtracking).",
+ 0, G_MAXUINT, DEFAULT_MAX_BACKTRACK_DISTANCE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_matroska_demux_change_state);
gstelement_class->send_event =
GST_DEBUG_FUNCPTR (gst_matroska_demux_chain));
gst_pad_set_event_function (demux->common.sinkpad,
GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_sink_event));
+ gst_pad_set_query_function (demux->common.sinkpad,
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_sink_query));
gst_element_add_pad (GST_ELEMENT (demux), demux->common.sinkpad);
/* init defaults for common read context */
/* property defaults */
demux->max_gap_time = DEFAULT_MAX_GAP_TIME;
+ demux->max_backtrack_distance = DEFAULT_MAX_BACKTRACK_DISTANCE;
GST_OBJECT_FLAG_SET (demux, GST_ELEMENT_FLAG_INDEXABLE);
demux->num_a_streams = 0;
demux->num_t_streams = 0;
demux->num_v_streams = 0;
+ demux->have_nonintraonly_v_streams = FALSE;
demux->have_group_id = FALSE;
demux->group_id = G_MAXUINT;
demux->to_time = GST_CLOCK_TIME_NONE;
demux->cluster_time = GST_CLOCK_TIME_NONE;
demux->cluster_offset = 0;
+ demux->cluster_prevsize = 0;
+ demux->seen_cluster_prevsize = FALSE;
demux->next_cluster_offset = 0;
+ demux->stream_last_time = GST_CLOCK_TIME_NONE;
+ demux->last_cluster_offset = 0;
demux->index_offset = 0;
demux->seekable = FALSE;
demux->need_segment = FALSE;
demux->cached_length = G_MAXUINT64;
+ if (demux->deferred_seek_event)
+ gst_event_unref (demux->deferred_seek_event);
+ demux->deferred_seek_event = NULL;
+ demux->deferred_seek_pad = NULL;
+
gst_flow_combiner_clear (demux->flowcombiner);
}
GstMapInfo map;
gpointer data;
gsize size;
+ GstBuffer *out_buf = buf;
g_return_val_if_fail (GST_IS_BUFFER (buf), NULL);
GST_DEBUG ("decoding buffer %p", buf);
- gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_buffer_map (out_buf, &map, GST_MAP_READ);
data = map.data;
size = map.size;
if (gst_matroska_decode_data (context->encodings, &data, &size,
GST_MATROSKA_TRACK_ENCODING_SCOPE_FRAME, FALSE)) {
- gst_buffer_unmap (buf, &map);
- gst_buffer_unref (buf);
- return gst_buffer_new_wrapped (data, size);
+ if (data != map.data) {
+ gst_buffer_unmap (out_buf, &map);
+ gst_buffer_unref (out_buf);
+ out_buf = gst_buffer_new_wrapped (data, size);
+ } else {
+ gst_buffer_unmap (out_buf, &map);
+ }
} else {
GST_DEBUG ("decode data failed");
- gst_buffer_unmap (buf, &map);
- gst_buffer_unref (buf);
+ gst_buffer_unmap (out_buf, &map);
+ gst_buffer_unref (out_buf);
return NULL;
}
+ /* Encrypted stream */
+ if (context->protection_info) {
+
+ GstStructure *info_protect = gst_structure_copy (context->protection_info);
+ gboolean encrypted = FALSE;
+
+ gst_buffer_map (out_buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ if (gst_matroska_parse_protection_meta (&data, &size, info_protect,
+ &encrypted)) {
+ if (data != map.data) {
+ GstBuffer *tmp_buf;
+
+ gst_buffer_unmap (out_buf, &map);
+ tmp_buf = out_buf;
+ out_buf = gst_buffer_copy_region (tmp_buf, GST_BUFFER_COPY_ALL,
+ gst_buffer_get_size (tmp_buf) - size, size);
+ gst_buffer_unref (tmp_buf);
+ if (encrypted)
+ gst_buffer_add_protection_meta (out_buf, info_protect);
+ else
+ gst_structure_free (info_protect);
+ } else {
+ gst_buffer_unmap (out_buf, &map);
+ gst_structure_free (info_protect);
+ }
+ } else {
+ GST_WARNING ("Adding protection metadata failed");
+ gst_buffer_unmap (out_buf, &map);
+ gst_buffer_unref (out_buf);
+ gst_structure_free (info_protect);
+ return NULL;
+ }
+ }
+
+ return out_buf;
}
static void
}
static GstFlowReturn
- gst_matroska_demux_add_stream (GstMatroskaDemux * demux, GstEbmlRead * ebml)
+ gst_matroska_demux_parse_colour (GstMatroskaDemux * demux, GstEbmlRead * ebml,
+ GstMatroskaTrackVideoContext * video_context)
+ {
+ GstFlowReturn ret;
+ GstVideoColorimetry colorimetry;
+ guint32 id;
+ guint64 num;
+
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
+ colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
+
+ DEBUG_ELEMENT_START (demux, ebml, "TrackVideoColour");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+ goto beach;
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ goto beach;
+
+ switch (id) {
+ case GST_MATROSKA_ID_VIDEOMATRIXCOEFFICIENTS:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+
+ switch (num) {
+ case 0:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
+ break;
+ case 1:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT709;
+ break;
+ case 2:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
+ break;
+ case 4:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_FCC;
+ break;
+ /* FIXME: "5: BT470BG" is undefined in GstVideoColorMatrix
+ * but it's functionally same as "6: BT601" */
+ case 5:
+ case 6:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ break;
+ case 7:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
+ break;
+ case 9:
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
+ break;
+ default:
+ GST_FIXME_OBJECT (demux, "Unsupported color matrix coefficients %"
+ G_GUINT64_FORMAT, num);
+ break;
+ }
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEORANGE:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+
+ switch (num) {
+ case 0:
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+ break;
+ case 1:
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235;
+ break;
+ case 2:
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
+ break;
+ default:
+ GST_FIXME_OBJECT (demux, "Unsupported color range %"
+ G_GUINT64_FORMAT, num);
+ break;
+ }
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEOTRANSFERCHARACTERISTICS:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+
+ switch (num) {
+ /* FIXME: "6: BT601" and "14: BT2020_10" are undefined in
+ * GstVideoTransferFunction, but functionally same as "1: BT709" */
+ case 1:
+ case 6:
+ case 14:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_BT709;
+ break;
+ case 2:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+ break;
+ case 4:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA22;
+ break;
+ case 5:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA28;
+ break;
+ case 7:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_SMPTE240M;
+ break;
+ case 8:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA10;
+ break;
+ case 9:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_LOG100;
+ break;
+ case 10:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_LOG316;
+ break;
+ case 13:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_SRGB;
+ break;
+ case 15:
+ colorimetry.transfer = GST_VIDEO_TRANSFER_BT2020_12;
+ break;
+ default:
+ GST_FIXME_OBJECT (demux,
+ "Unsupported color transfer characteristics %"
+ G_GUINT64_FORMAT, num);
+ break;
+ }
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEOPRIMARIES:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+
+ switch (num) {
+ case 1:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
+ break;
+ case 2:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
+ break;
+ case 4:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
+ break;
+ case 5:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
+ break;
+ case 6:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
+ break;
+ case 7:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
+ break;
+ case 8:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_FILM;
+ break;
+ case 9:
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
+ break;
+ default:
+ GST_FIXME_OBJECT (demux, "Unsupported color primaries %"
+ G_GUINT64_FORMAT, num);
+ break;
+ }
+ break;
+ }
+
+ default:
+ GST_FIXME_OBJECT (demux, "Unsupported subelement 0x%x in Colour", id);
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ memcpy (&video_context->colorimetry, &colorimetry,
+ sizeof (GstVideoColorimetry));
+
+ beach:
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackVideoColour", ret);
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_parse_stream (GstMatroskaDemux * demux, GstEbmlRead * ebml,
+ GstMatroskaTrackContext ** dest_context)
{
- GstElementClass *klass = GST_ELEMENT_GET_CLASS (demux);
GstMatroskaTrackContext *context;
- GstPadTemplate *templ = NULL;
- GstStreamFlags stream_flags;
GstCaps *caps = NULL;
GstTagList *cached_taglist;
- gchar *padname = NULL;
GstFlowReturn ret;
guint32 id, riff_fourcc = 0;
guint16 riff_audio_fmt = 0;
- GstEvent *stream_start;
gchar *codec = NULL;
- gchar *stream_id;
DEBUG_ELEMENT_START (demux, ebml, "TrackEntry");
/* allocate generic... if we know the type, we'll g_renew()
* with the precise type */
context = g_new0 (GstMatroskaTrackContext, 1);
- g_ptr_array_add (demux->common.src, context);
- context->index = demux->common.num_streams;
context->index_writer_id = -1;
context->type = 0; /* no type yet */
context->default_duration = 0;
context->dts_only = FALSE;
context->intra_only = FALSE;
context->tags = gst_tag_list_new_empty ();
- demux->common.num_streams++;
- g_assert (demux->common.src->len == demux->common.num_streams);
+ g_queue_init (&context->protection_event_queue);
+ context->protection_info = NULL;
- GST_DEBUG_OBJECT (demux, "Stream number %d", context->index);
+ GST_DEBUG_OBJECT (demux, "Parsing a TrackEntry (%d tracks parsed so far)",
+ demux->common.num_streams);
/* try reading the trackentry headers */
while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
GST_ERROR_OBJECT (demux, "Invalid TrackNumber 0");
ret = GST_FLOW_ERROR;
break;
- } else if (!gst_matroska_read_common_tracknumber_unique (&demux->common,
- num)) {
- GST_ERROR_OBJECT (demux, "TrackNumber %" G_GUINT64_FORMAT
- " is not unique", num);
- ret = GST_FLOW_ERROR;
- break;
}
GST_DEBUG_OBJECT (demux, "TrackNumber: %" G_GUINT64_FORMAT, num);
context->type = 0;
break;
}
- g_ptr_array_index (demux->common.src, demux->common.num_streams - 1)
- = context;
break;
}
break;
}
videocontext = (GstMatroskaTrackVideoContext *) context;
- g_ptr_array_index (demux->common.src, demux->common.num_streams - 1)
- = context;
while (ret == GST_FLOW_OK &&
gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
break;
- if (num)
- context->flags |= GST_MATROSKA_VIDEOTRACK_INTERLACED;
+ if (num == 1)
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_INTERLACED;
+ else if (num == 2)
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE;
else
- context->flags &= ~GST_MATROSKA_VIDEOTRACK_INTERLACED;
- GST_DEBUG_OBJECT (demux, "TrackVideoInterlaced: %d",
- (context->flags & GST_MATROSKA_VIDEOTRACK_INTERLACED) ? 1 :
- 0);
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_UNKNOWN;
+
+ GST_DEBUG_OBJECT (demux, "video track interlacing mode: %d",
+ videocontext->interlace_mode);
break;
}
g_free (data);
break;
}
+
+ /* color info */
+ case GST_MATROSKA_ID_VIDEOCOLOUR:{
+ ret = gst_matroska_demux_parse_colour (demux, ebml, videocontext);
+ break;
+ }
+
case GST_MATROSKA_ID_VIDEOSTEREOMODE:
{
guint64 num;
break;
audiocontext = (GstMatroskaTrackAudioContext *) context;
- g_ptr_array_index (demux->common.src, demux->common.num_streams - 1)
- = context;
while (ret == GST_FLOW_OK &&
gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
if (ret == GST_FLOW_OK || ret == GST_FLOW_EOS)
GST_WARNING_OBJECT (ebml, "Unknown stream/codec in track entry header");
- demux->common.num_streams--;
- g_ptr_array_remove_index (demux->common.src, demux->common.num_streams);
- g_assert (demux->common.src->len == demux->common.num_streams);
gst_matroska_track_free (context);
-
+ context = NULL;
+ *dest_context = NULL;
return ret;
}
if (cached_taglist)
gst_tag_list_insert (context->tags, cached_taglist, GST_TAG_MERGE_APPEND);
- /* now create the GStreamer connectivity */
+ /* compute caps */
switch (context->type) {
case GST_MATROSKA_TRACK_TYPE_VIDEO:{
GstMatroskaTrackVideoContext *videocontext =
(GstMatroskaTrackVideoContext *) context;
- padname = g_strdup_printf ("video_%u", demux->num_v_streams++);
- templ = gst_element_class_get_pad_template (klass, "video_%u");
caps = gst_matroska_demux_video_caps (videocontext,
context->codec_id, context->codec_priv,
context->codec_priv_size, &codec, &riff_fourcc);
GstMatroskaTrackAudioContext *audiocontext =
(GstMatroskaTrackAudioContext *) context;
- padname = g_strdup_printf ("audio_%u", demux->num_a_streams++);
- templ = gst_element_class_get_pad_template (klass, "audio_%u");
caps = gst_matroska_demux_audio_caps (audiocontext,
context->codec_id, context->codec_priv, context->codec_priv_size,
&codec, &riff_audio_fmt);
GstMatroskaTrackSubtitleContext *subtitlecontext =
(GstMatroskaTrackSubtitleContext *) context;
- padname = g_strdup_printf ("subtitle_%u", demux->num_t_streams++);
- templ = gst_element_class_get_pad_template (klass, "subtitle_%u");
caps = gst_matroska_demux_subtitle_caps (subtitlecontext,
context->codec_id, context->codec_priv, context->codec_priv_size);
break;
lang = gst_tag_get_language_code (context->language);
gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
GST_TAG_LANGUAGE_CODE, (lang) ? lang : context->language, NULL);
+
+ if (context->name) {
+ gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_TITLE, context->name, NULL);
+ }
context->tags_changed = TRUE;
}
context->stream_headers, caps);
}
+ if (context->encodings) {
+ GstMatroskaTrackEncoding *enc;
+ guint i;
+
+ for (i = 0; i < context->encodings->len; i++) {
+ enc = &g_array_index (context->encodings, GstMatroskaTrackEncoding, i);
+ if (enc->type == GST_MATROSKA_ENCODING_ENCRYPTION /* encryption */ ) {
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ if (!gst_structure_has_name (s, "application/x-webm-enc")) {
+ gst_structure_set (s, "original-media-type", G_TYPE_STRING,
+ gst_structure_get_name (s), NULL);
+ gst_structure_set (s, "encryption-algorithm", G_TYPE_STRING,
+ gst_matroska_track_encryption_algorithm_name (enc->enc_algo),
+ NULL);
+ gst_structure_set (s, "encoding-scope", G_TYPE_STRING,
+ gst_matroska_track_encoding_scope_name (enc->scope), NULL);
+ gst_structure_set (s, "cipher-mode", G_TYPE_STRING,
+ gst_matroska_track_encryption_cipher_mode_name
+ (enc->enc_cipher_mode), NULL);
+ gst_structure_set_name (s, "application/x-webm-enc");
+ }
+ }
+ }
+ }
+
+ context->caps = caps;
+
+ /* tadaah! */
+ *dest_context = context;
+ return ret;
+ }
+
+ static void
+ gst_matroska_demux_add_stream (GstMatroskaDemux * demux,
+ GstMatroskaTrackContext * context)
+ {
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (demux);
+ gchar *padname = NULL;
+ GstPadTemplate *templ = NULL;
+ GstStreamFlags stream_flags;
+
+ GstEvent *stream_start;
+
+ gchar *stream_id;
+
+ g_ptr_array_add (demux->common.src, context);
+ context->index = demux->common.num_streams++;
+ g_assert (demux->common.src->len == demux->common.num_streams);
+ g_ptr_array_index (demux->common.src, demux->common.num_streams - 1) =
+ context;
+
+ /* now create the GStreamer connectivity */
+ switch (context->type) {
+ case GST_MATROSKA_TRACK_TYPE_VIDEO:
+ padname = g_strdup_printf ("video_%u", demux->num_v_streams++);
+ templ = gst_element_class_get_pad_template (klass, "video_%u");
+
+ if (!context->intra_only)
+ demux->have_nonintraonly_v_streams = TRUE;
+ break;
+
+ case GST_MATROSKA_TRACK_TYPE_AUDIO:
+ padname = g_strdup_printf ("audio_%u", demux->num_a_streams++);
+ templ = gst_element_class_get_pad_template (klass, "audio_%u");
+ break;
+
+ case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
+ padname = g_strdup_printf ("subtitle_%u", demux->num_t_streams++);
+ templ = gst_element_class_get_pad_template (klass, "subtitle_%u");
+ break;
+
+ default:
+ /* we should already have quit by now */
+ g_assert_not_reached ();
+ }
+
/* the pad in here */
context->pad = gst_pad_new_from_template (templ, padname);
- context->caps = caps;
gst_pad_set_event_function (context->pad,
GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_src_event));
GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_src_query));
GST_INFO_OBJECT (demux, "Adding pad '%s' with caps %" GST_PTR_FORMAT,
- padname, caps);
+ padname, context->caps);
gst_pad_set_element_private (context->pad, context);
stream_flags |= GST_STREAM_FLAG_SPARSE;
if (context->flags & GST_MATROSKA_TRACK_DEFAULT)
stream_flags |= GST_STREAM_FLAG_SELECT;
+ else if (!(context->flags & GST_MATROSKA_TRACK_ENABLED))
+ stream_flags |= GST_STREAM_FLAG_UNSELECT;
+
gst_event_set_stream_flags (stream_start, stream_flags);
gst_pad_push_event (context->pad, stream_start);
gst_pad_set_caps (context->pad, context->caps);
gst_flow_combiner_add_pad (demux->flowcombiner, context->pad);
g_free (padname);
-
- /* tadaah! */
- return ret;
}
static gboolean
if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
/* no seeking until we are (safely) ready */
if (demux->common.state != GST_MATROSKA_READ_STATE_DATA) {
- GST_DEBUG_OBJECT (demux, "not ready for seeking yet");
- gst_event_unref (event);
- return FALSE;
+ GST_DEBUG_OBJECT (demux,
+ "not ready for seeking yet, deferring seek: %" GST_PTR_FORMAT, event);
+ if (demux->deferred_seek_event)
+ gst_event_unref (demux->deferred_seek_event);
+ demux->deferred_seek_event = event;
+ demux->deferred_seek_pad = NULL;
+ return TRUE;
}
res = gst_matroska_demux_handle_seek_event (demux, NULL, event);
} else {
/* searches for a cluster start from @pos,
* return GST_FLOW_OK and cluster position in @pos if found */
static GstFlowReturn
- gst_matroska_demux_search_cluster (GstMatroskaDemux * demux, gint64 * pos)
+ gst_matroska_demux_search_cluster (GstMatroskaDemux * demux, gint64 * pos,
+ gboolean forward)
{
gint64 newpos = *pos;
gint64 orig_offset;
GstFlowReturn ret = GST_FLOW_OK;
- const guint chunk = 64 * 1024;
+ const guint chunk = 128 * 1024;
GstBuffer *buf = NULL;
GstMapInfo map;
gpointer data = NULL;
orig_offset = demux->common.offset;
- GST_LOG_OBJECT (demux, "searching cluster following offset %" G_GINT64_FORMAT,
- *pos);
+ GST_LOG_OBJECT (demux, "searching cluster %s offset %" G_GINT64_FORMAT,
+ forward ? "following" : "preceding", *pos);
if (demux->clusters) {
gint64 *cpos;
cpos = gst_util_array_binary_search (demux->clusters->data,
demux->clusters->len, sizeof (gint64),
(GCompareDataFunc) gst_matroska_cluster_compare,
- GST_SEARCH_MODE_AFTER, pos, NULL);
+ forward ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE, pos, NULL);
/* sanity check */
if (cpos) {
GST_DEBUG_OBJECT (demux,
while (1) {
GstByteReader reader;
gint cluster_pos;
+ guint toread = chunk;
+ if (!forward) {
+ /* never read beyond the requested target */
+ if (G_UNLIKELY (newpos < chunk)) {
+ toread = newpos;
+ newpos = 0;
+ } else {
+ newpos -= chunk;
+ }
+ }
if (buf != NULL) {
gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
buf = NULL;
}
- ret = gst_pad_pull_range (demux->common.sinkpad, newpos, chunk, &buf);
+ ret = gst_pad_pull_range (demux->common.sinkpad, newpos, toread, &buf);
if (ret != GST_FLOW_OK)
break;
GST_DEBUG_OBJECT (demux,
}
gst_byte_reader_init (&reader, data, size);
- resume:
- cluster_pos = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
- GST_MATROSKA_ID_CLUSTER, 0, gst_byte_reader_get_remaining (&reader));
+ cluster_pos = -1;
+ while (1) {
+ gint found = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
+ GST_MATROSKA_ID_CLUSTER, 0, gst_byte_reader_get_remaining (&reader));
+ if (forward) {
+ cluster_pos = found;
+ break;
+ }
+ /* need last occurrence when searching backwards */
+ if (found >= 0) {
+ cluster_pos = gst_byte_reader_get_pos (&reader) + found;
+ gst_byte_reader_skip (&reader, found + 4);
+ } else {
+ break;
+ }
+ }
+
if (cluster_pos >= 0) {
newpos += cluster_pos;
- /* prepare resuming at next byte */
- if (!gst_byte_reader_skip (&reader, cluster_pos + 1)) {
- GST_DEBUG_OBJECT (demux, "Need more data -> continue");
- continue;
- }
GST_DEBUG_OBJECT (demux,
"found cluster ebml id at offset %" G_GINT64_FORMAT, newpos);
/* extra checks whether we really sync'ed to a cluster:
GST_ELEMENT_CAST (demux), &id, &length, &needed);
if (ret != GST_FLOW_OK) {
GST_DEBUG_OBJECT (demux, "need more data -> continue");
- continue;
+ goto next;
}
g_assert (id == GST_MATROSKA_ID_CLUSTER);
GST_DEBUG_OBJECT (demux, "cluster size %" G_GUINT64_FORMAT ", prefix %d",
demux->common.offset += length + needed;
ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
GST_ELEMENT_CAST (demux), &id, &length, &needed);
- if (ret != GST_FLOW_OK) {
- /* we skipped one byte in the reader above, need to accomodate for
- * that when resuming skipping from the reader instead of reading a
- * new chunk */
- newpos += 1;
- goto resume;
- }
+ if (ret != GST_FLOW_OK)
+ goto next;
GST_DEBUG_OBJECT (demux, "next element is %scluster",
id == GST_MATROSKA_ID_CLUSTER ? "" : "not ");
if (id == GST_MATROSKA_ID_CLUSTER)
break;
- /* not ok, resume
- * we skipped one byte in the reader above, need to accomodate for
- * that when resuming skipping from the reader instead of reading a
- * new chunk */
- newpos += 1;
- goto resume;
+ next:
+ if (forward)
+ newpos += 1;
} else {
/* partial cluster id may have been in tail of buffer */
- newpos += MAX (gst_byte_reader_get_remaining (&reader), 4) - 3;
+ newpos +=
+ forward ? MAX (gst_byte_reader_get_remaining (&reader), 4) - 3 : 3;
}
}
return ret;
}
+ /* Three states to express: starts with I-frame, starts with delta, don't know */
+ typedef enum
+ {
+ CLUSTER_STATUS_NONE = 0,
+ CLUSTER_STATUS_STARTS_WITH_KEYFRAME,
+ CLUSTER_STATUS_STARTS_WITH_DELTAUNIT,
+ } ClusterStatus;
+
+ /* Summary of a single Cluster as probed by
+ * gst_matroska_demux_peek_cluster_info(). Fields that could not be
+ * determined keep their "unknown" value (0 / GST_CLOCK_TIME_NONE /
+ * CLUSTER_STATUS_NONE). */
+ typedef struct
+ {
+ guint64 offset; /* byte offset of the cluster in the stream */
+ guint64 size; /* cluster length + id/size prefix; 0 if length unknown */
+ guint64 prev_size; /* value of the cluster's PrevSize element, 0 if absent */
+ GstClockTime time; /* cluster timecode scaled by the segment time_scale */
+ ClusterStatus status; /* whether the first SimpleBlock is a keyframe */
+ } ClusterInfo;
+
+ /* Map a ClusterStatus to a short human-readable nick for debug output. */
+ static const gchar *
+ cluster_status_get_nick (ClusterStatus status)
+ {
+ if (status == CLUSTER_STATUS_NONE)
+ return "none";
+ if (status == CLUSTER_STATUS_STARTS_WITH_KEYFRAME)
+ return "key";
+ if (status == CLUSTER_STATUS_STARTS_WITH_DELTAUNIT)
+ return "delta";
+ /* unreachable for valid enum values */
+ return "???";
+ }
+
+ /* Skip ebml-coded number:
+ * 1xxx.. = 1 byte
+ * 01xx.. = 2 bytes
+ * 001x.. = 3 bytes, etc.
+ *
+ * Returns FALSE if the reader does not hold enough data for the number.
+ */
+ static gboolean
+ bit_reader_skip_ebml_num (GstBitReader * br)
+ {
+ guint8 i, v = 0;
+
+ /* peek (don't consume) the first byte; it encodes the total length */
+ if (!gst_bit_reader_peek_bits_uint8 (br, &v, 8))
+ return FALSE;
+
+ /* count leading zero bits: i leading zeros means an (i + 1)-byte number.
+ * Note: if v == 0 (not a valid EBML length descriptor) the loop leaves
+ * i == 8 and we try to skip 9 bytes, which then typically fails below. */
+ for (i = 0; i < 8; i++) {
+ if ((v & (0x80 >> i)) != 0)
+ break;
+ }
+ /* skip the whole number, including the length-descriptor byte */
+ return gst_bit_reader_skip (br, (i + 1) * 8);
+ }
+
+ /* Don't probe more than that many bytes into the cluster for keyframe info
+ * (random value, mostly for sanity checking) */
+ #define MAX_CLUSTER_INFO_PROBE_LENGTH 256
+
+ /* Probe the cluster starting at @offset and fill @cluster with whatever can
+ * be extracted cheaply: offset, size, PrevSize, timestamp, and whether the
+ * first SimpleBlock is a keyframe. Probing stops early on BlockGroup /
+ * Block / EncryptedBlock elements, on the next cluster, or after at most
+ * MAX_CLUSTER_INFO_PROBE_LENGTH bytes.
+ *
+ * NOTE: clobbers demux->common.offset and demux->cluster_time, so callers
+ * must restore those if they care about them.
+ *
+ * Returns TRUE iff the cluster timestamp could be determined. */
+ static gboolean
+ gst_matroska_demux_peek_cluster_info (GstMatroskaDemux * demux,
+ ClusterInfo * cluster, guint64 offset)
+ {
+ demux->common.offset = offset;
+ demux->cluster_time = GST_CLOCK_TIME_NONE;
+
+ /* start from all-unknown values */
+ cluster->offset = offset;
+ cluster->size = 0;
+ cluster->prev_size = 0;
+ cluster->time = GST_CLOCK_TIME_NONE;
+ cluster->status = CLUSTER_STATUS_NONE;
+
+ /* parse first few elements in cluster */
+ do {
+ GstFlowReturn flow;
+ guint64 length;
+ guint32 id;
+ guint needed;
+
+ flow = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed);
+
+ if (flow != GST_FLOW_OK)
+ break;
+
+ GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+ "size %" G_GUINT64_FORMAT ", needed %d", demux->common.offset, id,
+ length, needed);
+
+ /* Reached start of next cluster without finding data, stop processing */
+ if (id == GST_MATROSKA_ID_CLUSTER && cluster->offset != offset)
+ break;
+
+ /* Not going to parse into these for now, stop processing */
+ if (id == GST_MATROSKA_ID_ENCRYPTEDBLOCK
+ || id == GST_MATROSKA_ID_BLOCKGROUP || id == GST_MATROSKA_ID_BLOCK)
+ break;
+
+ /* SimpleBlock: peek at headers to check if it's a keyframe */
+ if (id == GST_MATROSKA_ID_SIMPLEBLOCK) {
+ GstBitReader br;
+ guint8 *d, hdr_len, v = 0;
+
+ GST_DEBUG_OBJECT (demux, "SimpleBlock found");
+
+ /* SimpleBlock header is max. 21 bytes */
+ hdr_len = MIN (21, length);
+
+ flow = gst_matroska_read_common_peek_bytes (&demux->common,
+ demux->common.offset, hdr_len, NULL, &d);
+
+ if (flow != GST_FLOW_OK)
+ break;
+
+ gst_bit_reader_init (&br, d, hdr_len);
+
+ /* skip prefix: ebml id (SimpleBlock) + element length */
+ if (!gst_bit_reader_skip (&br, 8 * needed))
+ break;
+
+ /* skip track number (ebml coded) */
+ if (!bit_reader_skip_ebml_num (&br))
+ break;
+
+ /* skip Timecode */
+ if (!gst_bit_reader_skip (&br, 16))
+ break;
+
+ /* read flags */
+ if (!gst_bit_reader_get_bits_uint8 (&br, &v, 8))
+ break;
+
+ /* MSB of the SimpleBlock flags byte marks a keyframe */
+ if ((v & 0x80) != 0)
+ cluster->status = CLUSTER_STATUS_STARTS_WITH_KEYFRAME;
+ else
+ cluster->status = CLUSTER_STATUS_STARTS_WITH_DELTAUNIT;
+
+ break;
+ }
+
+ flow = gst_matroska_demux_parse_id (demux, id, length, needed);
+
+ if (flow != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_CLUSTER:
+ /* G_MAXUINT64 encodes an unknown/unbounded element length */
+ if (length == G_MAXUINT64)
+ cluster->size = 0;
+ else
+ cluster->size = length + needed;
+ break;
+ case GST_MATROSKA_ID_PREVSIZE:
+ /* parse_id above stored the value in demux->cluster_prevsize */
+ cluster->prev_size = demux->cluster_prevsize;
+ break;
+ case GST_MATROSKA_ID_CLUSTERTIMECODE:
+ /* parse_id stored the raw timecode in demux->cluster_time */
+ cluster->time = demux->cluster_time * demux->common.time_scale;
+ break;
+ case GST_MATROSKA_ID_SILENTTRACKS:
+ /* ignore and continue */
+ break;
+ default:
+ GST_WARNING_OBJECT (demux, "Unknown ebml id 0x%08x (possibly garbage), "
+ "bailing out", id);
+ goto out;
+ }
+ } while (demux->common.offset - offset < MAX_CLUSTER_INFO_PROBE_LENGTH);
+
+ out:
+
+ GST_INFO_OBJECT (demux, "Cluster @ %" G_GUINT64_FORMAT ": "
+ "time %" GST_TIME_FORMAT ", size %" G_GUINT64_FORMAT ", "
+ "prev_size %" G_GUINT64_FORMAT ", %s", cluster->offset,
+ GST_TIME_ARGS (cluster->time), cluster->size, cluster->prev_size,
+ cluster_status_get_nick (cluster->status));
+
+ /* return success as long as we could extract the minimum useful information */
+ return cluster->time != GST_CLOCK_TIME_NONE;
+ }
+
+ /* Walk backwards from *cluster_offset until a cluster whose first
+ * SimpleBlock is a keyframe is found, or the first cluster of the file is
+ * reached, or the backtrack limit (demux->max_backtrack_distance seconds)
+ * is exceeded. Uses gst_matroska_demux_peek_cluster_info(), so it clobbers
+ * demux->common.offset / demux->cluster_time while scanning.
+ *
+ * returns TRUE if the cluster offset was updated (in which case
+ * *cluster_offset and *cluster_time hold the new position) */
+ static gboolean
+ gst_matroska_demux_scan_back_for_keyframe_cluster (GstMatroskaDemux * demux,
+ gint64 * cluster_offset, GstClockTime * cluster_time)
+ {
+ GstClockTime stream_start_time = demux->stream_start_time;
+ guint64 first_cluster_offset = demux->first_cluster_offset;
+ gint64 off = *cluster_offset;
+ ClusterInfo cluster = { 0, };
+
+ GST_INFO_OBJECT (demux, "Checking if cluster starts with keyframe");
+ while (off > first_cluster_offset) {
+ if (!gst_matroska_demux_peek_cluster_info (demux, &cluster, off)) {
+ GST_LOG_OBJECT (demux,
+ "Couldn't get info on cluster @ %" G_GUINT64_FORMAT, off);
+ break;
+ }
+
+ /* Keyframe? Then we're done */
+ if (cluster.status == CLUSTER_STATUS_STARTS_WITH_KEYFRAME) {
+ GST_LOG_OBJECT (demux,
+ "Found keyframe at start of cluster @ %" G_GUINT64_FORMAT, off);
+ break;
+ }
+
+ /* We only scan back if we *know* we landed on a cluster that
+ * starts with a delta frame. */
+ if (cluster.status != CLUSTER_STATUS_STARTS_WITH_DELTAUNIT) {
+ GST_LOG_OBJECT (demux,
+ "No delta frame at start of cluster @ %" G_GUINT64_FORMAT, off);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "Cluster starts with delta frame, backtracking");
+
+ /* Don't scan back more than this much in time from the cluster we
+ * originally landed on. This is mostly a sanity check in case a file
+ * always has keyframes in the middle of clusters and never at the
+ * beginning. Without this we would always scan back to the beginning
+ * of the file in that case. */
+ if (cluster.time != GST_CLOCK_TIME_NONE) {
+ GstClockTimeDiff distance = GST_CLOCK_DIFF (cluster.time, *cluster_time);
+
+ if (distance < 0 || distance > demux->max_backtrack_distance * GST_SECOND) {
+ GST_DEBUG_OBJECT (demux, "Haven't found cluster with keyframe within "
+ "%u secs of original seek target cluster, stopping",
+ demux->max_backtrack_distance);
+ break;
+ }
+ }
+
+ /* If we have cluster prev_size we can skip back efficiently. If not,
+ * we'll just do a brute force search for a cluster identifier */
+ if (cluster.prev_size > 0 && off >= cluster.prev_size) {
+ off -= cluster.prev_size;
+ } else {
+ GstFlowReturn flow;
+
+ GST_LOG_OBJECT (demux, "Cluster has no or invalid prev size, searching "
+ "for previous cluster instead then");
+
+ flow = gst_matroska_demux_search_cluster (demux, &off, FALSE);
+ if (flow != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (demux, "cluster search yielded flow %s, stopping",
+ gst_flow_get_name (flow));
+ break;
+ }
+ }
+
+ /* backtracked to (or before) the first cluster: snap to stream start */
+ if (off <= first_cluster_offset) {
+ GST_LOG_OBJECT (demux, "Reached first cluster, stopping");
+ *cluster_offset = first_cluster_offset;
+ *cluster_time = stream_start_time;
+ return TRUE;
+ }
+ GST_LOG_OBJECT (demux, "Trying prev cluster @ %" G_GUINT64_FORMAT, off);
+ }
+
+ /* If we found a cluster starting with a keyframe jump to that instead,
+ * otherwise leave everything as it was before */
+ if (cluster.time != GST_CLOCK_TIME_NONE
+ && (cluster.offset == first_cluster_offset
+ || cluster.status == CLUSTER_STATUS_STARTS_WITH_KEYFRAME)) {
+ *cluster_offset = cluster.offset;
+ *cluster_time = cluster.time;
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
/* bisect and scan through file for cluster starting before @time,
* returns fake index entry with corresponding info on cluster */
static GstMatroskaIndex *
GstMatroskaIndex *entry = NULL;
GstMatroskaReadState current_state;
GstClockTime otime, prev_cluster_time, current_cluster_time, cluster_time;
- gint64 opos, newpos, startpos = 0, current_offset;
+ GstClockTime atime;
+ gint64 opos, newpos, current_offset;
gint64 prev_cluster_offset = -1, current_cluster_offset, cluster_offset;
- const guint chunk = 64 * 1024;
+ gint64 apos, maxpos;
+ guint64 cluster_size = 0;
GstFlowReturn ret;
guint64 length;
guint32 id;
guint needed;
- /* (under)estimate new position, resync using cluster ebml id,
- * and scan forward to appropriate cluster
- * (and re-estimate if need to go backward) */
-
- prev_cluster_time = GST_CLOCK_TIME_NONE;
+ /* estimate new position, resync using cluster ebml id,
+ * and bisect further or scan forward to appropriate cluster */
- /* store some current state */
+ /* save some current global state which will be touched by our scanning */
current_state = demux->common.state;
g_return_val_if_fail (current_state == GST_MATROSKA_READ_STATE_DATA, NULL);
demux->common.state = GST_MATROSKA_READ_STATE_SCANNING;
- /* estimate using start and current position */
+ /* estimate using start and last known cluster */
GST_OBJECT_LOCK (demux);
- opos = demux->common.offset - demux->common.ebml_segment_start;
- otime = demux->common.segment.position;
+ apos = demux->first_cluster_offset;
+ atime = demux->stream_start_time;
+ opos = demux->last_cluster_offset;
+ otime = demux->stream_last_time;
GST_OBJECT_UNLOCK (demux);
/* sanitize */
- time = MAX (time, demux->stream_start_time);
+ time = MAX (time, atime);
+ otime = MAX (otime, atime);
+ opos = MAX (opos, apos);
- /* avoid division by zero in first estimation below */
- if (otime <= demux->stream_start_time)
- otime = time;
+ maxpos = gst_matroska_read_common_get_length (&demux->common);
+
+ /* invariants;
+ * apos <= opos
+ * atime <= otime
+ * apos always refer to a cluster before target time;
+ * opos may or may not be after target time, but if it is once so,
+ * then also in next iteration
+ * */
retry:
GST_LOG_OBJECT (demux,
+ "apos: %" G_GUINT64_FORMAT ", atime: %" GST_TIME_FORMAT ", %"
+ GST_TIME_FORMAT " in stream time, "
"opos: %" G_GUINT64_FORMAT ", otime: %" GST_TIME_FORMAT ", %"
GST_TIME_FORMAT " in stream time (start %" GST_TIME_FORMAT "), time %"
- GST_TIME_FORMAT, opos, GST_TIME_ARGS (otime),
- GST_TIME_ARGS (otime - demux->stream_start_time),
+ GST_TIME_FORMAT, apos, GST_TIME_ARGS (atime),
+ GST_TIME_ARGS (atime - demux->stream_start_time), opos,
+ GST_TIME_ARGS (otime), GST_TIME_ARGS (otime - demux->stream_start_time),
GST_TIME_ARGS (demux->stream_start_time), GST_TIME_ARGS (time));
- if (otime <= demux->stream_start_time) {
- newpos = 0;
+ g_assert (atime <= otime);
+ g_assert (apos <= opos);
+ if (time == GST_CLOCK_TIME_NONE) {
+ GST_DEBUG_OBJECT (demux, "searching last cluster");
+ newpos = maxpos;
+ if (newpos == -1) {
+ GST_DEBUG_OBJECT (demux, "unknown file size; bailing out");
+ goto exit;
+ }
+ } else if (otime <= atime) {
+ newpos = apos;
} else {
- newpos =
- gst_util_uint64_scale (opos - demux->common.ebml_segment_start,
- time - demux->stream_start_time,
- otime - demux->stream_start_time) - chunk;
- if (newpos < 0)
- newpos = 0;
+ newpos = apos +
+ gst_util_uint64_scale (opos - apos, time - atime, otime - atime);
+ if (maxpos != -1 && newpos > maxpos)
+ newpos = maxpos;
}
- /* favour undershoot */
- newpos = newpos * 90 / 100;
- newpos += demux->common.ebml_segment_start;
GST_DEBUG_OBJECT (demux,
"estimated offset for %" GST_TIME_FORMAT ": %" G_GINT64_FORMAT,
GST_TIME_ARGS (time), newpos);
- /* and at least start scanning before previous scan start to avoid looping */
- startpos = startpos * 90 / 100;
- if (startpos && startpos < newpos)
- newpos = startpos;
-
- /* read in at newpos and scan for ebml cluster id */
- startpos = newpos;
- while (1) {
-
- ret = gst_matroska_demux_search_cluster (demux, &newpos);
- if (ret == GST_FLOW_EOS) {
- /* heuristic HACK */
- newpos = startpos * 80 / 100;
- GST_DEBUG_OBJECT (demux, "EOS; "
- "new estimated offset for %" GST_TIME_FORMAT ": %" G_GINT64_FORMAT,
- GST_TIME_ARGS (time), newpos);
- startpos = newpos;
- continue;
- } else if (ret != GST_FLOW_OK) {
+ /* search backwards */
+ if (newpos > apos) {
+ ret = gst_matroska_demux_search_cluster (demux, &newpos, FALSE);
+ if (ret != GST_FLOW_OK)
goto exit;
- } else {
- break;
- }
}
/* then start scanning and parsing for cluster time,
- * re-estimate if overshoot, otherwise next cluster and so on */
+ * re-estimate if possible, otherwise next cluster and so on */
+ /* note that each re-estimate is entered with a change in apos or opos,
+ * avoiding infinite loop */
demux->common.offset = newpos;
demux->cluster_time = cluster_time = GST_CLOCK_TIME_NONE;
+ cluster_size = 0;
+ prev_cluster_time = GST_CLOCK_TIME_NONE;
while (1) {
- guint64 cluster_size = 0;
-
/* peek and parse some elements */
ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
GST_ELEMENT_CAST (demux), &id, &length, &needed);
GST_DEBUG_OBJECT (demux, "found cluster at offset %" G_GINT64_FORMAT
" with time %" GST_TIME_FORMAT, cluster_offset,
GST_TIME_ARGS (cluster_time));
+ if (time == GST_CLOCK_TIME_NONE) {
+ GST_DEBUG_OBJECT (demux, "found last cluster");
+ prev_cluster_time = cluster_time;
+ prev_cluster_offset = cluster_offset;
+ break;
+ }
if (cluster_time > time) {
GST_DEBUG_OBJECT (demux, "overshot target");
/* cluster overshoots */
goto retry;
}
} else {
+ /* cluster undershoots */
+ GST_DEBUG_OBJECT (demux, "undershot target");
+ /* ok if close enough */
+ if (GST_CLOCK_DIFF (cluster_time, time) < 5 * GST_SECOND) {
+ GST_DEBUG_OBJECT (demux, "target close enough");
+ prev_cluster_time = cluster_time;
+ prev_cluster_offset = cluster_offset;
+ break;
+ }
+ if (otime > time) {
+ /* we are in between atime and otime => can bisect if worthwhile */
+ if (prev_cluster_time != GST_CLOCK_TIME_NONE &&
+ cluster_time > prev_cluster_time &&
+ (GST_CLOCK_DIFF (prev_cluster_time, cluster_time) * 10 <
+ GST_CLOCK_DIFF (cluster_time, time))) {
+ /* we moved at least one cluster forward,
+ * and it looks like target is still far away,
+ * let's estimate again */
+ GST_DEBUG_OBJECT (demux, "bisecting with new apos");
+ apos = cluster_offset;
+ atime = cluster_time;
+ goto retry;
+ }
+ }
/* cluster undershoots, goto next one */
prev_cluster_time = cluster_time;
prev_cluster_offset = cluster_offset;
goto exit;
}
+ /* In the bisect loop above we always undershoot and then jump forward
+ * cluster-by-cluster until we overshoot, so if we get here we've gone
+ * over and the previous cluster is where we need to go to. */
+ cluster_offset = prev_cluster_offset;
+ cluster_time = prev_cluster_time;
+
+ /* If we have video and can easily backtrack, check if we landed on a cluster
+ * that starts with a keyframe - and if not backtrack until we find one that
+ * does. */
+ if (demux->have_nonintraonly_v_streams && demux->max_backtrack_distance > 0) {
+ if (gst_matroska_demux_scan_back_for_keyframe_cluster (demux,
+ &cluster_offset, &cluster_time)) {
+ GST_INFO_OBJECT (demux, "Adjusted cluster to %" GST_TIME_FORMAT " @ "
+ "%" G_GUINT64_FORMAT, GST_TIME_ARGS (cluster_time), cluster_offset);
+ }
+ }
+
entry = g_new0 (GstMatroskaIndex, 1);
- entry->time = prev_cluster_time;
- entry->pos = prev_cluster_offset - demux->common.ebml_segment_start;
+ entry->time = cluster_time;
+ entry->pos = cluster_offset - demux->common.ebml_segment_start;
GST_DEBUG_OBJECT (demux, "simulated index entry; time %" GST_TIME_FORMAT
", pos %" G_GUINT64_FORMAT, GST_TIME_ARGS (entry->time), entry->pos);
case GST_EVENT_SEEK:
/* no seeking until we are (safely) ready */
if (demux->common.state != GST_MATROSKA_READ_STATE_DATA) {
- GST_DEBUG_OBJECT (demux, "not ready for seeking yet");
- gst_event_unref (event);
- return FALSE;
+ GST_DEBUG_OBJECT (demux,
+ "not ready for seeking yet, deferring seek event: %" GST_PTR_FORMAT,
+ event);
+ if (demux->deferred_seek_event)
+ gst_event_unref (demux->deferred_seek_event);
+ demux->deferred_seek_event = event;
+ demux->deferred_seek_pad = pad;
+ return TRUE;
}
{
return res;
}
+ /* Sink-pad query handler: answers BITRATE queries from the cached byte
+ * length and the segment duration, and delegates everything else to the
+ * default pad query handler. */
+ static gboolean
+ gst_matroska_demux_handle_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+ gboolean ret = FALSE;
+
+ if (GST_QUERY_TYPE (query) == GST_QUERY_BITRATE) {
+ /* refresh the cached length when unknown or already read past it */
+ if (G_UNLIKELY (demux->cached_length == G_MAXUINT64 ||
+ demux->common.offset >= demux->cached_length)) {
+ demux->cached_length =
+ gst_matroska_read_common_get_length (&demux->common);
+ }
+
+ if (demux->cached_length < G_MAXUINT64
+ && demux->common.segment.duration > 0) {
+ /* TODO: better results based on ranges/index tables */
+ guint bitrate =
+ gst_util_uint64_scale (8 * demux->cached_length, GST_SECOND,
+ demux->common.segment.duration);
+
+ GST_LOG_OBJECT (demux, "bitrate query byte length: %" G_GUINT64_FORMAT
+ " duration %" GST_TIME_FORMAT " resulting in a bitrate of %u",
+ demux->cached_length,
+ GST_TIME_ARGS (demux->common.segment.duration), bitrate);
+
+ gst_query_set_bitrate (query, bitrate);
+ ret = TRUE;
+ }
+ } else {
+ ret = gst_pad_query_default (pad, (GstObject *) demux, query);
+ }
+
+ return ret;
+ }
+
static GstFlowReturn
gst_matroska_demux_seek_to_previous_keyframe (GstMatroskaDemux * demux)
{
switch (id) {
/* one track within the "all-tracks" header */
- case GST_MATROSKA_ID_TRACKENTRY:
- ret = gst_matroska_demux_add_stream (demux, ebml);
+ case GST_MATROSKA_ID_TRACKENTRY:{
+ GstMatroskaTrackContext *track;
+ ret = gst_matroska_demux_parse_stream (demux, ebml, &track);
+ if (track != NULL) {
+ if (gst_matroska_read_common_tracknumber_unique (&demux->common,
+ track->num)) {
+ gst_matroska_demux_add_stream (demux, track);
+ } else {
+ GST_ERROR_OBJECT (demux,
+ "TrackNumber %" G_GUINT64_FORMAT " is not unique", track->num);
+ ret = GST_FLOW_ERROR;
+ gst_matroska_track_free (track);
+ track = NULL;
+ }
+ }
break;
+ }
default:
ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
demux->tracks_parsed = TRUE;
+ GST_DEBUG_OBJECT (demux, "signaling no more pads");
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+
+ return ret;
+ }
+
+ /* Re-parse a Tracks element seen again after the initial Tracks parsing
+ * (caller dispatches here when demux->tracks_parsed is already set).
+ * Each re-parsed track must match an existing stream by TrackNumber, track
+ * type and codec id; on a match the new context replaces the old one on
+ * the existing pad, re-setting caps and re-sending tags only if they
+ * changed. A mismatched track, an unknown TrackNumber, or a different
+ * total track count yields GST_FLOW_ERROR. */
+ static GstFlowReturn
+ gst_matroska_demux_update_tracks (GstMatroskaDemux * demux, GstEbmlRead * ebml)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint num_tracks_found = 0;
+ guint32 id;
+
+ GST_INFO_OBJECT (demux, "Reparsing Tracks element");
+
+ DEBUG_ELEMENT_START (demux, ebml, "Tracks");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* one track within the "all-tracks" header */
+ case GST_MATROSKA_ID_TRACKENTRY:{
+ GstMatroskaTrackContext *new_track;
+ gint old_track_index;
+ GstMatroskaTrackContext *old_track;
+ ret = gst_matroska_demux_parse_stream (demux, ebml, &new_track);
+ if (new_track == NULL)
+ break;
+ num_tracks_found++;
+
+ /* a TrackNumber we never saw before cannot be matched up with an
+ * existing pad, so here "unique" means "unexpected" */
+ if (gst_matroska_read_common_tracknumber_unique (&demux->common,
+ new_track->num)) {
+ GST_ERROR_OBJECT (demux,
+ "Unexpected new TrackNumber: %" G_GUINT64_FORMAT, new_track->num);
+ goto track_mismatch_error;
+ }
+
+ old_track_index =
+ gst_matroska_read_common_stream_from_num (&demux->common,
+ new_track->num);
+ g_assert (old_track_index != -1);
+ old_track = g_ptr_array_index (demux->common.src, old_track_index);
+
+ if (old_track->type != new_track->type) {
+ GST_ERROR_OBJECT (demux,
+ "Mismatch reparsing track %" G_GUINT64_FORMAT
+ " on track type. Expected %d, found %d", new_track->num,
+ old_track->type, new_track->type);
+ goto track_mismatch_error;
+ }
+
+ if (g_strcmp0 (old_track->codec_id, new_track->codec_id) != 0) {
+ GST_ERROR_OBJECT (demux,
+ "Mismatch reparsing track %" G_GUINT64_FORMAT
+ " on codec id. Expected '%s', found '%s'", new_track->num,
+ old_track->codec_id, new_track->codec_id);
+ goto track_mismatch_error;
+ }
+
+ /* The new track matches the old track. No problems on our side.
+ * Let's make it replace the old track. */
+ new_track->pad = old_track->pad;
+ new_track->index = old_track->index;
+ new_track->pos = old_track->pos;
+ g_ptr_array_index (demux->common.src, old_track_index) = new_track;
+ gst_pad_set_element_private (new_track->pad, new_track);
+
+ /* only renegotiate when the caps actually changed */
+ if (!gst_caps_is_equal (old_track->caps, new_track->caps)) {
+ gst_pad_set_caps (new_track->pad, new_track->caps);
+ }
+ gst_caps_replace (&old_track->caps, NULL);
+
+ /* only re-send tags when they actually changed */
+ if (!gst_tag_list_is_equal (old_track->tags, new_track->tags)) {
+ GST_DEBUG_OBJECT (old_track->pad, "Sending tags %p: %"
+ GST_PTR_FORMAT, new_track->tags, new_track->tags);
+ gst_pad_push_event (new_track->pad,
+ gst_event_new_tag (gst_tag_list_copy (new_track->tags)));
+ }
+
+ gst_matroska_track_free (old_track);
+ break;
+
+ track_mismatch_error:
+ gst_matroska_track_free (new_track);
+ new_track = NULL;
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "Track", id);
+ break;
+ }
+ }
+ DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
+
+ /* both counters are unsigned; the original "%du" format printed a stray
+ * literal 'u' after the number */
+ if (ret != GST_FLOW_ERROR && demux->common.num_streams != num_tracks_found) {
+ GST_ERROR_OBJECT (demux,
+ "Mismatch on the number of tracks. Expected %u tracks, found %u",
+ demux->common.num_streams, num_tracks_found);
+ ret = GST_FLOW_ERROR;
+ }
return ret;
}
gboolean delta_unit = FALSE;
guint64 duration = 0;
gint64 lace_time = 0;
+ GstEvent *protect_event;
stream = g_ptr_array_index (demux->common.src, stream_num);
} else {
lace_time = GST_CLOCK_TIME_NONE;
}
+ /* Send the GST_PROTECTION event */
+ while ((protect_event = g_queue_pop_head (&stream->protection_event_queue))) {
+ GST_TRACE_OBJECT (demux, "pushing protection event for stream %d:%s",
+ stream->index, GST_STR_NULL (stream->name));
+ gst_pad_push_event (stream->pad, protect_event);
+ }
/* need to refresh segment info ASAP */
if (GST_CLOCK_TIME_IS_VALID (lace_time) && demux->need_segment) {
GST_DEBUG_OBJECT (demux,
"using stored seek position %" GST_TIME_FORMAT,
GST_TIME_ARGS (demux->common.segment.position));
- clace_time = demux->common.segment.position + demux->stream_start_time;
+ clace_time = demux->common.segment.position;
segment->position = GST_CLOCK_TIME_NONE;
}
segment->start = clace_time;
delta_unit = TRUE;
invisible_frame = ((flags & 0x08)) &&
(!strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP8) ||
- !strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP9));
+ !strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP9) ||
+ !strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_AV1));
}
/* If we're doing a keyframe-only trickmode, only push keyframes on video
* streams */
if (delta_unit
- && demux->common.
- segment.flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS) {
+ && demux->common.segment.
+ flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS) {
GST_LOG_OBJECT (demux, "Skipping non-keyframe on stream %d",
stream->index);
ret = GST_FLOW_OK;
GST_BUFFER_PTS (sub) -= stream->codec_delay;
} else {
GST_BUFFER_PTS (sub) = 0;
+
+ /* Opus GstAudioClippingMeta units are scaled by 48000/sample_rate.
+ That is, if a Opus track has audio encoded at 24000 Hz and 132
+ samples need to be clipped, GstAudioClippingMeta.start will be
+ set to 264. (This is also the case for buffer offsets.)
+ Opus sample rates are always divisors of 48000 Hz, which is the
+ maximum allowed sample rate. */
start_clip =
gst_util_uint64_scale_round (stream->codec_delay, 48000,
GST_SECOND);
* search for cluster mark following current pos */
pos = demux->common.offset;
GST_WARNING_OBJECT (demux, "parse error, looking for next cluster");
- if ((ret = gst_matroska_demux_search_cluster (demux, &pos)) != GST_FLOW_OK) {
+ if ((ret = gst_matroska_demux_search_cluster (demux, &pos, TRUE)) !=
+ GST_FLOW_OK) {
/* did not work, give up */
return ret;
} else {
break;
case GST_MATROSKA_READ_STATE_SCANNING:
if (id != GST_MATROSKA_ID_CLUSTER &&
+ id != GST_MATROSKA_ID_PREVSIZE &&
id != GST_MATROSKA_ID_CLUSTERTIMECODE) {
if (demux->common.start_resync_offset != -1) {
/* we need to skip byte per byte if we are scanning for a new cluster
case GST_MATROSKA_READ_STATE_DATA:
case GST_MATROSKA_READ_STATE_SEEK:
switch (id) {
+ case GST_EBML_ID_HEADER:
+ GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+ demux->common.state = GST_MATROSKA_READ_STATE_SEGMENT;
+ gst_matroska_demux_check_seekability (demux);
+ break;
case GST_MATROSKA_ID_SEGMENTINFO:
if (!demux->common.segmentinfo_parsed) {
GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
}
break;
case GST_MATROSKA_ID_TRACKS:
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
if (!demux->tracks_parsed) {
- GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
ret = gst_matroska_demux_parse_tracks (demux, &ebml);
} else {
- GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+ ret = gst_matroska_demux_update_tracks (demux, &ebml);
}
break;
case GST_MATROSKA_ID_CLUSTER:
goto no_tracks;
}
}
- if (G_UNLIKELY (demux->common.state
- == GST_MATROSKA_READ_STATE_HEADER)) {
+ if (demux->common.state == GST_MATROSKA_READ_STATE_HEADER) {
demux->common.state = GST_MATROSKA_READ_STATE_DATA;
demux->first_cluster_offset = demux->common.offset;
- GST_DEBUG_OBJECT (demux, "signaling no more pads");
- gst_element_no_more_pads (GST_ELEMENT (demux));
+
+ if (!demux->streaming &&
+ !GST_CLOCK_TIME_IS_VALID (demux->common.segment.duration)) {
+ GstMatroskaIndex *last = NULL;
+
+ GST_DEBUG_OBJECT (demux,
+ "estimating duration using last cluster");
+ if ((last = gst_matroska_demux_search_pos (demux,
+ GST_CLOCK_TIME_NONE)) != NULL) {
+ demux->last_cluster_offset =
+ last->pos + demux->common.ebml_segment_start;
+ demux->stream_last_time = last->time;
+ demux->common.segment.duration =
+ demux->stream_last_time - demux->stream_start_time;
+ /* above estimate should not be taken all too strongly */
+ demux->invalid_duration = TRUE;
+ GST_DEBUG_OBJECT (demux,
+ "estimated duration as %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->common.segment.duration));
+
+ g_free (last);
+ }
+ }
+
+ /* Peek at second cluster in order to figure out if we have cluster
+ * prev_size or not (which is never set on the first cluster for
+ * obvious reasons). This is useful in case someone initiates a
+ * seek or direction change before we reach the second cluster. */
+ if (!demux->streaming) {
+ ClusterInfo cluster = { 0, };
+
+ if (gst_matroska_demux_peek_cluster_info (demux, &cluster,
+ demux->first_cluster_offset) && cluster.size > 0) {
+ gst_matroska_demux_peek_cluster_info (demux, &cluster,
+ demux->first_cluster_offset + cluster.size);
+ }
+ demux->common.offset = demux->first_cluster_offset;
+ }
+
+ if (demux->deferred_seek_event) {
+ GstEvent *seek_event;
+ GstPad *seek_pad;
+ seek_event = demux->deferred_seek_event;
+ seek_pad = demux->deferred_seek_pad;
+ demux->deferred_seek_event = NULL;
+ demux->deferred_seek_pad = NULL;
+ GST_DEBUG_OBJECT (demux,
+ "Handling deferred seek event: %" GST_PTR_FORMAT, seek_event);
+ gst_matroska_demux_handle_seek_event (demux, seek_pad,
+ seek_event);
+ gst_event_unref (seek_event);
+ }
+
/* send initial segment - we wait till we know the first
incoming timestamp, so we can properly set the start of
the segment. */
}
demux->cluster_time = GST_CLOCK_TIME_NONE;
demux->cluster_offset = demux->common.offset;
+ demux->cluster_prevsize = 0;
if (G_UNLIKELY (!demux->seek_first && demux->seek_block)) {
GST_DEBUG_OBJECT (demux, "seek target block %" G_GUINT64_FORMAT
" not found in Cluster, trying next Cluster's first block instead",
goto parse_failed;
GST_DEBUG_OBJECT (demux, "ClusterTimeCode: %" G_GUINT64_FORMAT, num);
demux->cluster_time = num;
+ /* track last cluster */
+ if (demux->cluster_offset > demux->last_cluster_offset) {
+ demux->last_cluster_offset = demux->cluster_offset;
+ demux->stream_last_time =
+ demux->cluster_time * demux->common.time_scale;
+ }
#if 0
if (demux->common.element_index) {
if (demux->common.element_index_writer_id == -1)
GST_OBJECT_UNLOCK (demux);
}
break;
+ case GST_MATROSKA_ID_PREVSIZE:{
+ guint64 num;
+
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ if ((ret = gst_ebml_read_uint (&ebml, &id, &num)) != GST_FLOW_OK)
+ goto parse_failed;
+ GST_LOG_OBJECT (demux, "ClusterPrevSize: %" G_GUINT64_FORMAT, num);
+ demux->cluster_prevsize = num;
+ demux->seen_cluster_prevsize = TRUE;
+ break;
+ }
case GST_MATROSKA_ID_POSITION:
- case GST_MATROSKA_ID_PREVSIZE:
case GST_MATROSKA_ID_ENCRYPTEDBLOCK:
+ /* The WebM doesn't support the EncryptedBlock element.
+ * The Matroska spec doesn't give us more detail, how to parse this element,
+ * for example the field TransformID isn't specified yet.*/
case GST_MATROSKA_ID_SILENTTRACKS:
GST_DEBUG_OBJECT (demux,
"Skipping Cluster subelement 0x%x - ignoring", id);
demux->common.segment.position = GST_CLOCK_TIME_NONE;
demux->cluster_time = GST_CLOCK_TIME_NONE;
demux->cluster_offset = 0;
+ demux->cluster_prevsize = 0;
demux->need_segment = TRUE;
demux->segment_seqnum = gst_event_get_seqnum (event);
/* but keep some of the upstream segment */
demux->common.segment.duration = dur;
demux->cluster_time = GST_CLOCK_TIME_NONE;
demux->cluster_offset = 0;
+ demux->cluster_prevsize = 0;
GST_OBJECT_UNLOCK (demux);
/* fall-through */
}
static GstStaticCaps intra_caps = GST_STATIC_CAPS ("image/jpeg; "
"video/x-raw; image/png; video/x-dv; video/x-huffyuv; video/x-ffv; "
"video/x-compressed-yuv");
+ GstCaps *tmp = gst_static_caps_get (&intra_caps);
+
context->intra_only =
- gst_caps_can_intersect (gst_static_caps_get (&intra_caps), caps);
+ gst_caps_can_intersect (tmp, caps);
+ gst_caps_unref(tmp);
}
if (buf)
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP9)) {
caps = gst_caps_new_empty_simple ("video/x-vp9");
*codec_name = g_strdup_printf ("On2 VP9");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_AV1)) {
+ caps = gst_caps_new_empty_simple ("video/x-av1");
+ if (data) {
+ GstBuffer *priv;
+
+ priv = gst_buffer_new_wrapped (g_memdup (data, size), size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+ } else {
+ GST_WARNING ("No AV1 codec data found!");
+ }
+ *codec_name = g_strdup_printf ("AOM AV1");
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_PRORES)) {
guint32 fourcc;
const gchar *variant, *variant_descr = "";
0, 1, NULL);
}
- if (videocontext->parent.flags & GST_MATROSKA_VIDEOTRACK_INTERLACED)
- gst_structure_set (structure, "interlace-mode", G_TYPE_STRING,
- "mixed", NULL);
+ switch (videocontext->interlace_mode) {
+ case GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE:
+ gst_structure_set (structure,
+ "interlace-mode", G_TYPE_STRING, "progressive", NULL);
+ break;
+ case GST_MATROSKA_INTERLACE_MODE_INTERLACED:
+ gst_structure_set (structure,
+ "interlace-mode", G_TYPE_STRING, "mixed", NULL);
+ break;
+ default:
+ break;
+ }
}
if (videocontext->multiview_mode != GST_VIDEO_MULTIVIEW_MODE_NONE) {
if (gst_video_multiview_guess_half_aspect (videocontext->multiview_mode,
GST_FLAG_SET_MASK_EXACT, NULL);
}
+ if (videocontext->colorimetry.range != GST_VIDEO_COLOR_RANGE_UNKNOWN ||
+ videocontext->colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_UNKNOWN ||
+ videocontext->colorimetry.transfer != GST_VIDEO_TRANSFER_UNKNOWN ||
+ videocontext->colorimetry.primaries !=
+ GST_VIDEO_COLOR_PRIMARIES_UNKNOWN) {
+ gchar *colorimetry =
+ gst_video_colorimetry_to_string (&videocontext->colorimetry);
+ gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, colorimetry,
+ NULL);
+ GST_DEBUG ("setting colorimetry to %s", colorimetry);
+ g_free (colorimetry);
+ }
+
caps = gst_caps_simplify (caps);
}
*riff_audio_fmt = auds.format;
/* FIXME: Handle reorder map */
- caps = gst_riff_create_audio_caps (auds.format, NULL, &auds, NULL,
- codec_data, codec_name, NULL);
+ caps = gst_riff_create_audio_caps (auds.format, NULL, &auds, codec_data,
+ NULL, codec_name, NULL);
if (codec_data)
gst_buffer_unref (codec_data);
guint sample_width;
guint extra_data_size;
- GST_ERROR ("real audio raversion:%d", raversion);
+ GST_DEBUG ("real audio raversion:%d", raversion);
if (raversion == 8) {
/* COOK */
flavor = GST_READ_UINT16_BE (data + 22);
sample_width = GST_READ_UINT16_BE (data + 58);
extra_data_size = GST_READ_UINT32_BE (data + 74);
- GST_ERROR
+ GST_DEBUG
("flavor:%d, packet_size:%d, height:%d, leaf_size:%d, sample_width:%d, extra_data_size:%d",
flavor, packet_size, height, leaf_size, sample_width,
extra_data_size);
demux->max_gap_time = g_value_get_uint64 (value);
GST_OBJECT_UNLOCK (demux);
break;
+ case PROP_MAX_BACKTRACK_DISTANCE:
+ GST_OBJECT_LOCK (demux);
+ demux->max_backtrack_distance = g_value_get_uint (value);
+ GST_OBJECT_UNLOCK (demux);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
g_value_set_uint64 (value, demux->max_gap_time);
GST_OBJECT_UNLOCK (demux);
break;
+ case PROP_MAX_BACKTRACK_DISTANCE:
+ GST_OBJECT_LOCK (demux);
+ g_value_set_uint (value, demux->max_backtrack_distance);
+ GST_OBJECT_UNLOCK (demux);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
+ static const gchar *
+ gst_matroska_track_encryption_algorithm_name (gint val)
+ {
+ GEnumValue *en;
+ GEnumClass *enum_class =
+ g_type_class_ref (MATROSKA_TRACK_ENCRYPTION_ALGORITHM_TYPE);
+ en = g_enum_get_value (G_ENUM_CLASS (enum_class), val);
+ return en ? en->value_nick : NULL;
+ }
+
+ static const gchar *
+ gst_matroska_track_encryption_cipher_mode_name (gint val)
+ {
+ GEnumValue *en;
+ GEnumClass *enum_class =
+ g_type_class_ref (MATROSKA_TRACK_ENCRYPTION_CIPHER_MODE_TYPE);
+ en = g_enum_get_value (G_ENUM_CLASS (enum_class), val);
+ return en ? en->value_nick : NULL;
+ }
+
+ static const gchar *
+ gst_matroska_track_encoding_scope_name (gint val)
+ {
+ GEnumValue *en;
+ GEnumClass *enum_class =
+ g_type_class_ref (MATROSKA_TRACK_ENCODING_SCOPE_TYPE);
+
+ en = g_enum_get_value (G_ENUM_CLASS (enum_class), val);
+ return en ? en->value_nick : NULL;
+ }
+
gboolean
gst_matroska_demux_plugin_init (GstPlugin * plugin)
{
--/* GStreamer
-- * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
-- *
-- * This library is free software; you can redistribute it and/or
-- * modify it under the terms of the GNU Library General Public
-- * License as published by the Free Software Foundation; either
-- * version 2 of the License, or (at your option) any later version.
-- *
-- * This library is distributed in the hope that it will be useful,
-- * but WITHOUT ANY WARRANTY; without even the implied warranty of
-- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- * Library General Public License for more details.
-- *
-- * You should have received a copy of the GNU Library General Public
-- * License along with this library; if not, write to the
-- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
-- * Boston, MA 02110-1301, USA.
-- */
++ /* GStreamer
++ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Library General Public
++ * License as published by the Free Software Foundation; either
++ * version 2 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
++ *
++ * You should have received a copy of the GNU Library General Public
++ * License along with this library; if not, write to the
++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
++ * Boston, MA 02110-1301, USA.
++ */
/**
* SECTION:element-rtpbin
* SSRC in the RTP packets to its own SSRC and wil forward the packets on the
* send_rtp_src_\%u pad after updating its internal state.
*
- * #GstRtpBin can also demultiplex incoming bundled streams. The first
- * #GstRtpSession will have a #GstRtpSsrcDemux element splitting the streams
- * based on their SSRC and potentially dispatched to a different #GstRtpSession.
- * Because retransmission SSRCs need to be merged with the corresponding media
- * stream the #GstRtpBin::on-bundled-ssrc signal is emitted so that the
- * application can find out to which session the SSRC belongs.
- *
* The session manager needs the clock-rate of the payload types it is handling
* and will signal the #GstRtpSession::request-pt-map signal when it needs such a
* mapping. One can clear the cached values with the #GstRtpSession::clear-pt-map
GST_STATIC_CAPS ("application/x-rtp;application/x-srtp")
);
- #define GST_RTP_BIN_GET_PRIVATE(obj) \
- (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_RTP_BIN, GstRtpBinPrivate))
-
#define GST_RTP_BIN_LOCK(bin) g_mutex_lock (&(bin)->priv->bin_lock)
#define GST_RTP_BIN_UNLOCK(bin) g_mutex_unlock (&(bin)->priv->bin_lock)
#define GST_RTP_BIN_SHUTDOWN_UNLOCK(bin) \
GST_RTP_BIN_DYN_UNLOCK (bin); \
+ /* Minimum time offset to apply. This compensates for rounding errors in NTP to
+ * RTP timestamp conversions */
+ #define MIN_TS_OFFSET (4 * GST_MSECOND)
+
struct _GstRtpBinPrivate
{
GMutex bin_lock;
SIGNAL_RESET_SYNC,
SIGNAL_GET_SESSION,
SIGNAL_GET_INTERNAL_SESSION,
+ SIGNAL_GET_STORAGE,
+ SIGNAL_GET_INTERNAL_STORAGE,
SIGNAL_ON_NEW_SSRC,
SIGNAL_ON_SSRC_COLLISION,
SIGNAL_REQUEST_RTCP_ENCODER,
SIGNAL_REQUEST_RTCP_DECODER,
+ SIGNAL_REQUEST_FEC_DECODER,
+ SIGNAL_REQUEST_FEC_ENCODER,
+
SIGNAL_NEW_JITTERBUFFER,
+ SIGNAL_NEW_STORAGE,
SIGNAL_REQUEST_AUX_SENDER,
SIGNAL_REQUEST_AUX_RECEIVER,
#define DEFAULT_MAX_MISORDER_TIME 2000
#define DEFAULT_RFC7273_SYNC FALSE
#define DEFAULT_MAX_STREAMS G_MAXUINT
+ #define DEFAULT_MAX_TS_OFFSET_ADJUSTMENT G_GUINT64_CONSTANT(0)
+ #define DEFAULT_MAX_TS_OFFSET G_GINT64_CONSTANT(3000000000)
enum
{
PROP_MAX_DROPOUT_TIME,
PROP_MAX_MISORDER_TIME,
PROP_RFC7273_SYNC,
- PROP_MAX_STREAMS
+ PROP_MAX_STREAMS,
+ PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ PROP_MAX_TS_OFFSET,
};
#define GST_RTP_BIN_RTCP_SYNC_TYPE (gst_rtp_bin_rtcp_sync_get_type())
static void free_stream (GstRtpBinStream * stream, GstRtpBin * bin);
static GstRtpBinSession *create_session (GstRtpBin * rtpbin, gint id);
static GstPad *complete_session_sink (GstRtpBin * rtpbin,
- GstRtpBinSession * session, gboolean bundle_demuxer_needed);
+ GstRtpBinSession * session);
static void
complete_session_receiver (GstRtpBin * rtpbin, GstRtpBinSession * session,
guint sessid);
static GstPad *complete_session_rtcp (GstRtpBin * rtpbin,
- GstRtpBinSession * session, guint sessid, gboolean bundle_demuxer_needed);
+ GstRtpBinSession * session, guint sessid);
/* Manages the RTP stream for one SSRC.
*
gulong buffer_ptreq_sig;
gulong buffer_ntpstop_sig;
gint percent;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gint prev_percent;
+#endif
/* the PT demuxer of the SSRC */
GstElement *demux;
gulong demux_newpad_sig;
* there they are pushed into an SSRC demuxer that splits the stream based on
* SSRC. Each of the SSRC streams go into their own jitterbuffer (managed with
* the GstRtpBinStream above).
+ *
+ * Before the SSRC demuxer, a storage element may be inserted for the purpose
+ * of Forward Error Correction.
*/
struct _GstRtpBinSession
{
gulong demux_newpad_sig;
gulong demux_padremoved_sig;
- /* Bundling support */
- GstElement *rtp_funnel;
- GstElement *rtcp_funnel;
- GstElement *bundle_demux;
- gulong bundle_demux_newpad_sig;
+ /* Fec support */
+ GstElement *storage;
GMutex lock;
GstPad *sync_src;
GstPad *send_rtp_sink;
GstPad *send_rtp_sink_ghost;
- GstPad *send_rtp_src;
GstPad *send_rtp_src_ghost;
GstPad *send_rtcp_src;
GstPad *send_rtcp_src_ghost;
GST_RTP_BIN_UNLOCK (rtpbin);
}
- static void
- new_bundled_ssrc_pad_found (GstElement * element, guint ssrc, GstPad * pad,
- GstRtpBinSession * session)
- {
- GValue result = G_VALUE_INIT;
- GValue params[2] = { G_VALUE_INIT, G_VALUE_INIT };
- guint session_id = 0;
- GstRtpBinSession *target_session = NULL;
- GstRtpBin *rtpbin = session->bin;
- gchar *name;
- GstPad *src_pad;
- GstPad *recv_rtp_sink = NULL;
- GstPad *recv_rtcp_sink = NULL;
- GstPadLinkReturn ret;
-
- GST_RTP_BIN_DYN_LOCK (rtpbin);
- GST_DEBUG_OBJECT (rtpbin, "new bundled SSRC pad %08x, %s:%s", ssrc,
- GST_DEBUG_PAD_NAME (pad));
-
- g_value_init (&result, G_TYPE_UINT);
- g_value_init (¶ms[0], GST_TYPE_ELEMENT);
- g_value_set_object (¶ms[0], rtpbin);
- g_value_init (¶ms[1], G_TYPE_UINT);
- g_value_set_uint (¶ms[1], ssrc);
-
- g_signal_emitv (params,
- gst_rtp_bin_signals[SIGNAL_ON_BUNDLED_SSRC], 0, &result);
- g_value_unset (¶ms[0]);
-
- session_id = g_value_get_uint (&result);
- if (session_id == 0) {
- target_session = session;
- } else {
- target_session = find_session_by_id (rtpbin, (gint) session_id);
- if (!target_session) {
- target_session = create_session (rtpbin, session_id);
- }
- if (!target_session) {
- /* create_session() warned already */
- GST_RTP_BIN_DYN_UNLOCK (rtpbin);
- return;
- }
-
- if (!target_session->recv_rtp_sink) {
- recv_rtp_sink = complete_session_sink (rtpbin, target_session, FALSE);
- }
-
- if (!target_session->recv_rtp_src)
- complete_session_receiver (rtpbin, target_session, session_id);
-
- if (!target_session->recv_rtcp_sink) {
- recv_rtcp_sink =
- complete_session_rtcp (rtpbin, target_session, session_id, FALSE);
- }
- }
-
- GST_DEBUG_OBJECT (rtpbin, "Assigning bundled ssrc %u to session %u", ssrc,
- session_id);
-
- if (!recv_rtp_sink) {
- recv_rtp_sink =
- gst_element_get_request_pad (target_session->rtp_funnel, "sink_%u");
- }
-
- if (!recv_rtcp_sink) {
- recv_rtcp_sink =
- gst_element_get_request_pad (target_session->rtcp_funnel, "sink_%u");
- }
-
- name = g_strdup_printf ("src_%u", ssrc);
- src_pad = gst_element_get_static_pad (element, name);
- ret = gst_pad_link (src_pad, recv_rtp_sink);
- g_free (name);
- gst_object_unref (src_pad);
- gst_object_unref (recv_rtp_sink);
- if (ret != GST_PAD_LINK_OK) {
- g_warning
- ("rtpbin: failed to link bundle demuxer to receive rtp funnel for session %u",
- session_id);
- }
-
- name = g_strdup_printf ("rtcp_src_%u", ssrc);
- src_pad = gst_element_get_static_pad (element, name);
- gst_pad_link (src_pad, recv_rtcp_sink);
- g_free (name);
- gst_object_unref (src_pad);
- gst_object_unref (recv_rtcp_sink);
- if (ret != GST_PAD_LINK_OK) {
- g_warning
- ("rtpbin: failed to link bundle demuxer to receive rtcp sink pad for session %u",
- session_id);
- }
-
- GST_RTP_BIN_DYN_UNLOCK (rtpbin);
- }
-
/* create a session with the given id. Must be called with RTP_BIN_LOCK */
static GstRtpBinSession *
create_session (GstRtpBin * rtpbin, gint id)
{
GstRtpBinSession *sess;
GstElement *session, *demux;
+ GstElement *storage = NULL;
GstState target;
if (!(session = gst_element_factory_make ("rtpsession", NULL)))
if (!(demux = gst_element_factory_make ("rtpssrcdemux", NULL)))
goto no_demux;
+ if (!(storage = gst_element_factory_make ("rtpstorage", NULL)))
+ goto no_storage;
+
+ /* need to sink the storage or otherwise signal handlers from bindings will
+ * take ownership of it and we don't own it anymore */
+ gst_object_ref_sink (storage);
+ g_signal_emit (rtpbin, gst_rtp_bin_signals[SIGNAL_NEW_STORAGE], 0, storage,
+ id);
+
sess = g_new0 (GstRtpBinSession, 1);
g_mutex_init (&sess->lock);
sess->id = id;
sess->bin = rtpbin;
sess->session = session;
sess->demux = demux;
-
- sess->rtp_funnel = gst_element_factory_make ("funnel", NULL);
- sess->rtcp_funnel = gst_element_factory_make ("funnel", NULL);
+ sess->storage = storage;
sess->ptmap = g_hash_table_new_full (NULL, NULL, NULL,
(GDestroyNotify) gst_caps_unref);
gst_bin_add (GST_BIN_CAST (rtpbin), session);
gst_bin_add (GST_BIN_CAST (rtpbin), demux);
- gst_bin_add (GST_BIN_CAST (rtpbin), sess->rtp_funnel);
- gst_bin_add (GST_BIN_CAST (rtpbin), sess->rtcp_funnel);
+ gst_bin_add (GST_BIN_CAST (rtpbin), storage);
+
+ /* unref the storage again, the bin has a reference now and
+ * we don't need it anymore */
+ gst_object_unref (storage);
GST_OBJECT_LOCK (rtpbin);
target = GST_STATE_TARGET (rtpbin);
/* change state only to what's needed */
gst_element_set_state (demux, target);
gst_element_set_state (session, target);
- gst_element_set_state (sess->rtp_funnel, target);
- gst_element_set_state (sess->rtcp_funnel, target);
+ gst_element_set_state (storage, target);
return sess;
g_warning ("rtpbin: could not create rtpssrcdemux element");
return NULL;
}
+ no_storage:
+ {
+ gst_object_unref (session);
+ gst_object_unref (demux);
+ g_warning ("rtpbin: could not create rtpstorage element");
+ return NULL;
+ }
}
static gboolean
GST_DEBUG_OBJECT (bin, "requested element %p already in bin", element);
} else {
GST_DEBUG_OBJECT (bin, "adding requested element %p", element);
+
+ if (g_object_is_floating (element))
+ element = gst_object_ref_sink (element);
+
if (!gst_bin_add (GST_BIN_CAST (bin), element))
goto add_failed;
if (!gst_element_sync_state_with_parent (element))
add_failed:
{
GST_WARNING_OBJECT (bin, "unable to add element");
+ gst_object_unref (element);
return FALSE;
}
}
if (find) {
priv->elements = g_list_delete_link (priv->elements, find);
- if (!g_list_find (priv->elements, element))
+ if (!g_list_find (priv->elements, element)) {
+ gst_element_set_locked_state (element, TRUE);
gst_bin_remove (GST_BIN_CAST (bin), element);
- else
- gst_object_unref (element);
+ gst_element_set_state (element, GST_STATE_NULL);
+ }
+
+ gst_object_unref (element);
}
}
gst_element_set_locked_state (sess->demux, TRUE);
gst_element_set_locked_state (sess->session, TRUE);
+ gst_element_set_locked_state (sess->storage, TRUE);
gst_element_set_state (sess->demux, GST_STATE_NULL);
gst_element_set_state (sess->session, GST_STATE_NULL);
+ gst_element_set_state (sess->storage, GST_STATE_NULL);
remove_recv_rtp (bin, sess);
remove_recv_rtcp (bin, sess);
gst_bin_remove (GST_BIN_CAST (bin), sess->session);
gst_bin_remove (GST_BIN_CAST (bin), sess->demux);
+ gst_bin_remove (GST_BIN_CAST (bin), sess->storage);
g_slist_foreach (sess->elements, (GFunc) remove_bin_element, bin);
g_slist_free (sess->elements);
}
static GstElement *
+ gst_rtp_bin_get_storage (GstRtpBin * bin, guint session_id)
+ {
+ GstRtpBinSession *session;
+ GstElement *res = NULL;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving internal storage object, index: %u",
+ session_id);
+ session = find_session_by_id (bin, (gint) session_id);
+ if (session && session->storage) {
+ res = gst_object_ref (session->storage);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return res;
+ }
+
+ static GObject *
+ gst_rtp_bin_get_internal_storage (GstRtpBin * bin, guint session_id)
+ {
+ GObject *internal_storage = NULL;
+ GstRtpBinSession *session;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving internal storage object, index: %u",
+ session_id);
+ session = find_session_by_id (bin, (gint) session_id);
+ if (session && session->storage) {
+ g_object_get (session->storage, "internal-storage", &internal_storage,
+ NULL);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return internal_storage;
+ }
+
+ static GstElement *
gst_rtp_bin_request_encoder (GstRtpBin * bin, guint session_id)
{
GST_DEBUG_OBJECT (bin, "return NULL encoder");
static void
stream_set_ts_offset (GstRtpBin * bin, GstRtpBinStream * stream,
- gint64 ts_offset, gboolean check)
+ gint64 ts_offset, gint64 max_ts_offset, gint64 min_ts_offset,
+ gboolean allow_positive_ts_offset)
{
gint64 prev_ts_offset;
"ts-offset %" G_GINT64_FORMAT ", prev %" G_GINT64_FORMAT
", diff: %" G_GINT64_FORMAT, ts_offset, prev_ts_offset, diff);
- if (check) {
- /* only change diff when it changed more than 4 milliseconds. This
- * compensates for rounding errors in NTP to RTP timestamp
- * conversions */
- if (ABS (diff) < 4 * GST_MSECOND) {
- GST_DEBUG_OBJECT (bin, "offset too small, ignoring");
+ /* ignore minor offsets */
+ if (ABS (diff) < min_ts_offset) {
+ GST_DEBUG_OBJECT (bin, "offset too small, ignoring");
+ return;
+ }
+
+ /* sanity check offset */
+ if (max_ts_offset > 0) {
+ if (ts_offset > 0 && !allow_positive_ts_offset) {
+ GST_DEBUG_OBJECT (bin,
+ "offset is positive (clocks are out of sync), ignoring");
return;
}
- if (ABS (diff) > (3 * GST_SECOND)) {
- GST_WARNING_OBJECT (bin, "offset unusually large, ignoring");
+ if (ABS (ts_offset) > max_ts_offset) {
+ GST_DEBUG_OBJECT (bin, "offset too large, ignoring");
return;
}
}
+
g_object_set (stream->buffer, "ts-offset", ts_offset, NULL);
}
GST_DEBUG_OBJECT (bin, "stream SSRC %08x, delta %" G_GINT64_FORMAT,
"local NTP time %" G_GUINT64_FORMAT ", SR NTP time %" G_GUINT64_FORMAT,
local_ntpnstime, ntpnstime);
GST_DEBUG_OBJECT (bin,
+ "local running time %" G_GUINT64_FORMAT ", SR RTP running time %"
+ G_GUINT64_FORMAT, local_running_time, running_time);
+ GST_DEBUG_OBJECT (bin,
"NTP diff %" G_GINT64_FORMAT ", RT diff %" G_GINT64_FORMAT, ntpdiff,
rtdiff);
/* combine to get the final diff to apply to the running_time */
stream->rt_delta = rtdiff - ntpdiff;
- stream_set_ts_offset (bin, stream, stream->rt_delta, FALSE);
+ stream_set_ts_offset (bin, stream, stream->rt_delta, bin->max_ts_offset,
+ 0, FALSE);
} else {
gint64 min, rtp_min, clock_base = stream->clock_base;
gboolean all_sync, use_rtp;
else
ts_offset = ostream->rt_delta - min;
- stream_set_ts_offset (bin, ostream, ts_offset, TRUE);
+ stream_set_ts_offset (bin, ostream, ts_offset, bin->max_ts_offset,
+ MIN_TS_OFFSET, TRUE);
}
}
gst_rtp_bin_send_sync_event (stream);
create_stream (GstRtpBinSession * session, guint32 ssrc)
{
GstElement *buffer, *demux = NULL;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstElement *queue2 = NULL;
+#endif
GstRtpBinStream *stream;
GstRtpBin *rtpbin;
GstState target;
if (!(buffer = gst_element_factory_make ("rtpjitterbuffer", NULL)))
goto no_jitterbuffer;
- if (!rtpbin->ignore_pt)
+ if (!rtpbin->ignore_pt) {
if (!(demux = gst_element_factory_make ("rtpptdemux", NULL)))
goto no_demux;
-
+ }
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (session->bin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
+ if (!(queue2 = gst_element_factory_make ("queue2", NULL)))
+ goto no_queue2;
+#endif
stream = g_new0 (GstRtpBinStream, 1);
stream->ssrc = ssrc;
stream->bin = rtpbin;
stream->rt_delta = 0;
stream->rtp_delta = 0;
stream->percent = 100;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ stream->prev_percent = 0;
+#endif
stream->clock_base = -100 * GST_SECOND;
session->streams = g_slist_prepend (session->streams, stream);
g_object_set (buffer, "max-dropout-time", rtpbin->max_dropout_time,
"max-misorder-time", rtpbin->max_misorder_time, NULL);
g_object_set (buffer, "rfc7273-sync", rtpbin->rfc7273_sync, NULL);
+ g_object_set (buffer, "max-ts-offset-adjustment",
+ rtpbin->max_ts_offset_adjustment, NULL);
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* configure queue2 to use live buffering */
+ if (queue2) {
+ g_object_set_data (G_OBJECT (queue2), "GstRTPBin.stream", stream);
+ g_object_set (queue2, "use-buffering", TRUE, NULL);
+ g_object_set (queue2, "buffer-mode", GST_BUFFERING_LIVE, NULL);
+ }
+#endif
+ /* need to sink the jitterbufer or otherwise signal handlers from bindings will
+ * take ownership of it and we don't own it anymore */
+ gst_object_ref_sink (buffer);
g_signal_emit (rtpbin, gst_rtp_bin_signals[SIGNAL_NEW_JITTERBUFFER], 0,
buffer, session->id, ssrc);
if (!rtpbin->ignore_pt)
gst_bin_add (GST_BIN_CAST (rtpbin), demux);
+
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2)
+ gst_bin_add (GST_BIN_CAST (rtpbin), queue2);
+#endif
+
gst_bin_add (GST_BIN_CAST (rtpbin), buffer);
+ /* unref the jitterbuffer again, the bin has a reference now and
+ * we don't need it anymore */
+ gst_object_unref (buffer);
+
/* link stuff */
- } else if (demux)
- gst_element_link_pads_full (buffer, "src", demux, "sink",
- GST_PAD_LINK_CHECK_NOTHING);
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2) {
+ gst_element_link_pads_full (buffer, "src", queue2, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+ if (demux) {
+ gst_element_link_pads_full (queue2, "src", demux, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+ }
++ } else if (demux) {
++ gst_element_link_pads_full (buffer, "src", demux, "sink",
++ GST_PAD_LINK_CHECK_NOTHING);
++ }
+#else
if (demux)
gst_element_link_pads_full (buffer, "src", demux, "sink",
GST_PAD_LINK_CHECK_NOTHING);
+#endif
if (rtpbin->buffering) {
guint64 last_out;
gst_element_set_state (buffer, target);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (queue2)
+ gst_element_set_state (queue2, target);
+#endif
+
return stream;
/* ERRORS */
g_warning ("rtpbin: could not create rtpptdemux element");
return NULL;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+no_queue2:
+ {
+ gst_object_unref (buffer);
+ gst_object_unref (demux);
+ g_warning ("rtpbin: could not create queue2 element");
+ return NULL;
+ }
+#endif
}
/* called with RTP_BIN_LOCK */
static void gst_rtp_bin_handle_message (GstBin * bin, GstMessage * message);
#define gst_rtp_bin_parent_class parent_class
- G_DEFINE_TYPE (GstRtpBin, gst_rtp_bin, GST_TYPE_BIN);
+ G_DEFINE_TYPE_WITH_PRIVATE (GstRtpBin, gst_rtp_bin, GST_TYPE_BIN);
static gboolean
_gst_element_accumulator (GSignalInvocationHint * ihint,
gstelement_class = (GstElementClass *) klass;
gstbin_class = (GstBinClass *) klass;
- g_type_class_add_private (klass, sizeof (GstRtpBinPrivate));
-
gobject_class->dispose = gst_rtp_bin_dispose;
gobject_class->finalize = gst_rtp_bin_finalize;
gobject_class->set_property = gst_rtp_bin_set_property;
RTP_TYPE_SESSION, 1, G_TYPE_UINT);
/**
+ * GstRtpBin::get-internal-storage:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the internal RTPStorage object as #GObject in session @id.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_INTERNAL_STORAGE] =
+ g_signal_new ("get-internal-storage", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_internal_storage), NULL, NULL, g_cclosure_marshal_generic,
+ G_TYPE_OBJECT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::get-storage:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the RTPStorage element as #GObject in session @id.
+ *
+ * Since: 1.16
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_STORAGE] =
+ g_signal_new ("get-storage", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_storage), NULL, NULL, g_cclosure_marshal_generic,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
* GstRtpBin::on-new-ssrc:
* @rtpbin: the object which received the signal
* @session: the session
G_TYPE_NONE, 3, GST_TYPE_ELEMENT, G_TYPE_UINT, G_TYPE_UINT);
/**
+ * GstRtpBin::new-storage:
+ * @rtpbin: the object which received the signal
+ * @storage: the new storage
+ * @session: the session
+ *
+ * Notify that a new @storage was created for @session.
+ * This signal can, for example, be used to configure @storage.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_NEW_STORAGE] =
+ g_signal_new ("new-storage", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ new_storage), NULL, NULL, g_cclosure_marshal_generic,
+ G_TYPE_NONE, 2, GST_TYPE_ELEMENT, G_TYPE_UINT);
+
+ /**
* GstRtpBin::request-aux-sender:
* @rtpbin: the object which received the signal
* @session: the session
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
request_aux_sender), _gst_element_accumulator, NULL,
g_cclosure_marshal_generic, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
/**
* GstRtpBin::request-aux-receiver:
* @rtpbin: the object which received the signal
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
request_aux_receiver), _gst_element_accumulator, NULL,
g_cclosure_marshal_generic, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-fec-decoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session index
+ *
+ * Request a FEC decoder element for the given @session. The element
+ * will be added to the bin after the pt demuxer.
+ *
+ * If no handler is connected, no FEC decoder will be used.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_FEC_DECODER] =
+ g_signal_new ("request-fec-decoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_fec_decoder), _gst_element_accumulator, NULL,
+ g_cclosure_marshal_generic, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-fec-encoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session index
+ *
+ * Request a FEC encoder element for the given @session. The element
+ * will be added to the bin after the RTPSession.
+ *
+ * If no handler is connected, no FEC encoder will be used.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_FEC_ENCODER] =
+ g_signal_new ("request-fec-encoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_fec_encoder), _gst_element_accumulator, NULL,
+ g_cclosure_marshal_generic, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
/**
* GstRtpBin::on-new-sender-ssrc:
* @rtpbin: the object which received the signal
on_sender_ssrc_active), NULL, NULL, g_cclosure_marshal_generic,
G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
-
- /**
- * GstRtpBin::on-bundled-ssrc:
- * @rtpbin: the object which received the signal
- * @ssrc: the bundled SSRC
- *
- * Notify of a new incoming bundled SSRC. If no handler is connected to the
- * signal then the #GstRtpSession created for the recv_rtp_sink_\%u
- * request pad will be managing this new SSRC. However if there is a handler
- * connected then the application can decided to dispatch this new stream to
- * another session by providing its ID as return value of the handler. This
- * can be particularly useful to keep retransmission SSRCs grouped with the
- * session for which they handle retransmission.
- *
- * Since: 1.12
- */
- gst_rtp_bin_signals[SIGNAL_ON_BUNDLED_SSRC] =
- g_signal_new ("on-bundled-ssrc", G_TYPE_FROM_CLASS (klass),
- G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
- on_bundled_ssrc), NULL, NULL,
- g_cclosure_marshal_generic, G_TYPE_UINT, 1, G_TYPE_UINT);
-
-
g_object_class_install_property (gobject_class, PROP_SDES,
g_param_spec_boxed ("sdes", "SDES",
"The SDES items of this session",
0, G_MAXUINT, DEFAULT_MAX_STREAMS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpBin:max-ts-offset-adjustment:
+ *
+ * Syncing time stamps to NTP time adds a time offset. This parameter
+ * specifies the maximum number of nanoseconds per frame that this time offset
+ * may be adjusted with. This is used to avoid sudden large changes to time
+ * stamps.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ g_param_spec_uint64 ("max-ts-offset-adjustment",
+ "Max Timestamp Offset Adjustment",
+ "The maximum number of nanoseconds per frame that time stamp offsets "
+ "may be adjusted (0 = no limit).", 0, G_MAXUINT64,
+ DEFAULT_MAX_TS_OFFSET_ADJUSTMENT, G_PARAM_READWRITE |
+ G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:max-ts-offset:
+ *
+ * Used to set an upper limit of how large a time offset may be. This
+ * is used to protect against unrealistic values as a result of either
 * client, server or clock issues.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET,
+ g_param_spec_int64 ("max-ts-offset", "Max TS Offset",
+ "The maximum absolute value of the time offset in (nanoseconds). "
+ "Note, if the ntp-sync parameter is set the default value is "
+ "changed to 0 (no limit)", 0, G_MAXINT64, DEFAULT_MAX_TS_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_rtp_bin_change_state);
gstelement_class->request_new_pad =
GST_DEBUG_FUNCPTR (gst_rtp_bin_request_new_pad);
klass->get_session = GST_DEBUG_FUNCPTR (gst_rtp_bin_get_session);
klass->get_internal_session =
GST_DEBUG_FUNCPTR (gst_rtp_bin_get_internal_session);
+ klass->get_storage = GST_DEBUG_FUNCPTR (gst_rtp_bin_get_storage);
+ klass->get_internal_storage =
+ GST_DEBUG_FUNCPTR (gst_rtp_bin_get_internal_storage);
klass->request_rtp_encoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_encoder);
klass->request_rtp_decoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_decoder);
klass->request_rtcp_encoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_encoder);
{
gchar *cname;
- rtpbin->priv = GST_RTP_BIN_GET_PRIVATE (rtpbin);
+ rtpbin->priv = gst_rtp_bin_get_instance_private (rtpbin);
g_mutex_init (&rtpbin->priv->bin_lock);
g_mutex_init (&rtpbin->priv->dyn_lock);
rtpbin->max_misorder_time = DEFAULT_MAX_MISORDER_TIME;
rtpbin->rfc7273_sync = DEFAULT_RFC7273_SYNC;
rtpbin->max_streams = DEFAULT_MAX_STREAMS;
+ rtpbin->max_ts_offset_adjustment = DEFAULT_MAX_TS_OFFSET_ADJUSTMENT;
+ rtpbin->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ rtpbin->max_ts_offset_is_set = FALSE;
/* some default SDES entries */
cname = g_strdup_printf ("user%u@host-%x", g_random_int (), g_random_int ());
break;
case PROP_NTP_SYNC:
rtpbin->ntp_sync = g_value_get_boolean (value);
+ /* The default value of max_ts_offset depends on ntp_sync. If user
+ * hasn't set it then change default value */
+ if (!rtpbin->max_ts_offset_is_set) {
+ if (rtpbin->ntp_sync) {
+ rtpbin->max_ts_offset = 0;
+ } else {
+ rtpbin->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ }
+ }
break;
case PROP_RTCP_SYNC:
g_atomic_int_set (&rtpbin->rtcp_sync, g_value_get_enum (value));
case PROP_MAX_STREAMS:
rtpbin->max_streams = g_value_get_uint (value);
break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ rtpbin->max_ts_offset_adjustment = g_value_get_uint64 (value);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "max-ts-offset-adjustment", value);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ rtpbin->max_ts_offset = g_value_get_int64 (value);
+ rtpbin->max_ts_offset_is_set = TRUE;
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
case PROP_MAX_STREAMS:
g_value_set_uint (value, rtpbin->max_streams);
break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ g_value_set_uint64 (value, rtpbin->max_ts_offset_adjustment);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ g_value_set_int64 (value, rtpbin->max_ts_offset);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
gint min_percent = 100;
GSList *sessions, *streams;
GstRtpBinStream *stream;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gboolean buffering_flag = FALSE, update_buffering_status = TRUE;
+#endif
gboolean change = FALSE, active = FALSE;
GstClockTime min_out_time;
GstBufferingMode mode;
for (streams = session->streams; streams;
streams = g_slist_next (streams)) {
GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstPad *temp_pad_src = NULL;
+ GstCaps *temp_caps_src = NULL;
+ GstStructure *caps_structure;
+ const gchar *caps_str_media = NULL;
+ temp_pad_src = gst_element_get_static_pad (stream->buffer, "src");
- temp_caps_src = gst_pad_get_current_caps(temp_pad_src);
- GST_DEBUG_OBJECT (bin, "stream %p percent %d : temp_caps_src=%"GST_PTR_FORMAT, stream,stream->percent,temp_caps_src);
- if (temp_caps_src)
- {
++ temp_caps_src = gst_pad_get_current_caps (temp_pad_src);
++ GST_DEBUG_OBJECT (bin,
++ "stream %p percent %d : temp_caps_src=%" GST_PTR_FORMAT,
++ stream, stream->percent, temp_caps_src);
++ if (temp_caps_src) {
+ caps_structure = gst_caps_get_structure (temp_caps_src, 0);
- caps_str_media = gst_structure_get_string (caps_structure, "media");
- if (caps_str_media != NULL)
- {
- if ((strcmp(caps_str_media,"video") != 0)&&(strcmp(caps_str_media,"audio") != 0))
- {
- GST_DEBUG_OBJECT (bin, "Non Audio/Video Stream.. ignoring the same !!");
- gst_caps_unref( temp_caps_src );
- gst_object_unref( temp_pad_src );
++ caps_str_media =
++ gst_structure_get_string (caps_structure, "media");
++ if (caps_str_media != NULL) {
++ if ((strcmp (caps_str_media, "video") != 0)
++ && (strcmp (caps_str_media, "audio") != 0)) {
++ GST_DEBUG_OBJECT (bin,
++ "Non Audio/Video Stream.. ignoring the same !!");
++ gst_caps_unref (temp_caps_src);
++ gst_object_unref (temp_pad_src);
+ continue;
- }
- else if(stream->percent >= 100)
- {
++ } else if (stream->percent >= 100) {
+ /* A buffering icon is displayed most of the time during RTSP playback.
- Optimizing the buffering updation code. Whenever any stream percentage
- reaches 100 do not post buffering messages.*/
- if(stream->prev_percent < 100)
- {
++ Optimizing the buffering update code. Whenever any stream percentage
++ reaches 100 do not post buffering messages. */
++ if (stream->prev_percent < 100)
+ buffering_flag = TRUE;
- }
+ else
- {
+ update_buffering_status = FALSE;
- }
+ }
+ }
- gst_caps_unref( temp_caps_src );
++ gst_caps_unref (temp_caps_src);
+ }
- gst_object_unref( temp_pad_src );
++ gst_object_unref (temp_pad_src);
+#else
GST_DEBUG_OBJECT (bin, "stream %p percent %d", stream,
stream->percent);
-
+#endif
/* find min percent */
if (min_percent > stream->percent)
min_percent = stream->percent;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* Updating prev stream percentage */
+ stream->prev_percent = stream->percent;
+#endif
}
} else {
GST_INFO_OBJECT (bin,
GST_RTP_SESSION_UNLOCK (session);
}
GST_DEBUG_OBJECT (bin, "min percent %d", min_percent);
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (rtpbin->buffer_mode != RTP_JITTER_BUFFER_MODE_SLAVE) {
+ if (rtpbin->buffering) {
+ if (min_percent == 100) {
+ rtpbin->buffering = FALSE;
+ active = TRUE;
+ change = TRUE;
+ }
+ } else {
+ if (min_percent < 100) {
+ /* pause the streams */
+ rtpbin->buffering = TRUE;
+ active = FALSE;
+ change = TRUE;
+ }
+ }
+ }
+#else
if (rtpbin->buffering) {
if (min_percent == 100) {
rtpbin->buffering = FALSE;
change = TRUE;
}
}
+#endif
GST_RTP_BIN_UNLOCK (rtpbin);
gst_message_unref (message);
- if (rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
- {
- if(update_buffering_status==FALSE)
- {
- break;
- }
- if(buffering_flag)
- {
- min_percent=100;
- GST_DEBUG_OBJECT (bin, "forcefully change min_percent to 100!!!");
- }
- }
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE) {
++ if (update_buffering_status == FALSE)
++ break;
++ if (buffering_flag) {
++ min_percent = 100;
++ GST_DEBUG_OBJECT (bin, "forcefully change min_percent to 100!!!");
++ }
++ }
+#endif
/* make a new buffering message with the min value */
message =
gst_message_new_buffering (GST_OBJECT_CAST (bin), min_percent);
gst_message_set_buffering_stats (message, mode, avg_in, avg_out,
buffering_left);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
+ goto slave_buffering;
+#endif
if (G_UNLIKELY (change)) {
GstClock *clock;
guint64 running_time = 0;
GST_RTP_BIN_UNLOCK (rtpbin);
}
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+slave_buffering:
+#endif
GST_BIN_CLASS (parent_class)->handle_message (bin, message);
break;
}
return TRUE;
}
- /* a new pad (SSRC) was created in @session. This signal is emited from the
- * payload demuxer. */
static void
- new_payload_found (GstElement * element, guint pt, GstPad * pad,
- GstRtpBinStream * stream)
+ expose_recv_src_pad (GstRtpBin * rtpbin, GstPad * pad, GstRtpBinStream * stream,
+ guint8 pt)
{
- GstRtpBin *rtpbin;
GstElementClass *klass;
GstPadTemplate *templ;
gchar *padname;
GstPad *gpad;
- rtpbin = stream->bin;
+ gst_object_ref (pad);
- GST_DEBUG_OBJECT (rtpbin, "new payload pad %u", pt);
+ if (stream->session->storage) {
+ GstElement *fec_decoder =
+ session_request_element (stream->session, SIGNAL_REQUEST_FEC_DECODER);
+
+ if (fec_decoder) {
+ GstPad *sinkpad, *srcpad;
+ GstPadLinkReturn ret;
+
+ sinkpad = gst_element_get_static_pad (fec_decoder, "sink");
+
+ if (!sinkpad)
+ goto fec_decoder_sink_failed;
+
+ ret = gst_pad_link (pad, sinkpad);
+ gst_object_unref (sinkpad);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto fec_decoder_link_failed;
+
+ srcpad = gst_element_get_static_pad (fec_decoder, "src");
+
+ if (!srcpad)
+ goto fec_decoder_src_failed;
+
+ gst_pad_sticky_events_foreach (pad, copy_sticky_events, srcpad);
+ gst_object_unref (pad);
+ pad = srcpad;
+ }
+ }
GST_RTP_BIN_SHUTDOWN_LOCK (rtpbin, shutdown);
gst_pad_sticky_events_foreach (pad, copy_sticky_events, gpad);
gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), gpad);
+ done:
+ gst_object_unref (pad);
+
return;
shutdown:
{
GST_DEBUG ("ignoring, we are shutting down");
- return;
+ goto done;
+ }
+ fec_decoder_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get fec encoder sink pad for session %u",
+ stream->session->id);
+ goto done;
+ }
+ fec_decoder_src_failed:
+ {
+ g_warning ("rtpbin: failed to get fec encoder src pad for session %u",
+ stream->session->id);
+ goto done;
+ }
+ fec_decoder_link_failed:
+ {
+ g_warning ("rtpbin: failed to link fec decoder for session %u",
+ stream->session->id);
+ goto done;
}
}
+ /* a new pad (SSRC) was created in @session. This signal is emitted from the
+ * payload demuxer. */
+ static void
+ new_payload_found (GstElement * element, guint pt, GstPad * pad,
+ GstRtpBinStream * stream)
+ {
+ GstRtpBin *rtpbin;
+
+ rtpbin = stream->bin;
+
+ GST_DEBUG_OBJECT (rtpbin, "new payload pad %u", pt);
+
+ expose_recv_src_pad (rtpbin, pad, stream, pt);
+ }
+
static void
payload_pad_removed (GstElement * element, GstPad * pad,
GstRtpBinStream * stream)
}
}
+ static GstCaps *
+ ptdemux_pt_map_requested (GstElement * element, guint pt,
+ GstRtpBinSession * session)
+ {
+ GstCaps *ret = pt_map_requested (element, pt, session);
+
+ if (ret && gst_caps_get_size (ret) == 1) {
+ const GstStructure *s = gst_caps_get_structure (ret, 0);
+ gboolean is_fec;
+
+ if (gst_structure_get_boolean (s, "is-fec", &is_fec) && is_fec) {
+ GValue v = G_VALUE_INIT;
+ GValue v2 = G_VALUE_INIT;
+
+ GST_INFO_OBJECT (session->bin, "Will ignore FEC pt %u in session %u", pt,
+ session->id);
+ g_value_init (&v, GST_TYPE_ARRAY);
+ g_value_init (&v2, G_TYPE_INT);
+ g_object_get_property (G_OBJECT (element), "ignored-payload-types", &v);
+ g_value_set_int (&v2, pt);
+ gst_value_array_append_value (&v, &v2);
+ g_value_unset (&v2);
+ g_object_set_property (G_OBJECT (element), "ignored-payload-types", &v);
+ g_value_unset (&v);
+ }
+ }
+
+ return ret;
+ }
+
static void
payload_type_change (GstElement * element, guint pt, GstRtpBinSession * session)
{
0, session->id, pt);
}
- /* emited when caps changed for the session */
+ /* emitted when caps changed for the session */
static void
caps_changed (GstPad * pad, GParamSpec * pspec, GstRtpBinSession * session)
{
stream->demux_padremoved_sig = g_signal_connect (stream->demux,
"pad-removed", (GCallback) payload_pad_removed, stream);
- /* connect to the request-pt-map signal. This signal will be emited by the
+ /* connect to the request-pt-map signal. This signal will be emitted by the
* demuxer so that it can apply a proper caps on the buffers for the
* depayloaders. */
stream->demux_ptreq_sig = g_signal_connect (stream->demux,
- "request-pt-map", (GCallback) pt_map_requested, session);
+ "request-pt-map", (GCallback) ptdemux_pt_map_requested, session);
/* connect to the signal so it can be forwarded. */
stream->demux_ptchange_sig = g_signal_connect (stream->demux,
"payload-type-change", (GCallback) payload_type_change, session);
+
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
} else {
/* add rtpjitterbuffer src pad to pads */
- GstElementClass *klass;
- GstPadTemplate *templ;
- gchar *padname;
- GstPad *gpad, *pad;
+ GstPad *pad;
pad = gst_element_get_static_pad (stream->buffer, "src");
- /* ghost the pad to the parent */
- klass = GST_ELEMENT_GET_CLASS (rtpbin);
- templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%u_%u_%u");
- padname = g_strdup_printf ("recv_rtp_src_%u_%u_%u",
- stream->session->id, stream->ssrc, 255);
- gpad = gst_ghost_pad_new_from_template (padname, pad, templ);
- g_free (padname);
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
- gst_pad_set_active (gpad, TRUE);
- gst_pad_sticky_events_foreach (pad, copy_sticky_events, gpad);
- gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), gpad);
+ expose_recv_src_pad (rtpbin, pad, stream, 255);
gst_object_unref (pad);
}
- GST_RTP_SESSION_UNLOCK (session);
- GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
-
return;
/* ERRORS */
}
}
- static void
- session_maybe_create_bundle_demuxer (GstRtpBinSession * session)
- {
- GstRtpBin *rtpbin;
-
- if (session->bundle_demux)
- return;
-
- rtpbin = session->bin;
- if (g_signal_has_handler_pending (rtpbin,
- gst_rtp_bin_signals[SIGNAL_ON_BUNDLED_SSRC], 0, TRUE)) {
- GST_DEBUG_OBJECT (rtpbin, "Adding a bundle SSRC demuxer to session %u",
- session->id);
- session->bundle_demux = gst_element_factory_make ("rtpssrcdemux", NULL);
- session->bundle_demux_newpad_sig = g_signal_connect (session->bundle_demux,
- "new-ssrc-pad", (GCallback) new_bundled_ssrc_pad_found, session);
-
- gst_bin_add (GST_BIN_CAST (rtpbin), session->bundle_demux);
- gst_element_sync_state_with_parent (session->bundle_demux);
- } else {
- GST_DEBUG_OBJECT (rtpbin,
- "No handler for the on-bundled-ssrc signal so no need for a bundle SSRC demuxer in session %u",
- session->id);
- }
- }
-
static GstPad *
- complete_session_sink (GstRtpBin * rtpbin, GstRtpBinSession * session,
- gboolean bundle_demuxer_needed)
+ complete_session_sink (GstRtpBin * rtpbin, GstRtpBinSession * session)
{
guint sessid = session->id;
GstPad *recv_rtp_sink;
- GstPad *funnel_src;
GstElement *decoder;
g_assert (!session->recv_rtp_sink);
g_signal_connect (session->recv_rtp_sink, "notify::caps",
(GCallback) caps_changed, session);
- if (bundle_demuxer_needed)
- session_maybe_create_bundle_demuxer (session);
-
GST_DEBUG_OBJECT (rtpbin, "requesting RTP decoder");
decoder = session_request_element (session, SIGNAL_REQUEST_RTP_DECODER);
if (decoder) {
if (decsrc == NULL)
goto dec_src_failed;
- if (session->bundle_demux) {
- GstPad *demux_sink;
- demux_sink = gst_element_get_static_pad (session->bundle_demux, "sink");
- ret = gst_pad_link (decsrc, demux_sink);
- gst_object_unref (demux_sink);
- } else {
- ret = gst_pad_link (decsrc, session->recv_rtp_sink);
- }
+ ret = gst_pad_link (decsrc, session->recv_rtp_sink);
+
gst_object_unref (decsrc);
if (ret != GST_PAD_LINK_OK)
} else {
GST_DEBUG_OBJECT (rtpbin, "no RTP decoder given");
- if (session->bundle_demux) {
- recv_rtp_sink =
- gst_element_get_static_pad (session->bundle_demux, "sink");
- } else {
- recv_rtp_sink =
- gst_element_get_request_pad (session->rtp_funnel, "sink_%u");
- }
+ recv_rtp_sink = gst_object_ref (session->recv_rtp_sink);
}
- funnel_src = gst_element_get_static_pad (session->rtp_funnel, "src");
- gst_pad_link (funnel_src, session->recv_rtp_sink);
- gst_object_unref (funnel_src);
-
return recv_rtp_sink;
/* ERRORS */
if (session->recv_rtp_src == NULL)
goto pad_failed;
- /* find out if we need AUX elements or if we can go into the SSRC demuxer
- * directly */
+ /* find out if we need AUX elements */
aux = session_request_element (session, SIGNAL_REQUEST_AUX_RECEIVER);
if (aux) {
gchar *pname;
if (ret != GST_PAD_LINK_OK)
goto aux_link_failed;
- /* this can be NULL when this AUX element is not to be linked to
- * an SSRC demuxer */
+ /* this can be NULL when this AUX element is not to be linked any further */
pname = g_strdup_printf ("src_%u", sessid);
recv_rtp_src = gst_element_get_static_pad (aux, pname);
g_free (pname);
recv_rtp_src = gst_object_ref (session->recv_rtp_src);
}
+ /* Add a storage element if needed */
+ if (recv_rtp_src && session->storage) {
+ GstPadLinkReturn ret;
+ GstPad *sinkpad = gst_element_get_static_pad (session->storage, "sink");
+
+ ret = gst_pad_link (recv_rtp_src, sinkpad);
+
+ gst_object_unref (sinkpad);
+ gst_object_unref (recv_rtp_src);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto storage_link_failed;
+
+ recv_rtp_src = gst_element_get_static_pad (session->storage, "src");
+ }
+
if (recv_rtp_src) {
GstPad *sinkdpad;
g_warning ("rtpbin: failed to link AUX pad to session %u", sessid);
return;
}
+ storage_link_failed:
+ {
+ g_warning ("rtpbin: failed to link storage");
+ return;
+ }
}
/* Create a pad for receiving RTP for the session in @name. Must be called with
return session->recv_rtp_sink_ghost;
/* setup the session sink pad */
- recv_rtp_sink = complete_session_sink (rtpbin, session, TRUE);
+ recv_rtp_sink = complete_session_sink (rtpbin, session);
if (!recv_rtp_sink)
goto session_sink_failed;
-
GST_DEBUG_OBJECT (rtpbin, "ghosting session sink pad");
session->recv_rtp_sink_ghost =
gst_ghost_pad_new_from_template (name, recv_rtp_sink, templ);
g_signal_handler_disconnect (session->demux, session->demux_padremoved_sig);
session->demux_padremoved_sig = 0;
}
- if (session->bundle_demux_newpad_sig) {
- g_signal_handler_disconnect (session->bundle_demux,
- session->bundle_demux_newpad_sig);
- session->bundle_demux_newpad_sig = 0;
- }
if (session->recv_rtp_src) {
gst_object_unref (session->recv_rtp_src);
session->recv_rtp_src = NULL;
static GstPad *
complete_session_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session,
- guint sessid, gboolean bundle_demuxer_needed)
+ guint sessid)
{
GstElement *decoder;
GstPad *sinkdpad;
GstPad *decsink = NULL;
- GstPad *funnel_src;
/* get recv_rtp pad and store */
GST_DEBUG_OBJECT (rtpbin, "getting RTCP sink pad");
if (session->recv_rtcp_sink == NULL)
goto pad_failed;
- if (bundle_demuxer_needed)
- session_maybe_create_bundle_demuxer (session);
-
GST_DEBUG_OBJECT (rtpbin, "getting RTCP decoder");
decoder = session_request_element (session, SIGNAL_REQUEST_RTCP_DECODER);
if (decoder) {
if (decsrc == NULL)
goto dec_src_failed;
- if (session->bundle_demux) {
- GstPad *demux_sink;
- demux_sink =
- gst_element_get_static_pad (session->bundle_demux, "rtcp_sink");
- ret = gst_pad_link (decsrc, demux_sink);
- gst_object_unref (demux_sink);
- } else {
- ret = gst_pad_link (decsrc, session->recv_rtcp_sink);
- }
+ ret = gst_pad_link (decsrc, session->recv_rtcp_sink);
+
gst_object_unref (decsrc);
if (ret != GST_PAD_LINK_OK)
goto dec_link_failed;
} else {
GST_DEBUG_OBJECT (rtpbin, "no RTCP decoder given");
- if (session->bundle_demux) {
- decsink = gst_element_get_static_pad (session->bundle_demux, "rtcp_sink");
- } else {
- decsink = gst_element_get_request_pad (session->rtcp_funnel, "sink_%u");
- }
+ decsink = gst_object_ref (session->recv_rtcp_sink);
}
/* get srcpad, link to SSRCDemux */
gst_pad_link_full (session->sync_src, sinkdpad, GST_PAD_LINK_CHECK_NOTHING);
gst_object_unref (sinkdpad);
- funnel_src = gst_element_get_static_pad (session->rtcp_funnel, "src");
- gst_pad_link (funnel_src, session->recv_rtcp_sink);
- gst_object_unref (funnel_src);
-
return decsink;
pad_failed:
if (session->recv_rtcp_sink_ghost != NULL)
return session->recv_rtcp_sink_ghost;
- decsink = complete_session_rtcp (rtpbin, session, sessid, TRUE);
+ decsink = complete_session_rtcp (rtpbin, session, sessid);
if (!decsink)
goto create_error;
GstElement *encoder;
GstElementClass *klass;
GstPadTemplate *templ;
+ gboolean ret = FALSE;
/* get srcpad */
- session->send_rtp_src =
- gst_element_get_static_pad (session->session, "send_rtp_src");
- if (session->send_rtp_src == NULL)
+ send_rtp_src = gst_element_get_static_pad (session->session, "send_rtp_src");
+
+ if (send_rtp_src == NULL)
goto no_srcpad;
GST_DEBUG_OBJECT (rtpbin, "getting RTP encoder");
if (encsrc == NULL)
goto enc_src_failed;
- send_rtp_src = encsrc;
-
ename = g_strdup_printf ("rtp_sink_%u", sessid);
encsink = gst_element_get_static_pad (encoder, ename);
g_free (ename);
if (encsink == NULL)
goto enc_sink_failed;
- ret = gst_pad_link (session->send_rtp_src, encsink);
+ ret = gst_pad_link (send_rtp_src, encsink);
gst_object_unref (encsink);
+ gst_object_unref (send_rtp_src);
+
+ send_rtp_src = encsrc;
if (ret != GST_PAD_LINK_OK)
goto enc_link_failed;
} else {
GST_DEBUG_OBJECT (rtpbin, "no RTP encoder given");
- send_rtp_src = gst_object_ref (session->send_rtp_src);
}
/* ghost the new source pad */
templ = gst_element_class_get_pad_template (klass, "send_rtp_src_%u");
session->send_rtp_src_ghost =
gst_ghost_pad_new_from_template (gname, send_rtp_src, templ);
- gst_object_unref (send_rtp_src);
gst_pad_set_active (session->send_rtp_src_ghost, TRUE);
gst_pad_sticky_events_foreach (send_rtp_src, copy_sticky_events,
session->send_rtp_src_ghost);
gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->send_rtp_src_ghost);
g_free (gname);
- return TRUE;
+ ret = TRUE;
+
+ done:
+ if (send_rtp_src)
+ gst_object_unref (send_rtp_src);
+
+ return ret;
/* ERRORS */
no_srcpad:
{
g_warning ("rtpbin: failed to get rtp source pad for session %u", sessid);
- return FALSE;
+ goto done;
}
enc_src_failed:
{
- g_warning ("rtpbin: failed to get encoder src pad for session %u", sessid);
- return FALSE;
+ g_warning ("rtpbin: failed to get %" GST_PTR_FORMAT
+ " src pad for session %u", encoder, sessid);
+ goto done;
}
enc_sink_failed:
{
- g_warning ("rtpbin: failed to get encoder sink pad for session %u", sessid);
- gst_object_unref (send_rtp_src);
- return FALSE;
+ g_warning ("rtpbin: failed to get %" GST_PTR_FORMAT
+ " sink pad for session %u", encoder, sessid);
+ goto done;
}
enc_link_failed:
{
- g_warning ("rtpbin: failed to link rtp encoder for session %u", sessid);
- gst_object_unref (send_rtp_src);
- return FALSE;
+ g_warning ("rtpbin: failed to link %" GST_PTR_FORMAT " for session %u",
+ encoder, sessid);
+ goto done;
}
}
}
existing_session:
{
- g_warning ("rtpbin: session %u is already a sender", sessid);
- return FALSE;
+ GST_DEBUG_OBJECT (rtpbin,
+ "skipping src_%i setup, since it is already configured.", sessid);
+ return TRUE;
}
pad_failed:
{
guint sessid;
GstPad *send_rtp_sink;
GstElement *aux;
+ GstElement *encoder;
+ GstElement *prev = NULL;
GstRtpBinSession *session;
/* first get the session number */
if (session->send_rtp_sink != NULL)
goto existing_session;
+ encoder = session_request_element (session, SIGNAL_REQUEST_FEC_ENCODER);
+
+ if (encoder) {
+ GST_DEBUG_OBJECT (rtpbin, "Linking FEC encoder");
+
+ send_rtp_sink = gst_element_get_static_pad (encoder, "sink");
+
+ if (!send_rtp_sink)
+ goto enc_sink_failed;
+
+ prev = encoder;
+ }
+
GST_DEBUG_OBJECT (rtpbin, "getting RTP AUX sender");
aux = session_request_element (session, SIGNAL_REQUEST_AUX_SENDER);
if (aux) {
+ GstPad *sinkpad;
GST_DEBUG_OBJECT (rtpbin, "linking AUX sender");
if (!setup_aux_sender (rtpbin, session, aux))
goto aux_session_failed;
pname = g_strdup_printf ("sink_%u", sessid);
- send_rtp_sink = gst_element_get_static_pad (aux, pname);
+ sinkpad = gst_element_get_static_pad (aux, pname);
g_free (pname);
- if (send_rtp_sink == NULL)
+ if (sinkpad == NULL)
goto aux_sink_failed;
+
+ if (!prev) {
+ send_rtp_sink = sinkpad;
+ } else {
+ GstPad *srcpad = gst_element_get_static_pad (prev, "src");
+ GstPadLinkReturn ret;
+
+ ret = gst_pad_link (srcpad, sinkpad);
+ gst_object_unref (srcpad);
+ if (ret != GST_PAD_LINK_OK) {
+ goto aux_link_failed;
+ }
+ }
+ prev = aux;
} else {
/* get send_rtp pad and store */
session->send_rtp_sink =
if (!complete_session_src (rtpbin, session))
goto session_src_failed;
- send_rtp_sink = gst_object_ref (session->send_rtp_sink);
+ if (!prev) {
+ send_rtp_sink = gst_object_ref (session->send_rtp_sink);
+ } else {
+ GstPad *srcpad = gst_element_get_static_pad (prev, "src");
+ GstPadLinkReturn ret;
+
+ ret = gst_pad_link (srcpad, session->send_rtp_sink);
+ gst_object_unref (srcpad);
+ if (ret != GST_PAD_LINK_OK)
+ goto session_link_failed;
+ }
}
session->send_rtp_sink_ghost =
g_warning ("rtpbin: failed to get AUX sink pad for session %u", sessid);
return NULL;
}
+ aux_link_failed:
+ {
+ g_warning ("rtpbin: failed to link %" GST_PTR_FORMAT " for session %u",
+ aux, sessid);
+ return NULL;
+ }
pad_failed:
{
g_warning ("rtpbin: failed to get session pad for session %u", sessid);
g_warning ("rtpbin: failed to setup source pads for session %u", sessid);
return NULL;
}
+ session_link_failed:
+ {
+ g_warning ("rtpbin: failed to link %" GST_PTR_FORMAT " for session %u",
+ session, sessid);
+ return NULL;
+ }
+ enc_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get %" GST_PTR_FORMAT
+ " sink pad for session %u", encoder, sessid);
+ return NULL;
+ }
}
static void
session->send_rtp_src_ghost);
session->send_rtp_src_ghost = NULL;
}
- if (session->send_rtp_src) {
- gst_object_unref (session->send_rtp_src);
- session->send_rtp_src = NULL;
- }
if (session->send_rtp_sink) {
gst_element_release_request_pad (GST_ELEMENT_CAST (session->session),
session->send_rtp_sink);
* RTP_BIN_LOCK.
*/
static GstPad *
- create_rtcp (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name)
+ create_send_rtcp (GstRtpBin * rtpbin, GstPadTemplate * templ,
+ const gchar * name)
{
guint sessid;
GstPad *encsrc;
/* get or create session */
session = find_session_by_id (rtpbin, sessid);
- if (!session)
- goto no_session;
+ if (!session) {
+ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid);
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
/* check if pad was requested */
if (session->send_rtcp_src_ghost != NULL)
g_warning ("rtpbin: invalid name given");
return NULL;
}
- no_session:
+ create_error:
{
- g_warning ("rtpbin: session with id %d does not exist", sessid);
+ /* create_session already warned */
return NULL;
}
pad_failed:
result = create_send_rtp (rtpbin, templ, pad_name);
} else if (templ == gst_element_class_get_pad_template (klass,
"send_rtcp_src_%u")) {
- result = create_rtcp (rtpbin, templ, pad_name);
+ result = create_send_rtcp (rtpbin, templ, pad_name);
} else
goto wrong_template;
*/
/**
* SECTION:element-rtspsrc
+ * @title: rtspsrc
*
* Makes a connection to an RTSP server and read the data.
* rtspsrc strictly follows RFC 2326 and therefore does not (yet) support
* rtspsrc acts like a live source and will therefore only generate data in the
* PLAYING state.
*
- * <refsect2>
- * <title>Example launch line</title>
+ * If a RTP session times out then the rtspsrc will generate an element message
+ * named "GstRTSPSrcTimeout". Currently this is only supported for timeouts
+ * triggered by RTCP.
+ *
+ * The message's structure contains three fields:
+ *
+ * #GstRTSPSrcTimeoutCause `cause`: the cause of the timeout.
+ *
+ * #gint `stream-number`: an internal identifier of the stream that timed out.
+ *
+ * #guint `ssrc`: the SSRC of the stream that timed out.
+ *
+ * ## Example launch line
* |[
* gst-launch-1.0 rtspsrc location=rtsp://some.server/url ! fakesink
* ]| Establish a connection to an RTSP server and send the raw RTP packets to a
* fakesink.
- * </refsect2>
+ *
*/
#ifdef HAVE_CONFIG_H
SIGNAL_SELECT_STREAM,
SIGNAL_NEW_MANAGER,
SIGNAL_REQUEST_RTCP_KEY,
+ SIGNAL_ACCEPT_CERTIFICATE,
+ SIGNAL_BEFORE_SEND,
+ SIGNAL_PUSH_BACKCHANNEL_BUFFER,
+ SIGNAL_GET_PARAMETER,
+ SIGNAL_GET_PARAMETERS,
+ SIGNAL_SET_PARAMETER,
LAST_SIGNAL
};
return ntp_time_source_type;
}
+ enum _GstRtspBackchannel
+ {
+ BACKCHANNEL_NONE,
+ BACKCHANNEL_ONVIF
+ };
+
+ #define GST_TYPE_RTSP_BACKCHANNEL (gst_rtsp_backchannel_get_type())
+ static GType
+ gst_rtsp_backchannel_get_type (void)
+ {
+ static GType backchannel_type = 0;
+ static const GEnumValue backchannel_values[] = {
+ {BACKCHANNEL_NONE, "No backchannel", "none"},
+ {BACKCHANNEL_ONVIF, "ONVIF audio backchannel", "onvif"},
+ {0, NULL, NULL},
+ };
+
+ if (G_UNLIKELY (backchannel_type == 0)) {
+ backchannel_type =
+ g_enum_register_static ("GstRTSPBackchannel", backchannel_values);
+ }
+ return backchannel_type;
+ }
+
+ #define BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL "www.onvif.org/ver20/backchannel"
+
#define DEFAULT_LOCATION NULL
#define DEFAULT_PROTOCOLS GST_RTSP_LOWER_TRANS_UDP | GST_RTSP_LOWER_TRANS_UDP_MCAST | GST_RTSP_LOWER_TRANS_TCP
#define DEFAULT_DEBUG FALSE
#define DEFAULT_USER_AGENT "GStreamer/" PACKAGE_VERSION
#define DEFAULT_MAX_RTCP_RTP_TIME_DIFF 1000
#define DEFAULT_RFC7273_SYNC FALSE
+ #define DEFAULT_MAX_TS_OFFSET_ADJUSTMENT G_GUINT64_CONSTANT(0)
+ #define DEFAULT_MAX_TS_OFFSET G_GINT64_CONSTANT(3000000000)
+ #define DEFAULT_VERSION GST_RTSP_VERSION_1_0
+ #define DEFAULT_BACKCHANNEL GST_RTSP_BACKCHANNEL_NONE
+ #define DEFAULT_TEARDOWN_TIMEOUT (100 * GST_MSECOND)
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+#define DEFAULT_START_POSITION 0
+#endif
+
enum
{
PROP_0,
PROP_DEBUG,
PROP_RETRY,
PROP_TIMEOUT,
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ PROP_START_POSITION,
+ PROP_RESUME_POSITION,
+#endif
PROP_TCP_TIMEOUT,
PROP_LATENCY,
PROP_DROP_ON_LATENCY,
PROP_NTP_TIME_SOURCE,
PROP_USER_AGENT,
PROP_MAX_RTCP_RTP_TIME_DIFF,
- PROP_RFC7273_SYNC
+ PROP_RFC7273_SYNC,
+ PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ PROP_MAX_TS_OFFSET,
+ PROP_DEFAULT_VERSION,
+ PROP_BACKCHANNEL,
+ PROP_TEARDOWN_TIMEOUT,
};
#define GST_TYPE_RTSP_NAT_METHOD (gst_rtsp_nat_method_get_type())
"rtsp-status-reason", G_TYPE_STRING, GST_STR_NULL((response_msg)->type_data.response.reason), NULL)); \
} while (0)
+ typedef struct _ParameterRequest
+ {
+ gint cmd;
+ gchar *content_type;
+ GString *body;
+ GstPromise *promise;
+ } ParameterRequest;
+
static void gst_rtspsrc_finalize (GObject * object);
static void gst_rtspsrc_set_property (GObject * object, guint prop_id,
static GstRTSPResult gst_rtspsrc_open (GstRTSPSrc * src, gboolean async);
static GstRTSPResult gst_rtspsrc_play (GstRTSPSrc * src, GstSegment * segment,
- gboolean async);
+ gboolean async, const gchar * seek_style);
static GstRTSPResult gst_rtspsrc_pause (GstRTSPSrc * src, gboolean async);
static GstRTSPResult gst_rtspsrc_close (GstRTSPSrc * src, gboolean async,
gboolean only_close);
GstRTSPStream * stream, GstEvent * event);
static gboolean gst_rtspsrc_push_event (GstRTSPSrc * src, GstEvent * event);
static void gst_rtspsrc_connection_flush (GstRTSPSrc * src, gboolean flush);
+ static GstRTSPResult gst_rtsp_conninfo_close (GstRTSPSrc * src,
+ GstRTSPConnInfo * info, gboolean free);
static void
gst_rtspsrc_print_rtsp_message (GstRTSPSrc * src, const GstRTSPMessage * msg);
static void
gst_rtspsrc_print_sdp_message (GstRTSPSrc * src, const GstSDPMessage * msg);
- static GstRTSPResult gst_rtsp_conninfo_close (GstRTSPSrc * src,
- GstRTSPConnInfo * info, gboolean free);
+
+ static GstRTSPResult
+ gst_rtspsrc_get_parameter (GstRTSPSrc * src, ParameterRequest * req);
+
+ static GstRTSPResult
+ gst_rtspsrc_set_parameter (GstRTSPSrc * src, ParameterRequest * req);
+
+ static gboolean get_parameter (GstRTSPSrc * src, const gchar * parameter,
+ const gchar * content_type, GstPromise * promise);
+
+ static gboolean get_parameters (GstRTSPSrc * src, gchar ** parameters,
+ const gchar * content_type, GstPromise * promise);
+
+ static gboolean set_parameter (GstRTSPSrc * src, const gchar * name,
+ const gchar * value, const gchar * content_type, GstPromise * promise);
+
+ static GstFlowReturn gst_rtspsrc_push_backchannel_buffer (GstRTSPSrc * src,
+ guint id, GstSample * sample);
typedef struct
{
} PtMapItem;
/* commands we send to out loop to notify it of events */
- #define CMD_OPEN (1 << 0)
- #define CMD_PLAY (1 << 1)
- #define CMD_PAUSE (1 << 2)
- #define CMD_CLOSE (1 << 3)
- #define CMD_WAIT (1 << 4)
- #define CMD_RECONNECT (1 << 5)
- #define CMD_LOOP (1 << 6)
+ #define CMD_OPEN (1 << 0)
+ #define CMD_PLAY (1 << 1)
+ #define CMD_PAUSE (1 << 2)
+ #define CMD_CLOSE (1 << 3)
+ #define CMD_WAIT (1 << 4)
+ #define CMD_RECONNECT (1 << 5)
+ #define CMD_LOOP (1 << 6)
+ #define CMD_GET_PARAMETER (1 << 7)
+ #define CMD_SET_PARAMETER (1 << 8)
/* mask for all commands */
- #define CMD_ALL ((CMD_LOOP << 1) - 1)
+ #define CMD_ALL ((CMD_SET_PARAMETER << 1) - 1)
#define GST_ELEMENT_PROGRESS(el, type, code, text) \
G_STMT_START { \
return "RECONNECT";
case CMD_LOOP:
return "LOOP";
+ case CMD_GET_PARAMETER:
+ return "GET_PARAMETER";
+ case CMD_SET_PARAMETER:
+ return "SET_PARAMETER";
}
return "unknown";
}
#endif
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+static void
+gst_rtspsrc_post_error_message (GstRTSPSrc * src, GstRTSPSrcError error_id,
+ const gchar * error_string)
+{
+ GstMessage *message;
+ GstStructure *structure;
+ gboolean ret = TRUE;
+
+ GST_ERROR_OBJECT (src, "[%d] %s", error_id, error_string);
+
+ structure = gst_structure_new ("streaming_error",
+ "error_id", G_TYPE_UINT, error_id,
+ "error_string", G_TYPE_STRING, error_string, NULL);
+
+ message =
+ gst_message_new_custom (GST_MESSAGE_ERROR, GST_OBJECT (src), structure);
+
+ ret = gst_element_post_message (GST_ELEMENT (src), message);
+ if (!ret)
+ GST_ERROR_OBJECT (src, "fail to post error message.");
+
+ return;
+}
+#endif
+
static gboolean
default_select_stream (GstRTSPSrc * src, guint id, GstCaps * caps)
{
return myboolean;
}
+ static gboolean
+ default_before_send (GstRTSPSrc * src, GstRTSPMessage * msg)
+ {
+ GST_DEBUG_OBJECT (src, "default handler");
+ return TRUE;
+ }
+
+ static gboolean
+ before_send_accum (GSignalInvocationHint * ihint,
+ GValue * return_accu, const GValue * handler_return, gpointer data)
+ {
+ gboolean myboolean;
+
+ myboolean = g_value_get_boolean (handler_return);
+ g_value_set_boolean (return_accu, myboolean);
+
+ /* prevent send if FALSE */
+ return myboolean;
+ }
+
static void
gst_rtspsrc_class_init (GstRTSPSrcClass * klass)
{
"Retry TCP transport after UDP timeout microseconds (0 = disabled)",
0, G_MAXUINT64, DEFAULT_TIMEOUT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_object_class_install_property (gobject_class, PROP_START_POSITION,
+ g_param_spec_uint64 ("pending-start-position", "set start position",
+ "Set start position before PLAYING request.",
+ 0, G_MAXUINT64, DEFAULT_START_POSITION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_RESUME_POSITION,
+ g_param_spec_uint64 ("resume-position", "set resume position",
+ "Set resume position before PLAYING request after pause.",
+ 0, G_MAXUINT64, DEFAULT_START_POSITION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif
g_object_class_install_property (gobject_class, PROP_TCP_TIMEOUT,
g_param_spec_uint64 ("tcp-timeout", "TCP Timeout",
"Fail after timeout microseconds on TCP connections (0 = disabled)",
/**
* GstRTSPSrc:port-range:
*
- * Configure the client port numbers that can be used to recieve RTP and
+ * Configure the client port numbers that can be used to receive RTP and
* RTCP.
*/
g_object_class_install_property (gobject_class, PROP_PORT_RANGE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
+ * GstRTSPSrc:default-rtsp-version:
+ *
+ * The preferred RTSP version to use while negotiating the version with the server.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_DEFAULT_VERSION,
+ g_param_spec_enum ("default-rtsp-version",
+ "The RTSP version to try first",
+ "The RTSP version that should be tried first when negotiating version.",
+ GST_TYPE_RTSP_VERSION, DEFAULT_VERSION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:max-ts-offset-adjustment:
+ *
+ * Syncing time stamps to NTP time adds a time offset. This parameter
+ * specifies the maximum number of nanoseconds per frame that this time offset
+ * may be adjusted with. This is used to avoid sudden large changes to time
+ * stamps.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ g_param_spec_uint64 ("max-ts-offset-adjustment",
+ "Max Timestamp Offset Adjustment",
+ "The maximum number of nanoseconds per frame that time stamp offsets "
+ "may be adjusted (0 = no limit).", 0, G_MAXUINT64,
+ DEFAULT_MAX_TS_OFFSET_ADJUSTMENT, G_PARAM_READWRITE |
+ G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:max-ts-offset:
+ *
+ * Used to set an upper limit of how large a time offset may be. This
+ * is used to protect against unrealistic values as a result of either
+ * client,server or clock issues.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET,
+ g_param_spec_int64 ("max-ts-offset", "Max TS Offset",
+ "The maximum absolute value of the time offset in (nanoseconds). "
+ "Note, if the ntp-sync parameter is set the default value is "
+ "changed to 0 (no limit)", 0, G_MAXINT64, DEFAULT_MAX_TS_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:backchannel:
+ *
+ * Select a type of backchannel to setup with the RTSP server.
+ * Default value is "none". Allowed values are "none" and "onvif".
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_BACKCHANNEL,
+ g_param_spec_enum ("backchannel", "Backchannel type",
+ "The type of backchannel to setup. Default is 'none'.",
+ GST_TYPE_RTSP_BACKCHANNEL, BACKCHANNEL_NONE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:teardown-timeout:
+ *
+ * When transitioning PAUSED-READY, allow up to timeout (in nanoseconds)
+ * delay in order to send teardown (0 = disabled)
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_TEARDOWN_TIMEOUT,
+ g_param_spec_uint64 ("teardown-timeout", "Teardown Timeout",
+ "When transitioning PAUSED-READY, allow up to timeout (in nanoseconds) "
+ "delay in order to send teardown (0 = disabled)",
+ 0, G_MAXUINT64, DEFAULT_TEARDOWN_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
* GstRTSPSrc::handle-request:
* @rtspsrc: a #GstRTSPSrc
* @request: a #GstRTSPMessage
* @rtspsrc: a #GstRTSPSrc
* @sdp: a #GstSDPMessage
*
- * Emited when the client has retrieved the SDP and before it configures the
+ * Emitted when the client has retrieved the SDP and before it configures the
* streams in the SDP. @sdp can be inspected and modified.
*
* This signal is called from the streaming thread, you should therefore not
* @num: the stream number
* @caps: the stream caps
*
- * Emited before the client decides to configure the stream @num with
+ * Emitted before the client decides to configure the stream @num with
* @caps.
*
* Returns: %TRUE when the stream should be selected, %FALSE when the stream
* @rtspsrc: a #GstRTSPSrc
* @manager: a #GstElement
*
- * Emited after a new manager (like rtpbin) was created and the default
+ * Emitted after a new manager (like rtpbin) was created and the default
* properties were configured.
*
* Since: 1.4
* @rtspsrc: a #GstRTSPSrc
* @num: the stream number
*
- * Signal emited to get the crypto parameters relevant to the RTCP
+ * Signal emitted to get the crypto parameters relevant to the RTCP
* stream. User should provide the key and the RTCP encryption ciphers
* and authentication, and return them wrapped in a GstCaps.
*
g_signal_new ("request-rtcp-key", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, GST_TYPE_CAPS, 1, G_TYPE_UINT);
+ /**
+ * GstRTSPSrc::accept-certificate:
+ * @rtspsrc: a #GstRTSPSrc
+ * @peer_cert: the peer's #GTlsCertificate
+ * @errors: the problems with @peer_cert
+ * @user_data: user data set when the signal handler was connected.
+ *
+ * This will directly map to #GTlsConnection 's "accept-certificate"
+ * signal and be performed after the default checks of #GstRTSPConnection
+ * (checking against the #GTlsDatabase with the given #GTlsCertificateFlags)
+ * have failed. If no #GTlsDatabase is set on this connection, only this
+ * signal will be emitted.
+ *
+ * Since: 1.14
+ */
+ gst_rtspsrc_signals[SIGNAL_ACCEPT_CERTIFICATE] =
+ g_signal_new ("accept-certificate", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, g_signal_accumulator_true_handled, NULL, NULL,
+ G_TYPE_BOOLEAN, 3, G_TYPE_TLS_CONNECTION, G_TYPE_TLS_CERTIFICATE,
+ G_TYPE_TLS_CERTIFICATE_FLAGS);
+
+ /**
+ * GstRTSPSrc::before-send:
+ * @rtspsrc: a #GstRTSPSrc
+ * @msg: the outgoing #GstRTSPMessage
+ *
+ * Emitted before each RTSP request is sent, in order to allow
+ * the application to modify send parameters or to skip the message entirely.
+ * This can be used, for example, to work with ONVIF Profile G servers,
+ * which need a different/additional range, rate-control, and intra/x
+ * parameters.
+ *
+ * Returns: %TRUE when the command should be sent, %FALSE when the
+ * command should be dropped.
+ *
+ * Since: 1.14
+ */
+ gst_rtspsrc_signals[SIGNAL_BEFORE_SEND] =
+ g_signal_new_class_handler ("before-send", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_FIRST | G_SIGNAL_RUN_CLEANUP,
+ (GCallback) default_before_send, before_send_accum, NULL,
+ g_cclosure_marshal_generic, G_TYPE_BOOLEAN,
+ 1, GST_TYPE_RTSP_MESSAGE | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstRTSPSrc::push-backchannel-buffer:
+ * @rtspsrc: a #GstRTSPSrc
+ * @id: the stream id to send the backchannel buffer on
+ * @buffer: RTP buffer to send back
+ *
+ * Send an RTP buffer back over the ONVIF backchannel of the given stream.
+ *
+ * Returns: a #GstFlowReturn
+ *
+ * Since: 1.14
+ */
+ gst_rtspsrc_signals[SIGNAL_PUSH_BACKCHANNEL_BUFFER] =
+ g_signal_new ("push-backchannel-buffer", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ push_backchannel_buffer), NULL, NULL, NULL, GST_TYPE_FLOW_RETURN, 2,
+ G_TYPE_UINT, GST_TYPE_BUFFER);
+
+ /**
+ * GstRTSPSrc::get-parameter:
+ * @rtspsrc: a #GstRTSPSrc
+ * @parameter: the parameter name
+ * @content_type: the content type, or %NULL
+ * @promise: a #GstPromise that will be fulfilled with the result
+ *
+ * Handle the GET_PARAMETER signal.
+ *
+ * Returns: %TRUE when the command could be issued, %FALSE otherwise
+ *
+ */
+ gst_rtspsrc_signals[SIGNAL_GET_PARAMETER] =
+ g_signal_new ("get-parameter", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ get_parameter), NULL, NULL, g_cclosure_marshal_generic,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRING, G_TYPE_STRING, GST_TYPE_PROMISE);
+
+ /**
+ * GstRTSPSrc::get-parameters:
+ * @rtspsrc: a #GstRTSPSrc
+ * @parameters: a %NULL-terminated array of parameter names
+ * @content_type: the content type, or %NULL
+ * @promise: a #GstPromise that will be fulfilled with the result
+ *
+ * Handle the GET_PARAMETERS signal.
+ *
+ * Returns: %TRUE when the command could be issued, %FALSE otherwise
+ *
+ */
+ gst_rtspsrc_signals[SIGNAL_GET_PARAMETERS] =
+ g_signal_new ("get-parameters", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ get_parameters), NULL, NULL, g_cclosure_marshal_generic,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRV, G_TYPE_STRING, GST_TYPE_PROMISE);
+
+ /**
+ * GstRTSPSrc::set-parameter:
+ * @rtspsrc: a #GstRTSPSrc
+ * @name: the parameter name
+ * @value: the parameter value
+ * @content_type: the content type, or %NULL
+ * @promise: a #GstPromise that will be fulfilled with the result
+ *
+ * Handle the SET_PARAMETER signal.
+ *
+ * Returns: %TRUE when the command could be issued, %FALSE otherwise
+ *
+ */
+ gst_rtspsrc_signals[SIGNAL_SET_PARAMETER] =
+ g_signal_new ("set-parameter", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ set_parameter), NULL, NULL, g_cclosure_marshal_generic,
+ G_TYPE_BOOLEAN, 4, G_TYPE_STRING, G_TYPE_STRING, G_TYPE_STRING,
+ GST_TYPE_PROMISE);
+
gstelement_class->send_event = gst_rtspsrc_send_event;
gstelement_class->provide_clock = gst_rtspsrc_provide_clock;
gstelement_class->change_state = gst_rtspsrc_change_state;
gstbin_class->handle_message = gst_rtspsrc_handle_message;
+ klass->push_backchannel_buffer = gst_rtspsrc_push_backchannel_buffer;
+ klass->get_parameter = GST_DEBUG_FUNCPTR (get_parameter);
+ klass->get_parameters = GST_DEBUG_FUNCPTR (get_parameters);
+ klass->set_parameter = GST_DEBUG_FUNCPTR (set_parameter);
+
gst_rtsp_ext_list_init ();
}
+ static gboolean
+ validate_set_get_parameter_name (const gchar * parameter_name)
+ {
+ gchar *ptr = (gchar *) parameter_name;
+
+ while (*ptr) {
+ /* Don't allow '\r', '\n', '\t', ' ' etc. in the parameter name */
+ if (g_ascii_isspace (*ptr) || g_ascii_iscntrl (*ptr)) {
+ GST_DEBUG ("invalid parameter name '%s'", parameter_name);
+ return FALSE;
+ }
+ ptr++;
+ }
+ return TRUE;
+ }
+
+ static gboolean
+ validate_set_get_parameters (gchar ** parameter_names)
+ {
+ while (*parameter_names) {
+ if (!validate_set_get_parameter_name (*parameter_names)) {
+ return FALSE;
+ }
+ parameter_names++;
+ }
+ return TRUE;
+ }
+
+ static gboolean
+ get_parameter (GstRTSPSrc * src, const gchar * parameter,
+ const gchar * content_type, GstPromise * promise)
+ {
+ gchar *parameters[] = { (gchar *) parameter, NULL };
+
+ GST_LOG_OBJECT (src, "get_parameter: %s", GST_STR_NULL (parameter));
+
+ if (parameter == NULL || parameter[0] == '\0' || promise == NULL) {
+ GST_DEBUG ("invalid input");
+ return FALSE;
+ }
+
+ return get_parameters (src, parameters, content_type, promise);
+ }
+
+ static gboolean
+ get_parameters (GstRTSPSrc * src, gchar ** parameters,
+ const gchar * content_type, GstPromise * promise)
+ {
+ ParameterRequest *req;
+
+ GST_LOG_OBJECT (src, "get_parameters: %d", g_strv_length (parameters));
+
+ if (parameters == NULL || promise == NULL) {
+ GST_DEBUG ("invalid input");
+ return FALSE;
+ }
+
+ if (src->state == GST_RTSP_STATE_INVALID) {
+ GST_DEBUG ("invalid state");
+ return FALSE;
+ }
+
+ if (!validate_set_get_parameters (parameters)) {
+ return FALSE;
+ }
+
+ req = g_new0 (ParameterRequest, 1);
+ req->promise = gst_promise_ref (promise);
+ req->cmd = CMD_GET_PARAMETER;
+ /* Set the request body according to RFC 2326 or RFC 7826 */
+ req->body = g_string_new (NULL);
+ while (*parameters) {
+ g_string_append_printf (req->body, "%s:\r\n", *parameters);
+ parameters++;
+ }
+ if (content_type)
+ req->content_type = g_strdup (content_type);
+
+ GST_OBJECT_LOCK (src);
+ g_queue_push_tail (&src->set_get_param_q, req);
+ GST_OBJECT_UNLOCK (src);
+
+ gst_rtspsrc_loop_send_cmd (src, CMD_GET_PARAMETER, CMD_LOOP);
+
+ return TRUE;
+ }
+
+ static gboolean
+ set_parameter (GstRTSPSrc * src, const gchar * name, const gchar * value,
+ const gchar * content_type, GstPromise * promise)
+ {
+ ParameterRequest *req;
+
+ GST_LOG_OBJECT (src, "set_parameter: %s: %s", GST_STR_NULL (name),
+ GST_STR_NULL (value));
+
+ if (name == NULL || name[0] == '\0' || value == NULL || promise == NULL) {
+ GST_DEBUG ("invalid input");
+ return FALSE;
+ }
+
+ if (src->state == GST_RTSP_STATE_INVALID) {
+ GST_DEBUG ("invalid state");
+ return FALSE;
+ }
+
+ if (!validate_set_get_parameter_name (name)) {
+ return FALSE;
+ }
+
+ req = g_new0 (ParameterRequest, 1);
+ req->cmd = CMD_SET_PARAMETER;
+ req->promise = gst_promise_ref (promise);
+ req->body = g_string_new (NULL);
+ /* Set the request body according to RFC 2326 or RFC 7826 */
+ g_string_append_printf (req->body, "%s: %s\r\n", name, value);
+ if (content_type)
+ req->content_type = g_strdup (content_type);
+
+ GST_OBJECT_LOCK (src);
+ g_queue_push_tail (&src->set_get_param_q, req);
+ GST_OBJECT_UNLOCK (src);
+
+ gst_rtspsrc_loop_send_cmd (src, CMD_SET_PARAMETER, CMD_LOOP);
+
+ return TRUE;
+ }
+
static void
gst_rtspsrc_init (GstRTSPSrc * src)
{
src->debug = DEFAULT_DEBUG;
src->retry = DEFAULT_RETRY;
src->udp_timeout = DEFAULT_TIMEOUT;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ src->start_position = DEFAULT_START_POSITION;
+ src->is_audio_codec_supported = FALSE;
+ src->is_video_codec_supported = FALSE;
+ src->audio_codec = NULL;
+ src->video_codec = NULL;
+ src->video_frame_size = NULL;
+#endif
gst_rtspsrc_set_tcp_timeout (src, DEFAULT_TCP_TIMEOUT);
src->latency = DEFAULT_LATENCY_MS;
src->drop_on_latency = DEFAULT_DROP_ON_LATENCY;
src->user_agent = g_strdup (DEFAULT_USER_AGENT);
src->max_rtcp_rtp_time_diff = DEFAULT_MAX_RTCP_RTP_TIME_DIFF;
src->rfc7273_sync = DEFAULT_RFC7273_SYNC;
+ src->max_ts_offset_adjustment = DEFAULT_MAX_TS_OFFSET_ADJUSTMENT;
+ src->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ src->max_ts_offset_is_set = FALSE;
+ src->default_version = DEFAULT_VERSION;
+ src->version = GST_RTSP_VERSION_INVALID;
+ src->teardown_timeout = DEFAULT_TEARDOWN_TIMEOUT;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_mutex_init (&(src)->pause_lock);
+ g_cond_init (&(src)->open_end);
+#endif
/* get a list of all extensions */
src->extensions = gst_rtsp_ext_list_get ();
/* protects our state changes from multiple invocations */
g_rec_mutex_init (&src->state_rec_lock);
+ g_queue_init (&src->set_get_param_q);
+
src->state = GST_RTSP_STATE_INVALID;
g_mutex_init (&src->conninfo.send_lock);
g_mutex_init (&src->conninfo.recv_lock);
+ g_cond_init (&src->cmd_cond);
GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
gst_bin_set_suppressed_flags (GST_BIN (src),
}
static void
+ free_param_data (ParameterRequest * req)
+ {
+ gst_promise_unref (req->promise);
+ if (req->body)
+ g_string_free (req->body, TRUE);
+ g_free (req->content_type);
+ g_free (req);
+ }
+
+ static void
+ free_param_queue (gpointer data)
+ {
+ ParameterRequest *req = data;
+
+ gst_promise_expire (req->promise);
+ free_param_data (req);
+ }
+
+ static void
gst_rtspsrc_finalize (GObject * object)
{
GstRTSPSrc *rtspsrc;
rtspsrc = GST_RTSPSRC (object);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ rtspsrc->is_audio_codec_supported = FALSE;
+ rtspsrc->is_video_codec_supported = FALSE;
+ if (rtspsrc->audio_codec) {
+ g_free (rtspsrc->audio_codec);
+ rtspsrc->audio_codec = NULL;
+ }
+ if (rtspsrc->video_codec) {
+ g_free (rtspsrc->video_codec);
+ rtspsrc->video_codec = NULL;
+ }
+ if (rtspsrc->video_frame_size) {
+ g_free (rtspsrc->video_frame_size);
+ rtspsrc->video_frame_size = NULL;
+ }
+#endif
gst_rtsp_ext_list_free (rtspsrc->extensions);
g_free (rtspsrc->conninfo.location);
gst_rtsp_url_free (rtspsrc->conninfo.url);
g_free (rtspsrc->multi_iface);
g_free (rtspsrc->user_agent);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ g_mutex_clear (&(rtspsrc)->pause_lock);
+ g_cond_clear (&(rtspsrc)->open_end);
+#endif
+
if (rtspsrc->sdp) {
gst_sdp_message_free (rtspsrc->sdp);
rtspsrc->sdp = NULL;
g_mutex_clear (&rtspsrc->conninfo.send_lock);
g_mutex_clear (&rtspsrc->conninfo.recv_lock);
+ g_cond_clear (&rtspsrc->cmd_cond);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
case PROP_TIMEOUT:
rtspsrc->udp_timeout = g_value_get_uint64 (value);
break;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ case PROP_START_POSITION:
+ rtspsrc->start_position = g_value_get_uint64 (value);
+ break;
+ case PROP_RESUME_POSITION:
+ rtspsrc->last_pos = g_value_get_uint64 (value);
+ GST_DEBUG_OBJECT (rtspsrc, "src->last_pos value set to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (rtspsrc->last_pos));
+ break;
+#endif
case PROP_TCP_TIMEOUT:
gst_rtspsrc_set_tcp_timeout (rtspsrc, g_value_get_uint64 (value));
break;
const gchar *str;
str = g_value_get_string (value);
- if (sscanf (str, "%u-%u", &rtspsrc->client_port_range.min,
+ if (str == NULL || sscanf (str, "%u-%u", &rtspsrc->client_port_range.min,
&rtspsrc->client_port_range.max) != 2) {
rtspsrc->client_port_range.min = 0;
rtspsrc->client_port_range.max = 0;
break;
case PROP_NTP_SYNC:
rtspsrc->ntp_sync = g_value_get_boolean (value);
+ /* The default value of max_ts_offset depends on ntp_sync. If user
+ * hasn't set it then change default value */
+ if (!rtspsrc->max_ts_offset_is_set) {
+ if (rtspsrc->ntp_sync) {
+ rtspsrc->max_ts_offset = 0;
+ } else {
+ rtspsrc->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ }
+ }
break;
case PROP_USE_PIPELINE_CLOCK:
rtspsrc->use_pipeline_clock = g_value_get_boolean (value);
case PROP_RFC7273_SYNC:
rtspsrc->rfc7273_sync = g_value_get_boolean (value);
break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ rtspsrc->max_ts_offset_adjustment = g_value_get_uint64 (value);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ rtspsrc->max_ts_offset = g_value_get_int64 (value);
+ rtspsrc->max_ts_offset_is_set = TRUE;
+ break;
+ case PROP_DEFAULT_VERSION:
+ rtspsrc->default_version = g_value_get_enum (value);
+ break;
+ case PROP_BACKCHANNEL:
+ rtspsrc->backchannel = g_value_get_enum (value);
+ break;
+ case PROP_TEARDOWN_TIMEOUT:
+ rtspsrc->teardown_timeout = g_value_get_uint64 (value);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
case PROP_TIMEOUT:
g_value_set_uint64 (value, rtspsrc->udp_timeout);
break;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ case PROP_START_POSITION:
+ g_value_set_uint64 (value, rtspsrc->start_position);
+ break;
+ case PROP_RESUME_POSITION:
+ g_value_set_uint64 (value, rtspsrc->last_pos);
+ break;
+#endif
case PROP_TCP_TIMEOUT:
{
guint64 timeout;
case PROP_RFC7273_SYNC:
g_value_set_boolean (value, rtspsrc->rfc7273_sync);
break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ g_value_set_uint64 (value, rtspsrc->max_ts_offset_adjustment);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ g_value_set_int64 (value, rtspsrc->max_ts_offset);
+ break;
+ case PROP_DEFAULT_VERSION:
+ g_value_set_enum (value, rtspsrc->default_version);
+ break;
+ case PROP_BACKCHANNEL:
+ g_value_set_enum (value, rtspsrc->backchannel);
+ break;
+ case PROP_TEARDOWN_TIMEOUT:
+ g_value_set_uint64 (value, rtspsrc->teardown_timeout);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
+ static gchar *
+ make_stream_id (GstRTSPStream * stream, const GstSDPMedia * media)
+ {
+ gchar *stream_id =
+ g_strdup_printf ("%s:%d:%d:%s:%d", media->media, media->port,
+ media->num_ports, media->proto, stream->default_pt);
+
+ g_strcanon (stream_id, G_CSET_a_2_z G_CSET_A_2_Z G_CSET_DIGITS, ':');
+
+ return stream_id;
+ }
+
/* m=<media> <UDP port> RTP/AVP <payload>
*/
static void
else
goto unknown_proto;
+ if (gst_sdp_media_get_attribute_val (media, "sendonly") != NULL &&
+ /* We want to setup caps for streams configured as backchannel */
+ !stream->is_backchannel && src->backchannel != BACKCHANNEL_NONE)
+ goto sendonly_media;
+
/* Parse global SDP attributes once */
global_caps = gst_caps_new_empty_simple ("application/x-unknown");
GST_DEBUG ("mapping sdp session level attributes to caps");
GstStructure *s;
const gchar *enc;
PtMapItem item;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ const gchar *encoder, *mediatype;
+#endif
pt = atoi (gst_sdp_media_get_format (media, i));
GST_DEBUG_OBJECT (src, " looking at %d pt: %d", i, pt);
if (strcmp (enc, "X-ASF-PF") == 0)
stream->container = TRUE;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if ((mediatype = gst_structure_get_string (s, "media"))) {
+ GST_DEBUG_OBJECT (src, " mediatype : %s", mediatype);
+ if (!strcmp (mediatype, "video")) {
+ if ((encoder = gst_structure_get_string (s, "encoding-name"))) {
+ GST_DEBUG_OBJECT (src, " encoder : %s", encoder);
+ if ((!strcmp (encoder, "H261")) ||
+ (!strcmp (encoder, "H263")) ||
+ (!strcmp (encoder, "H263-1998"))
+ || (!strcmp (encoder, "H263-2000")) || (!strcmp (encoder, "H264"))
+ || (!strcmp (encoder, "MP4V-ES"))) {
+ src->is_video_codec_supported = TRUE;
+ GST_DEBUG_OBJECT (src, "Supported Video Codec %s", encoder);
+ } else {
+ GST_DEBUG_OBJECT (src, "Unsupported Video Codec %s", encoder);
+ }
+ }
+
+ src->video_codec = g_strdup (encoder);
+ src->video_frame_size =
+ g_strdup (gst_structure_get_string (s, "a-framesize"));
+ GST_DEBUG_OBJECT (src, "video_codec %s , video_frame_size %s ",
+ src->video_codec, src->video_frame_size);
+ } else if (!strcmp (mediatype, "audio")) {
+ if ((encoder = gst_structure_get_string (s, "encoding-name"))) {
+ GST_DEBUG_OBJECT (src, " encoder : %s", encoder);
+ if ((!strcmp (encoder, "MP4A-LATM")) ||
+ (!strcmp (encoder, "AMR")) || (!strcmp (encoder, "AMR-WB"))
+ || (!strcmp (encoder, "AMR-NB"))
+ || (!strcmp (encoder, "mpeg4-generic"))
+ || (!strcmp (encoder, "MPEG4-GENERIC"))
+ || (!strcmp (encoder, "QCELP")) || ((strstr (encoder, "G726"))
+ || (strstr (encoder, "PCMU")))) {
+ src->is_audio_codec_supported = TRUE;
+ GST_DEBUG_OBJECT (src, "Supported Audio Codec %s", encoder);
+ } else {
+ GST_DEBUG_OBJECT (src, "Unsupported Audio Codec %s", encoder);
+ }
+ }
+
+ src->audio_codec = g_strdup (encoder);
+ GST_DEBUG_OBJECT (src, "audio_codec %s ", src->audio_codec);
+ }
+ }
+#endif
/* Merge in global caps */
/* Intersect will merge in missing fields to the current caps */
g_array_append_val (stream->ptmap, item);
}
+ stream->stream_id = make_stream_id (stream, media);
+
gst_caps_unref (global_caps);
return;
GST_ERROR_OBJECT (src, "unknown proto in media: '%s'", proto);
return;
}
+ sendonly_media:
+ {
+ GST_DEBUG_OBJECT (src, "sendonly media ignored, no backchannel");
+ return;
+ }
}
static const gchar *
stream->profile = GST_RTSP_PROFILE_AVP;
stream->ptmap = g_array_new (FALSE, FALSE, sizeof (PtMapItem));
stream->mikey = NULL;
+ stream->stream_id = NULL;
+ stream->is_backchannel = FALSE;
g_mutex_init (&stream->conninfo.send_lock);
g_mutex_init (&stream->conninfo.recv_lock);
g_array_set_clear_func (stream->ptmap, (GDestroyNotify) clear_ptmap_item);
+ /* stream is sendonly and ONVIF backchannel is requested */
+ if (gst_sdp_media_get_attribute_val (media, "sendonly") != NULL &&
+ src->backchannel != BACKCHANNEL_NONE)
+ stream->is_backchannel = TRUE;
+
/* collect bandwidth information for this steam. FIXME, configure in the RTP
* session manager to scale RTCP. */
gst_rtspsrc_collect_bandwidth (src, sdp, media, stream);
g_free (stream->destination);
g_free (stream->control_url);
g_free (stream->conninfo.location);
+ g_free (stream->stream_id);
for (i = 0; i < 2; i++) {
if (stream->udpsrc[i]) {
gst_element_set_state (stream->udpsrc[i], GST_STATE_NULL);
- gst_bin_remove (GST_BIN_CAST (src), stream->udpsrc[i]);
+ if (gst_object_has_as_parent (GST_OBJECT (stream->udpsrc[i]),
+ GST_OBJECT (src)))
+ gst_bin_remove (GST_BIN_CAST (src), stream->udpsrc[i]);
gst_object_unref (stream->udpsrc[i]);
}
if (stream->channelpad[i])
if (stream->udpsink[i]) {
gst_element_set_state (stream->udpsink[i], GST_STATE_NULL);
- gst_bin_remove (GST_BIN_CAST (src), stream->udpsink[i]);
+ if (gst_object_has_as_parent (GST_OBJECT (stream->udpsink[i]),
+ GST_OBJECT (src)))
+ gst_bin_remove (GST_BIN_CAST (src), stream->udpsink[i]);
gst_object_unref (stream->udpsink[i]);
}
}
- if (stream->fakesrc) {
- gst_element_set_state (stream->fakesrc, GST_STATE_NULL);
- gst_bin_remove (GST_BIN_CAST (src), stream->fakesrc);
- gst_object_unref (stream->fakesrc);
+ if (stream->rtpsrc) {
+ gst_element_set_state (stream->rtpsrc, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN_CAST (src), stream->rtpsrc);
+ gst_object_unref (stream->rtpsrc);
}
if (stream->srcpad) {
gst_pad_set_active (stream->srcpad, FALSE);
gst_object_unref (src->provided_clock);
src->provided_clock = NULL;
}
+
+ /* free parameter requests queue */
+ if (!g_queue_is_empty (&src->set_get_param_q))
+ g_queue_free_full (&src->set_get_param_q, free_param_queue);
+
}
static gboolean
{
GList *walk;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_WARNING_OBJECT (src, "Setting [%s] element state to: %s \n",
+ GST_ELEMENT_NAME (GST_ELEMENT_CAST (src)),
+ gst_element_state_get_name (state));
++#endif
if (src->manager)
gst_element_set_state (GST_ELEMENT_CAST (src->manager), state);
}
static void
- gst_rtspsrc_flush (GstRTSPSrc * src, gboolean flush, gboolean playing)
+ gst_rtspsrc_flush (GstRTSPSrc * src, gboolean flush, gboolean playing,
+ guint32 seqnum)
{
GstEvent *event;
gint cmd;
if (flush) {
event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (event, seqnum);
GST_DEBUG_OBJECT (src, "start flush");
cmd = CMD_WAIT;
state = GST_STATE_PAUSED;
} else {
event = gst_event_new_flush_stop (FALSE);
+ gst_event_set_seqnum (event, seqnum);
GST_DEBUG_OBJECT (src, "stop flush; playing %d", playing);
cmd = CMD_LOOP;
if (playing)
gboolean playing;
GstSegment seeksegment = { 0, };
GList *walk;
+ const gchar *seek_style = NULL;
- if (event) {
- GST_DEBUG_OBJECT (src, "doing seek with event");
+ GST_DEBUG_OBJECT (src, "doing seek with event %" GST_PTR_FORMAT, event);
- gst_event_parse_seek (event, &rate, &format, &flags,
- &cur_type, &cur, &stop_type, &stop);
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
- /* no negative rates yet */
- if (rate < 0.0)
- goto negative_rate;
+ /* no negative rates yet */
+ if (rate < 0.0)
+ goto negative_rate;
- /* we need TIME format */
- if (format != src->segment.format)
- goto no_format;
- } else {
- GST_DEBUG_OBJECT (src, "doing seek without event");
- flags = 0;
- cur_type = GST_SEEK_TYPE_SET;
- stop_type = GST_SEEK_TYPE_SET;
- }
+ /* we need TIME format */
+ if (format != src->segment.format)
+ goto no_format;
+
+ /* Check if we are not at all seekable */
+ if (src->seekable == -1.0)
+ goto not_seekable;
+
+ /* Additional seeking-to-beginning-only check */
+ if (src->seekable == 0.0 && cur != 0)
+ goto not_seekable;
+
+ if (flags & GST_SEEK_FLAG_SEGMENT)
+ goto invalid_segment_flag;
/* get flush flag */
flush = flags & GST_SEEK_FLAG_FLUSH;
* blocking in preroll). */
if (flush) {
GST_DEBUG_OBJECT (src, "starting flush");
- gst_rtspsrc_flush (src, TRUE, FALSE);
+ gst_rtspsrc_flush (src, TRUE, FALSE, gst_event_get_seqnum (event));
} else {
if (src->task) {
gst_task_pause (src->task);
/* configure the seek parameters in the seeksegment. We will then have the
* right values in the segment to perform the seek */
- if (event) {
- GST_DEBUG_OBJECT (src, "configuring seek");
- gst_segment_do_seek (&seeksegment, rate, format, flags,
- cur_type, cur, stop_type, stop, &update);
- }
+ GST_DEBUG_OBJECT (src, "configuring seek");
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
/* figure out the last position we need to play. If it's configured (stop !=
* -1), use that, else we play until the total duration of the file */
if (flush) {
/* if we started flush, we stop now */
GST_DEBUG_OBJECT (src, "stopping flush");
- gst_rtspsrc_flush (src, FALSE, playing);
+ gst_rtspsrc_flush (src, FALSE, playing, gst_event_get_seqnum (event));
}
/* now we did the seek and can activate the new segment values */
&& GST_STATE (src) == GST_STATE_PLAYING)
|| (GST_STATE_PENDING (src) == GST_STATE_PLAYING);
GST_OBJECT_UNLOCK (src);
+
+ if (src->version >= GST_RTSP_VERSION_2_0) {
+ if (flags & GST_SEEK_FLAG_ACCURATE)
+ seek_style = "RAP";
+ else if (flags & GST_SEEK_FLAG_KEY_UNIT)
+ seek_style = "CoRAP";
+ else if (flags & GST_SEEK_FLAG_KEY_UNIT
+ && flags & GST_SEEK_FLAG_SNAP_BEFORE)
+ seek_style = "First-Prior";
+ else if (flags & GST_SEEK_FLAG_KEY_UNIT && flags & GST_SEEK_FLAG_SNAP_AFTER)
+ seek_style = "Next";
+ }
+
if (playing)
- gst_rtspsrc_play (src, &seeksegment, FALSE);
+ gst_rtspsrc_play (src, &seeksegment, FALSE, seek_style);
GST_RTSP_STREAM_UNLOCK (src);
GST_DEBUG_OBJECT (src, "unsupported format given, seek aborted.");
return FALSE;
}
+ not_seekable:
+ {
+ GST_DEBUG_OBJECT (src, "stream is not seekable");
+ return FALSE;
+ }
+ invalid_segment_flag:
+ {
+ GST_WARNING_OBJECT (src, "Segment seeks not supported");
+ return FALSE;
+ }
}
static gboolean
return res;
}
- /* this is the final event function we receive on the internal source pad when
- * we deal with TCP connections */
static gboolean
- gst_rtspsrc_handle_internal_src_event (GstPad * pad, GstObject * parent,
+ gst_rtspsrc_handle_src_sink_event (GstPad * pad, GstObject * parent,
GstEvent * event)
{
- gboolean res;
+ GstRTSPStream *stream;
- GST_DEBUG_OBJECT (pad, "received event %s", GST_EVENT_TYPE_NAME (event));
+ stream = gst_pad_get_element_private (pad);
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_SEEK:
- case GST_EVENT_QOS:
- case GST_EVENT_NAVIGATION:
- case GST_EVENT_LATENCY:
- default:
+ case GST_EVENT_STREAM_START:{
+ const gchar *upstream_id;
+ gchar *stream_id;
+
+ gst_event_parse_stream_start (event, &upstream_id);
+ stream_id = g_strdup_printf ("%s/%s", upstream_id, stream->stream_id);
+
gst_event_unref (event);
- res = TRUE;
- break;
+ event = gst_event_new_stream_start (stream_id);
+ g_free (stream_id);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return gst_pad_push_event (stream->srcpad, event);
+ }
+
+ /* this is the final event function we receive on the internal source pad when
+ * we deal with TCP connections */
+ static gboolean
+ gst_rtspsrc_handle_internal_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ gboolean res;
+
+ GST_DEBUG_OBJECT (pad, "received event %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ case GST_EVENT_QOS:
+ case GST_EVENT_NAVIGATION:
+ case GST_EVENT_LATENCY:
+ default:
+ gst_event_unref (event);
+ res = TRUE;
+ break;
}
return res;
}
if (format == GST_FORMAT_TIME) {
gboolean seekable =
src->cur_protocols != GST_RTSP_LOWER_TRANS_UDP_MCAST;
+ GstClockTime start = 0, duration = src->segment.duration;
/* seeking without duration is unlikely */
- seekable = seekable && src->seekable && src->segment.duration &&
+ seekable = seekable && src->seekable >= 0.0 && src->segment.duration &&
GST_CLOCK_TIME_IS_VALID (src->segment.duration);
- gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, 0,
- src->segment.duration);
+ if (seekable) {
+ if (src->seekable > 0.0) {
+ start = src->last_pos - src->seekable * GST_SECOND;
+ } else {
+ /* src->seekable == 0 means that we can only seek to 0 */
+ start = 0;
+ duration = 0;
+ }
+ }
+
+ GST_LOG_OBJECT (src, "seekable : %d", seekable);
+
+ gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, start,
+ duration);
res = TRUE;
}
break;
return res;
}
+ static GstFlowReturn
+ gst_rtspsrc_push_backchannel_buffer (GstRTSPSrc * src, guint id,
+ GstSample * sample)
+ {
+ GstFlowReturn res = GST_FLOW_OK;
+ GstRTSPStream *stream;
+
+ if (!src->conninfo.connected || src->state != GST_RTSP_STATE_PLAYING)
+ goto out;
+
+ stream = find_stream (src, &id, (gpointer) find_stream_by_id);
+ if (stream == NULL) {
+ GST_ERROR_OBJECT (src, "no stream with id %u", id);
+ goto out;
+ }
+
+ if (src->interleaved) {
+ GstBuffer *buffer;
+ GstMapInfo map;
+ guint8 *data;
+ guint size;
+ GstRTSPResult ret;
+ GstRTSPMessage message = { 0 };
+ GstRTSPConnInfo *conninfo;
+
+ buffer = gst_sample_get_buffer (sample);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ size = map.size;
+ data = map.data;
+
+ gst_rtsp_message_init_data (&message, stream->channel[0]);
+
+ /* lend the body data to the message */
+ gst_rtsp_message_take_body (&message, data, size);
+
+ if (stream->conninfo.connection)
+ conninfo = &stream->conninfo;
+ else
+ conninfo = &src->conninfo;
+
+ GST_DEBUG_OBJECT (src, "sending %u bytes backchannel RTP", size);
+ ret = gst_rtspsrc_connection_send (src, conninfo, &message, NULL);
+ GST_DEBUG_OBJECT (src, "sent backchannel RTP, %d", ret);
+
+ /* and steal it away again because we will free it when unreffing the
+ * buffer */
+ gst_rtsp_message_steal_body (&message, &data, &size);
+ gst_rtsp_message_unset (&message);
+
+ gst_buffer_unmap (buffer, &map);
+
+ res = GST_FLOW_OK;
+ } else {
+ g_signal_emit_by_name (stream->rtpsrc, "push-sample", sample, &res);
+ GST_DEBUG_OBJECT (src, "sent backchannel RTP sample %p: %s", sample,
+ gst_flow_get_name (res));
+ }
+
+ out:
+ gst_sample_unref (sample);
+
+ return res;
+ }
+
static GstPadProbeReturn
pad_blocked (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
}
}
+ static GstPadProbeReturn
+ udpsrc_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
+ {
+ guint32 *segment_seqnum = user_data;
+
+ switch (GST_EVENT_TYPE (info->data)) {
+ case GST_EVENT_SEGMENT:
+ if (!gst_event_is_writable (info->data))
+ info->data = gst_event_make_writable (info->data);
+
+ *segment_seqnum = gst_event_get_seqnum (info->data);
+ default:
+ break;
+ }
+
+ return GST_PAD_PROBE_OK;
+ }
+
static gboolean
copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
{
return TRUE;
}
+ static gboolean
+ add_backchannel_fakesink (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstPad * srcpad)
+ {
+ GstPad *sinkpad;
+ GstElement *fakesink;
+
+ fakesink = gst_element_factory_make ("fakesink", NULL);
+ if (fakesink == NULL) {
+ GST_ERROR_OBJECT (src, "no fakesink");
+ return FALSE;
+ }
+
+ sinkpad = gst_element_get_static_pad (fakesink, "sink");
+
+ GST_DEBUG_OBJECT (src, "backchannel stream %p, hooking fakesink", stream);
+
+ gst_bin_add (GST_BIN_CAST (src), fakesink);
+ if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
+ GST_WARNING_OBJECT (src, "could not link to fakesink");
+ return FALSE;
+ }
+
+ gst_object_unref (sinkpad);
+
+ gst_element_sync_state_with_parent (fakesink);
+ return TRUE;
+ }
+
/* this callback is called when the session manager generated a new src pad with
* payloaded RTP packets. We simply ghost the pad here. */
static void
GList *ostreams;
GstRTSPStream *stream;
gboolean all_added;
+ GstPad *internal_src;
GST_DEBUG_OBJECT (src, "got new manager pad %" GST_PTR_FORMAT, pad);
gst_object_unref (template);
g_free (name);
+ /* We intercept and modify the stream start event */
+ internal_src =
+ GST_PAD (gst_proxy_pad_get_internal (GST_PROXY_PAD (stream->srcpad)));
+ gst_pad_set_element_private (internal_src, stream);
+ gst_pad_set_event_function (internal_src, gst_rtspsrc_handle_src_sink_event);
+ gst_object_unref (internal_src);
+
gst_pad_set_event_function (stream->srcpad, gst_rtspsrc_handle_src_event);
gst_pad_set_query_function (stream->srcpad, gst_rtspsrc_handle_src_query);
gst_pad_set_active (stream->srcpad, TRUE);
gst_pad_sticky_events_foreach (pad, copy_sticky_events, stream->srcpad);
- gst_element_add_pad (GST_ELEMENT_CAST (src), stream->srcpad);
+
+ /* don't add the srcpad if this is a sendonly stream */
+ if (stream->is_backchannel)
+ add_backchannel_fakesink (src, stream, stream->srcpad);
+ else
+ gst_element_add_pad (GST_ELEMENT_CAST (src), stream->srcpad);
if (all_added) {
GST_DEBUG_OBJECT (src, "We added all streams");
}
static void
- on_timeout (GObject * session, GObject * source, GstRTSPStream * stream)
+ on_timeout_common (GObject * session, GObject * source, GstRTSPStream * stream)
{
GstRTSPSrc *src = stream->parent;
guint ssrc;
}
static void
+ on_timeout (GObject * session, GObject * source, GstRTSPStream * stream)
+ {
+ GstRTSPSrc *src = stream->parent;
+
+ /* timeout, post element message */
+ gst_element_post_message (GST_ELEMENT_CAST (src),
+ gst_message_new_element (GST_OBJECT_CAST (src),
+ gst_structure_new ("GstRTSPSrcTimeout",
+ "cause", G_TYPE_ENUM, GST_RTSP_SRC_TIMEOUT_CAUSE_RTCP,
+ "stream-number", G_TYPE_INT, stream->id, "ssrc", G_TYPE_UINT,
+ stream->ssrc, NULL)));
+
+ on_timeout_common (session, source, stream);
+ }
+
+ static void
on_npt_stop (GstElement * rtpbin, guint session, guint ssrc, GstRTSPSrc * src)
{
GstRTSPStream *stream;
src->max_rtcp_rtp_time_diff, NULL);
}
+ if (g_object_class_find_property (klass, "max-ts-offset-adjustment")) {
+ g_object_set (src->manager, "max-ts-offset-adjustment",
+ src->max_ts_offset_adjustment, NULL);
+ }
+
+ if (g_object_class_find_property (klass, "max-ts-offset")) {
+ gint64 max_ts_offset;
+
+ /* setting max-ts-offset in the manager has side effects so only do it
+ * if the value differs */
+ g_object_get (src->manager, "max-ts-offset", &max_ts_offset, NULL);
+ if (max_ts_offset != src->max_ts_offset) {
+ g_object_set (src->manager, "max-ts-offset", src->max_ts_offset,
+ NULL);
+ }
+ }
+
/* buffer mode pauses are handled by adding offsets to buffer times,
* but some depayloaders may have a hard time syncing output times
* with such input times, e.g. container ones, most notably ASF */
g_signal_connect (rtpsession, "on-bye-ssrc", (GCallback) on_bye_ssrc,
stream);
- g_signal_connect (rtpsession, "on-bye-timeout", (GCallback) on_timeout,
- stream);
+ g_signal_connect (rtpsession, "on-bye-timeout",
+ (GCallback) on_timeout_common, stream);
g_signal_connect (rtpsession, "on-timeout", (GCallback) on_timeout,
stream);
g_signal_connect (rtpsession, "on-ssrc-active",
gst_object_ref_sink (stream->udpsrc[1]);
if (src->multi_iface != NULL)
- g_object_set (G_OBJECT (stream->udpsrc[0]), "multicast-iface",
+ g_object_set (G_OBJECT (stream->udpsrc[1]), "multicast-iface",
src->multi_iface, NULL);
gst_element_set_state (stream->udpsrc[1], GST_STATE_READY);
GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_BUFFER |
GST_PAD_PROBE_TYPE_BUFFER_LIST, pad_blocked, src, NULL);
+ gst_pad_add_probe (stream->blockedpad,
+ GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, udpsrc_probe_cb,
+ &(stream->segment_seqnum[0]), NULL);
+
if (stream->channelpad[0]) {
GST_DEBUG_OBJECT (src, "connecting UDP source 0 to manager");
/* configure for UDP delivery, we need to connect the UDP pads to
GST_DEBUG_OBJECT (src, "connecting UDP source 1 to manager");
pad = gst_element_get_static_pad (stream->udpsrc[1], "src");
+ gst_pad_add_probe (pad,
+ GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, udpsrc_probe_cb,
+ &(stream->segment_seqnum[1]), NULL);
gst_pad_link_full (pad, stream->channelpad[1],
GST_PAD_LINK_CHECK_NOTHING);
gst_object_unref (pad);
goto no_destination;
/* try to construct the fakesrc to the RTP port of the server to open up any
- * NAT firewalls */
+ * NAT firewalls or, if backchannel, construct an appsrc */
if (do_rtp) {
GST_DEBUG_OBJECT (src, "configure RTP UDP sink for %s:%d", destination,
rtp_port);
g_object_unref (socket);
}
- /* the source for the dummy packets to open up NAT */
- stream->fakesrc = gst_element_factory_make ("fakesrc", NULL);
- if (stream->fakesrc == NULL)
- goto no_fakesrc_element;
+ if (stream->is_backchannel) {
+ /* appsrc is for the app to shovel data using push-backchannel-buffer */
+ stream->rtpsrc = gst_element_factory_make ("appsrc", NULL);
+ if (stream->rtpsrc == NULL)
+ goto no_appsrc_element;
- /* random data in 5 buffers, a size of 200 bytes should be fine */
- g_object_set (G_OBJECT (stream->fakesrc), "filltype", 3, "num-buffers", 5,
- "sizetype", 2, "sizemax", 200, "silent", TRUE, NULL);
+ /* internal use only, don't emit signals */
+ g_object_set (G_OBJECT (stream->rtpsrc), "emit-signals", TRUE,
+ "is-live", TRUE, NULL);
+ } else {
+ /* the source for the dummy packets to open up NAT */
+ stream->rtpsrc = gst_element_factory_make ("fakesrc", NULL);
+ if (stream->rtpsrc == NULL)
+ goto no_fakesrc_element;
+
+ /* random data in 5 buffers, a size of 200 bytes should be fine */
+ g_object_set (G_OBJECT (stream->rtpsrc), "filltype", 3, "num-buffers", 5,
+ "sizetype", 2, "sizemax", 200, "silent", TRUE, NULL);
+ }
/* keep everything locked */
gst_element_set_locked_state (stream->udpsink[0], TRUE);
- gst_element_set_locked_state (stream->fakesrc, TRUE);
+ gst_element_set_locked_state (stream->rtpsrc, TRUE);
gst_object_ref (stream->udpsink[0]);
gst_bin_add (GST_BIN_CAST (src), stream->udpsink[0]);
- gst_object_ref (stream->fakesrc);
- gst_bin_add (GST_BIN_CAST (src), stream->fakesrc);
+ gst_object_ref (stream->rtpsrc);
+ gst_bin_add (GST_BIN_CAST (src), stream->rtpsrc);
- gst_element_link_pads_full (stream->fakesrc, "src", stream->udpsink[0],
+ gst_element_link_pads_full (stream->rtpsrc, "src", stream->udpsink[0],
"sink", GST_PAD_LINK_CHECK_NOTHING);
}
if (do_rtcp) {
GST_ERROR_OBJECT (src, "no UDP sink element found");
return FALSE;
}
+ no_appsrc_element:
+ {
+ GST_ERROR_OBJECT (src, "no appsrc element found");
+ return FALSE;
+ }
no_fakesrc_element:
{
GST_ERROR_OBJECT (src, "no fakesrc element found");
case GST_RTSP_LOWER_TRANS_UDP:
if (!gst_rtspsrc_stream_configure_udp (src, stream, transport, &outpad))
goto transport_failed;
- /* configure udpsinks back to the server for RTCP messages and for the
- * dummy RTP messages to open NAT. */
+ /* configure udpsinks back to the server for RTCP messages, for the
+ * dummy RTP messages to open NAT, and for the backchannel */
if (!gst_rtspsrc_stream_configure_udp_sinks (src, stream, transport))
goto transport_failed;
break;
goto unknown_transport;
}
- if (outpad) {
- GST_DEBUG_OBJECT (src, "creating ghostpad");
+ /* using backchannel and no manager, hence no srcpad for this stream */
+ if (outpad && stream->is_backchannel) {
+ add_backchannel_fakesink (src, stream, outpad);
+ gst_object_unref (outpad);
+ } else if (outpad) {
+ GST_DEBUG_OBJECT (src, "creating ghostpad for stream %p", stream);
gst_pad_use_fixed_caps (outpad);
/* ERRORS */
transport_failed:
{
- GST_DEBUG_OBJECT (src, "failed to configure transport");
+ GST_WARNING_OBJECT (src, "failed to configure transport");
return FALSE;
}
unknown_transport:
{
- GST_DEBUG_OBJECT (src, "unknown transport");
+ GST_WARNING_OBJECT (src, "unknown transport");
return FALSE;
}
no_manager:
{
- GST_DEBUG_OBJECT (src, "cannot get a session manager");
+ GST_WARNING_OBJECT (src, "cannot get a session manager");
return FALSE;
}
}
for (walk = src->streams; walk; walk = g_list_next (walk)) {
GstRTSPStream *stream = (GstRTSPStream *) walk->data;
- if (stream->fakesrc && stream->udpsink[0]) {
+ if (!stream->rtpsrc || !stream->udpsink[0])
+ continue;
+
+ if (stream->is_backchannel)
+ GST_DEBUG_OBJECT (src, "starting backchannel stream %p", stream);
+ else
GST_DEBUG_OBJECT (src, "sending dummy packet to stream %p", stream);
- gst_element_set_state (stream->udpsink[0], GST_STATE_NULL);
- gst_element_set_state (stream->fakesrc, GST_STATE_NULL);
- gst_element_set_state (stream->udpsink[0], GST_STATE_PLAYING);
- gst_element_set_state (stream->fakesrc, GST_STATE_PLAYING);
- }
+
+ gst_element_set_state (stream->udpsink[0], GST_STATE_NULL);
+ gst_element_set_state (stream->rtpsrc, GST_STATE_NULL);
+ gst_element_set_state (stream->udpsink[0], GST_STATE_PLAYING);
+ gst_element_set_state (stream->rtpsrc, GST_STATE_PLAYING);
}
return TRUE;
}
/* add the pad */
if (!stream->added) {
GST_DEBUG_OBJECT (src, "adding stream pad %p", stream);
- gst_element_add_pad (GST_ELEMENT_CAST (src), stream->srcpad);
+ if (stream->is_backchannel)
+ add_backchannel_fakesink (src, stream, stream->srcpad);
+ else
+ gst_element_add_pad (GST_ELEMENT_CAST (src), stream->srcpad);
stream->added = TRUE;
}
}
goto done;
if (stream->udpsrc[0]) {
- gst_event_ref (event);
- res = gst_element_send_event (stream->udpsrc[0], event);
+ GstEvent *sent_event;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+ sent_event = gst_event_new_eos ();
+ gst_event_set_seqnum (sent_event, stream->segment_seqnum[0]);
+ } else {
+ sent_event = gst_event_ref (event);
+ }
+
+ res = gst_element_send_event (stream->udpsrc[0], sent_event);
} else if (stream->channelpad[0]) {
gst_event_ref (event);
if (GST_PAD_IS_SRC (stream->channelpad[0]))
}
if (stream->udpsrc[1]) {
- gst_event_ref (event);
- res &= gst_element_send_event (stream->udpsrc[1], event);
+ GstEvent *sent_event;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+ sent_event = gst_event_new_eos ();
+ if (stream->segment_seqnum[1] != GST_SEQNUM_INVALID) {
+ gst_event_set_seqnum (sent_event, stream->segment_seqnum[1]);
+ }
+ } else {
+ sent_event = gst_event_ref (event);
+ }
+
+ res &= gst_element_send_event (stream->udpsrc[1], sent_event);
} else if (stream->channelpad[1]) {
gst_event_ref (event);
if (GST_PAD_IS_SRC (stream->channelpad[1]))
return res;
}
+ static gboolean
+ accept_certificate_cb (GTlsConnection * conn, GTlsCertificate * peer_cert,
+ GTlsCertificateFlags errors, gpointer user_data)
+ {
+ GstRTSPSrc *src = user_data;
+ gboolean accept = FALSE;
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_ACCEPT_CERTIFICATE], 0, conn,
+ peer_cert, errors, &accept);
+
+ return accept;
+ }
+
static GstRTSPResult
gst_rtsp_conninfo_connect (GstRTSPSrc * src, GstRTSPConnInfo * info,
gboolean async)
if (src->tls_interaction)
gst_rtsp_connection_set_tls_interaction (info->connection,
src->tls_interaction);
+ gst_rtsp_connection_set_accept_certificate_func (info->connection,
+ accept_certificate_cb, src, NULL);
}
if (info->url->transports & GST_RTSP_LOWER_TRANS_HTTP)
if (res < 0)
goto send_error;
- DEBUG_RTSP (src, &request);
+ request.type_data.request.version = src->version;
res = gst_rtspsrc_connection_send (src, &src->conninfo, &request, NULL);
if (res < 0)
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_SERVER,
+ "Could not receive message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive message. (%s)", str));
+#endif
g_free (str);
gst_rtsp_message_unset (&message);
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,
+ "Could not handle server message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not handle server message. (%s)", str));
+#endif
g_free (str);
gst_rtsp_message_unset (&message);
return GST_FLOW_ERROR;
src->conninfo.connected = FALSE;
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not connect to server.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
("Could not connect to server. (%s)", str));
+#endif
g_free (str);
ret = GST_FLOW_ERROR;
} else {
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,
+ "Could not receive message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive message. (%s)", str));
+#endif
g_free (str);
return GST_FLOW_ERROR;
}
gst_rtsp_message_unset (&message);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,
+ "Could not handle server message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not handle server message. (%s)", str));
+#endif
g_free (str);
ret = GST_FLOW_ERROR;
} else {
goto open_failed;
/* start playback */
- if (gst_rtspsrc_play (src, &src->segment, async) < 0)
+ if (gst_rtspsrc_play (src, &src->segment, async, NULL) < 0)
goto play_failed;
done:
{
src->cur_protocols = 0;
/* no transport possible, post an error and stop */
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_TRANSPORT,
+ "Could not receive any UDP packets for seconds, maybe your firewall is blocking it. No other protocols to try.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive any UDP packets for %.4f seconds, maybe your "
"firewall is blocking it. No other protocols to try.",
gst_guint64_to_gdouble (src->udp_timeout) / 1000000.0));
+#endif
return GST_RTSP_ERROR;
}
open_failed:
case CMD_PAUSE:
GST_ELEMENT_PROGRESS (src, START, "request", ("Sending PAUSE request"));
break;
+ case CMD_GET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, START, "request",
+ ("Sending GET_PARAMETER request"));
+ break;
+ case CMD_SET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, START, "request",
+ ("Sending SET_PARAMETER request"));
+ break;
case CMD_CLOSE:
GST_ELEMENT_PROGRESS (src, START, "close", ("Closing Stream"));
break;
static void
gst_rtspsrc_loop_complete_cmd (GstRTSPSrc * src, gint cmd)
{
- #endif
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GstMessage *s;
+ GST_WARNING_OBJECT (src, "Got cmd %s", cmd_to_string (cmd));
++#endif
+
switch (cmd) {
case CMD_OPEN:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_DEBUG_OBJECT (src,
+ "rtsp_duration %" GST_TIME_FORMAT
+ ", rtsp_audio_codec %s , rtsp_video_codec %s , rtsp_video_frame_size %s",
+ GST_TIME_ARGS (src->segment.duration), src->audio_codec,
+ src->video_codec, src->video_frame_size);
+
+ /* post message */
+ s = gst_message_new_element (GST_OBJECT_CAST (src),
+ gst_structure_new ("rtspsrc_properties",
+ "rtsp_duration", G_TYPE_UINT64, src->segment.duration,
+ "rtsp_audio_codec", G_TYPE_STRING, src->audio_codec,
+ "rtsp_video_codec", G_TYPE_STRING, src->video_codec,
+ "rtsp_video_frame_size", G_TYPE_STRING, src->video_frame_size,
+ NULL));
+
+ gst_element_post_message (GST_ELEMENT_CAST (src), s);
+#endif
GST_ELEMENT_PROGRESS (src, COMPLETE, "open", ("Opened Stream"));
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* rtspsrc must reach the PAUSED state here so the SDP is parsed before the PAUSED state change completes. */
+ g_mutex_lock (&(src)->pause_lock);
+ g_cond_signal (&(src)->open_end);
+ g_mutex_unlock (&(src)->pause_lock);
+#endif
break;
case CMD_PLAY:
GST_ELEMENT_PROGRESS (src, COMPLETE, "request", ("Sent PLAY request"));
case CMD_PAUSE:
GST_ELEMENT_PROGRESS (src, COMPLETE, "request", ("Sent PAUSE request"));
break;
+ case CMD_GET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, COMPLETE, "request",
+ ("Sent GET_PARAMETER request"));
+ break;
+ case CMD_SET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, COMPLETE, "request",
+ ("Sent SET_PARAMETER request"));
+ break;
case CMD_CLOSE:
GST_ELEMENT_PROGRESS (src, COMPLETE, "close", ("Closed Stream"));
break;
case CMD_PAUSE:
GST_ELEMENT_PROGRESS (src, CANCELED, "request", ("PAUSE canceled"));
break;
+ case CMD_GET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, CANCELED, "request",
+ ("GET_PARAMETER canceled"));
+ break;
+ case CMD_SET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, CANCELED, "request",
+ ("SET_PARAMETER canceled"));
+ break;
case CMD_CLOSE:
GST_ELEMENT_PROGRESS (src, CANCELED, "close", ("Close canceled"));
break;
switch (cmd) {
case CMD_OPEN:
GST_ELEMENT_PROGRESS (src, ERROR, "open", ("Open failed"));
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* End the conditional wait for pause when open fails. */
+ g_mutex_lock (&(src)->pause_lock);
+ g_cond_signal (&(src)->open_end);
+ g_mutex_unlock (&(src)->pause_lock);
+ GST_WARNING_OBJECT (src,
+ "ending conditional wait for pause as open is failed.");
+#endif
break;
case CMD_PLAY:
GST_ELEMENT_PROGRESS (src, ERROR, "request", ("PLAY failed"));
case CMD_PAUSE:
GST_ELEMENT_PROGRESS (src, ERROR, "request", ("PAUSE failed"));
break;
+ case CMD_GET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, ERROR, "request", ("GET_PARAMETER failed"));
+ break;
+ case CMD_SET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, ERROR, "request", ("SET_PARAMETER failed"));
+ break;
case CMD_CLOSE:
GST_ELEMENT_PROGRESS (src, ERROR, "close", ("Close failed"));
break;
GST_OBJECT_LOCK (src);
old = src->pending_cmd;
+
if (old == CMD_RECONNECT) {
GST_DEBUG_OBJECT (src, "ignore, we were reconnecting");
cmd = CMD_RECONNECT;
* still the pending command. */
GST_DEBUG_OBJECT (src, "ignore, we were closing");
cmd = CMD_CLOSE;
+ } else if (old == CMD_SET_PARAMETER) {
+ GST_DEBUG_OBJECT (src, "ignore, we have a pending %s", cmd_to_string (old));
+ cmd = CMD_SET_PARAMETER;
+ } else if (old == CMD_GET_PARAMETER) {
+ GST_DEBUG_OBJECT (src, "ignore, we have a pending %s", cmd_to_string (old));
+ cmd = CMD_GET_PARAMETER;
} else if (old != CMD_WAIT) {
src->pending_cmd = CMD_WAIT;
GST_OBJECT_UNLOCK (src);
}
static gboolean
+ gst_rtspsrc_loop_send_cmd_and_wait (GstRTSPSrc * src, gint cmd, gint mask,
+ GstClockTime timeout)
+ {
+ gboolean flushed = gst_rtspsrc_loop_send_cmd (src, cmd, mask);
+
+ if (timeout > 0) {
+ gint64 end_time = g_get_monotonic_time () + (timeout / 1000);
+ GST_OBJECT_LOCK (src);
+ while (src->pending_cmd == cmd || src->busy_cmd == cmd) {
+ if (!g_cond_wait_until (&src->cmd_cond, GST_OBJECT_GET_LOCK (src),
+ end_time)) {
+ GST_WARNING_OBJECT (src,
+ "Timed out waiting for TEARDOWN to be processed.");
+ break; /* timeout passed */
+ }
+ }
+ GST_OBJECT_UNLOCK (src);
+ }
+ return flushed;
+ }
+
+ static gboolean
gst_rtspsrc_loop (GstRTSPSrc * src)
{
GstFlowReturn ret;
{
/* Output an error indicating that we couldn't connect because there were
* no supported authentication protocols */
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_NOT_AUTHORIZED,
+ "No supported authentication protocol was found");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
("No supported authentication protocol was found"));
+#endif
return FALSE;
}
no_user_pass:
}
static GstRTSPResult
- gst_rtspsrc_try_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
- GstRTSPMessage * request, GstRTSPMessage * response,
- GstRTSPStatusCode * code)
+ gst_rtsp_src_receive_response (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * response, GstRTSPStatusCode * code)
{
- GstRTSPResult res;
GstRTSPStatusCode thecode;
gchar *content_base = NULL;
- gint try = 0;
-
- again:
- if (!src->short_header)
- gst_rtsp_ext_list_before_send (src->extensions, request);
-
- GST_DEBUG_OBJECT (src, "sending message");
+ GstRTSPResult res = gst_rtspsrc_connection_receive (src, conninfo,
+ response, src->ptcp_timeout);
- DEBUG_RTSP (src, request);
-
- res = gst_rtspsrc_connection_send (src, conninfo, request, src->ptcp_timeout);
- if (res < 0)
- goto send_error;
-
- gst_rtsp_connection_reset_timeout (conninfo->connection);
-
- next:
- res =
- gst_rtspsrc_connection_receive (src, conninfo, response,
- src->ptcp_timeout);
if (res < 0)
goto receive_error;
goto server_eof;
else if (res < 0)
goto handle_request_failed;
- goto next;
+
+ /* Not a response, receive next message */
+ return gst_rtsp_src_receive_response (src, conninfo, response, code);
case GST_RTSP_MESSAGE_RESPONSE:
/* ok, a response is good */
GST_DEBUG_OBJECT (src, "received response message");
/* get next response */
GST_DEBUG_OBJECT (src, "handle data response message");
gst_rtspsrc_handle_data (src, response);
- goto next;
+
+ /* Not a response, receive next message */
+ return gst_rtsp_src_receive_response (src, conninfo, response, code);
default:
GST_WARNING_OBJECT (src, "ignoring unknown message type %d",
response->type);
- goto next;
+
+ /* Not a response, receive next message */
+ return gst_rtsp_src_receive_response (src, conninfo, response, code);
}
thecode = response->type_data.response.code;
g_free (src->content_base);
src->content_base = g_strdup (content_base);
}
- gst_rtsp_ext_list_after_send (src->extensions, request, response);
return GST_RTSP_OK;
/* ERRORS */
- send_error:
- {
- gchar *str = gst_rtsp_strresult (res);
-
- if (res != GST_RTSP_EINTR) {
- #ifdef TIZEN_FEATURE_RTSP_MODIFICATION
- gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
- "Could not send message.");
- #else
- GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
- ("Could not send message. (%s)", str));
- #endif
- } else {
- GST_WARNING_OBJECT (src, "send interrupted");
- }
- g_free (str);
- return res;
- }
receive_error:
{
switch (res) {
case GST_RTSP_EEOF:
- GST_WARNING_OBJECT (src, "server closed connection");
- if ((try == 0) && !src->interleaved && src->udp_reconnect) {
- try++;
- /* if reconnect succeeds, try again */
- if ((res =
- gst_rtsp_conninfo_reconnect (src, &src->conninfo,
- FALSE)) == 0)
- goto again;
- }
- /* only try once after reconnect, then fallthrough and error out */
+ return GST_RTSP_EEOF;
default:
{
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,
+ "Could not receive message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not receive message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "receive interrupted");
}
}
}
- /**
- * gst_rtspsrc_send:
- * @src: the rtsp source
- * @conn: the connection to send on
- * @request: must point to a valid request
- * @response: must point to an empty #GstRTSPMessage
- * @code: an optional code result
- *
- * send @request and retrieve the response in @response. optionally @code can be
- * non-NULL in which case it will contain the status code of the response.
- *
- * If This function returns #GST_RTSP_OK, @response will contain a valid response
- * message that should be cleaned with gst_rtsp_message_unset() after usage.
- *
- * If @code is NULL, this function will return #GST_RTSP_ERROR (with an invalid
- * @response message) if the response code was not 200 (OK).
- *
- * If the attempt results in an authentication failure, then this will attempt
- * to retrieve authentication credentials via gst_rtspsrc_setup_auth and retry
- * the request.
- *
- * Returns: #GST_RTSP_OK if the processing was successful.
- */
+
static GstRTSPResult
- gst_rtspsrc_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ gst_rtspsrc_try_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
GstRTSPMessage * request, GstRTSPMessage * response,
GstRTSPStatusCode * code)
{
- GstRTSPStatusCode int_code = GST_RTSP_STS_OK;
- GstRTSPResult res = GST_RTSP_ERROR;
- gint count;
- gboolean retry;
- GstRTSPMethod method = GST_RTSP_INVALID;
+ GstRTSPResult res;
+ gint try = 0;
+ gboolean allow_send = TRUE;
- count = 0;
- do {
- retry = FALSE;
+ again:
+ if (!src->short_header)
+ gst_rtsp_ext_list_before_send (src->extensions, request);
- /* make sure we don't loop forever */
- if (count++ > 8)
- break;
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_BEFORE_SEND], 0,
+ request, &allow_send);
+ if (!allow_send) {
+ GST_DEBUG_OBJECT (src, "skipping message, disabled by signal");
+ return GST_RTSP_OK;
+ }
- /* save method so we can disable it when the server complains */
+ GST_DEBUG_OBJECT (src, "sending message");
+
+ DEBUG_RTSP (src, request);
+
+ res = gst_rtspsrc_connection_send (src, conninfo, request, src->ptcp_timeout);
+ if (res < 0)
+ goto send_error;
+
+ gst_rtsp_connection_reset_timeout (conninfo->connection);
+ if (!response)
+ return res;
+
+ res = gst_rtsp_src_receive_response (src, conninfo, response, code);
+ if (res == GST_RTSP_EEOF) {
+ GST_WARNING_OBJECT (src, "server closed connection");
+ /* only try once after reconnect, then fallthrough and error out */
+ if ((try == 0) && !src->interleaved && src->udp_reconnect) {
+ try++;
+ /* if reconnect succeeds, try again */
+ if ((res = gst_rtsp_conninfo_reconnect (src, &src->conninfo, FALSE)) == 0)
+ goto again;
+ }
+ }
+ gst_rtsp_ext_list_after_send (src->extensions, request, response);
+
+ return res;
+
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not send message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not send message. (%s)", str));
++#endif
+ } else {
+ GST_WARNING_OBJECT (src, "send interrupted");
+ }
+ g_free (str);
+ return res;
+ }
+ }
+
+ /**
+ * gst_rtspsrc_send:
+ * @src: the rtsp source
+ * @conninfo: the connection information to send on
+ * @request: must point to a valid request
+ * @response: must point to an empty #GstRTSPMessage
+ * @code: an optional code result
+ * @versions: List of versions to try, setting it back onto the @request message
+ * if not set, `src->version` will be used as RTSP version.
+ *
+ * send @request and retrieve the response in @response. optionally @code can be
+ * non-NULL in which case it will contain the status code of the response.
+ *
+ * If this function returns #GST_RTSP_OK, @response will contain a valid response
+ * message that should be cleaned with gst_rtsp_message_unset() after usage.
+ *
+ * If @code is NULL, this function will return #GST_RTSP_ERROR (with an invalid
+ * @response message) if the response code was not 200 (OK).
+ *
+ * If the attempt results in an authentication failure, then this will attempt
+ * to retrieve authentication credentials via gst_rtspsrc_setup_auth and retry
+ * the request.
+ *
+ * Returns: #GST_RTSP_OK if the processing was successful.
+ */
+ static GstRTSPResult
+ gst_rtspsrc_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * request, GstRTSPMessage * response,
+ GstRTSPStatusCode * code, GstRTSPVersion * versions)
+ {
+ GstRTSPStatusCode int_code = GST_RTSP_STS_OK;
+ GstRTSPResult res = GST_RTSP_ERROR;
+ gint count;
+ gboolean retry;
+ GstRTSPMethod method = GST_RTSP_INVALID;
+ gint version_retry = 0;
+
+ count = 0;
+ do {
+ retry = FALSE;
+
+ /* make sure we don't loop forever */
+ if (count++ > 8)
+ break;
+
+ /* save method so we can disable it when the server complains */
method = request->type_data.request.method;
+ if (!versions)
+ request->type_data.request.version = src->version;
+
if ((res =
gst_rtspsrc_try_send (src, conninfo, request, response,
&int_code)) < 0)
retry = TRUE;
}
break;
+ case GST_RTSP_STS_RTSP_VERSION_NOT_SUPPORTED:
+ GST_INFO_OBJECT (src, "Version %s not supported by the server",
+ versions ? gst_rtsp_version_as_text (versions[version_retry]) :
+ "unknown");
+ if (versions && versions[version_retry] != GST_RTSP_VERSION_INVALID) {
+ GST_INFO_OBJECT (src, "Unsupported version %s => trying %s",
+ gst_rtsp_version_as_text (request->type_data.request.version),
+ gst_rtsp_version_as_text (versions[version_retry]));
+ request->type_data.request.version = versions[version_retry];
+ retry = TRUE;
+ version_retry++;
+ break;
+ }
+      /* fallthrough */
default:
break;
}
switch (response->type_data.response.code) {
case GST_RTSP_STS_NOT_FOUND:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "STS NOT FOUND");
+#else
RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, NOT_FOUND,
"Not found");
+#endif
break;
case GST_RTSP_STS_UNAUTHORIZED:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_NOT_AUTHORIZED,
+ "STS NOT AUTHORIZED");
+#else
RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, NOT_AUTHORIZED,
"Unauthorized");
+#endif
break;
case GST_RTSP_STS_MOVED_PERMANENTLY:
case GST_RTSP_STS_MOVE_TEMPORARILY:
res = GST_RTSP_OK;
break;
default:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_UNEXPECTED_MSG,
+ "Got error response from Server");
+#else
RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, READ,
"Unhandled error");
+#endif
break;
}
/* if we return ERROR we should unset the response ourselves */
gst_rtspsrc_send_cb (GstRTSPExtension * ext, GstRTSPMessage * request,
GstRTSPMessage * response, GstRTSPSrc * src)
{
- return gst_rtspsrc_send (src, &src->conninfo, request, response, NULL);
+ return gst_rtspsrc_send (src, &src->conninfo, request, response, NULL, NULL);
}
while (TRUE) {
respoptions = NULL;
gst_rtsp_message_get_header (response, field, &respoptions, indx);
- if (indx == 0 && !respoptions) {
- /* if no Allow header was found then try the Public header... */
- field = GST_RTSP_HDR_PUBLIC;
- gst_rtsp_message_get_header (response, field, &respoptions, indx);
- }
+ if (!respoptions)
+ break;
+
+ src->methods |= gst_rtsp_options_from_text (respoptions);
+
+ indx++;
+ }
+
+ indx = 0;
+ field = GST_RTSP_HDR_PUBLIC;
+ while (TRUE) {
+ respoptions = NULL;
+ gst_rtsp_message_get_header (response, field, &respoptions, indx);
if (!respoptions)
break;
* this */
src->methods |= GST_RTSP_PLAY;
/* also assume it will support Range */
- src->seekable = TRUE;
+ src->seekable = G_MAXFLOAT;
/* we need describe and setup */
if (!(src->methods & GST_RTSP_DESCRIBE))
/* ERRORS */
no_describe:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
+ "Server does not support DESCRIBE.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
("Server does not support DESCRIBE."));
+#endif
return FALSE;
}
no_setup:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
+ "Server does not support SETUP.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
("Server does not support SETUP."));
+#endif
return FALSE;
}
}
g_string_append_printf (str, "%d", src->free_channel);
else if (next[3] == '2')
g_string_append_printf (str, "%d", src->free_channel + 1);
+
}
p = next + 4;
}
+ if (src->version >= GST_RTSP_VERSION_2_0)
+ src->free_channel += 2;
+
/* append final part */
g_string_append (str, p);
return result;
}
+ static GstRTSPResult
+ gst_rtsp_src_setup_stream_from_response (GstRTSPSrc * src,
+ GstRTSPStream * stream, GstRTSPMessage * response,
+ GstRTSPLowerTrans * protocols, gint retry, gint * rtpport, gint * rtcpport)
+ {
+ gchar *resptrans = NULL;
+ GstRTSPTransport transport = { 0 };
+
+ gst_rtsp_message_get_header (response, GST_RTSP_HDR_TRANSPORT, &resptrans, 0);
+ if (!resptrans) {
+ gst_rtspsrc_stream_free_udp (stream);
+ goto no_transport;
+ }
+
+ /* parse transport, go to next stream on parse error */
+ if (gst_rtsp_transport_parse (resptrans, &transport) != GST_RTSP_OK) {
+ GST_WARNING_OBJECT (src, "failed to parse transport %s", resptrans);
+ return GST_RTSP_ELAST;
+ }
+
+ /* update allowed transports for other streams. once the transport of
+ * one stream has been determined, we make sure that all other streams
+ * are configured in the same way */
+ switch (transport.lower_transport) {
+ case GST_RTSP_LOWER_TRANS_TCP:
+ GST_DEBUG_OBJECT (src, "stream %p as TCP interleaved", stream);
+ if (protocols)
+ *protocols = GST_RTSP_LOWER_TRANS_TCP;
+ src->interleaved = TRUE;
+ if (src->version < GST_RTSP_VERSION_2_0) {
+ /* update free channels */
+ src->free_channel = MAX (transport.interleaved.min, src->free_channel);
+ src->free_channel = MAX (transport.interleaved.max, src->free_channel);
+ src->free_channel++;
+ }
+ break;
+ case GST_RTSP_LOWER_TRANS_UDP_MCAST:
+ /* only allow multicast for other streams */
+ GST_DEBUG_OBJECT (src, "stream %p as UDP multicast", stream);
+ if (protocols)
+ *protocols = GST_RTSP_LOWER_TRANS_UDP_MCAST;
+ /* if the server selected our ports, increment our counters so that
+ * we select a new port later */
+ if (src->next_port_num == transport.port.min &&
+ src->next_port_num + 1 == transport.port.max) {
+ src->next_port_num += 2;
+ }
+ break;
+ case GST_RTSP_LOWER_TRANS_UDP:
+ /* only allow unicast for other streams */
+ GST_DEBUG_OBJECT (src, "stream %p as UDP unicast", stream);
+ if (protocols)
+ *protocols = GST_RTSP_LOWER_TRANS_UDP;
+ break;
+ default:
+ GST_DEBUG_OBJECT (src, "stream %p unknown transport %d", stream,
+ transport.lower_transport);
+ break;
+ }
+
+ if (!src->interleaved || !retry) {
+ /* now configure the stream with the selected transport */
+ if (!gst_rtspsrc_stream_configure_transport (stream, &transport)) {
+ GST_DEBUG_OBJECT (src,
+ "could not configure stream %p transport, skipping stream", stream);
+ goto done;
+ } else if (stream->udpsrc[0] && stream->udpsrc[1] && rtpport && rtcpport) {
+ /* retain the first allocated UDP port pair */
+ g_object_get (G_OBJECT (stream->udpsrc[0]), "port", rtpport, NULL);
+ g_object_get (G_OBJECT (stream->udpsrc[1]), "port", rtcpport, NULL);
+ }
+ }
+ /* we need to activate at least one stream when we detect activity */
+ src->need_activate = TRUE;
+
+ /* stream is setup now */
+ stream->setup = TRUE;
+ stream->waiting_setup_response = FALSE;
+
+ if (src->version >= GST_RTSP_VERSION_2_0) {
+ gchar *prop, *media_properties;
+ gchar **props;
+ gint i;
+
+ if (gst_rtsp_message_get_header (response, GST_RTSP_HDR_MEDIA_PROPERTIES,
+ &media_properties, 0) != GST_RTSP_OK) {
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Error: No MEDIA_PROPERTY header in a SETUP request in RTSP 2.0"
+ " - this header is mandatory."));
+
+ gst_rtsp_message_unset (response);
+ return GST_RTSP_ERROR;
+ }
+
+ props = g_strsplit (media_properties, ",", -2);
+ for (i = 0; props[i]; i++) {
+ prop = props[i];
+
+ while (*prop == ' ')
+ prop++;
+
+ if (strstr (prop, "Random-Access")) {
+ gchar **random_seekable_val = g_strsplit (prop, "=", 2);
+
+ if (!random_seekable_val[1])
+ src->seekable = G_MAXFLOAT;
+ else
+ src->seekable = g_ascii_strtod (random_seekable_val[1], NULL);
+
+ g_strfreev (random_seekable_val);
+ } else if (!g_strcmp0 (prop, "No-Seeking")) {
+ src->seekable = -1.0;
+ } else if (!g_strcmp0 (prop, "Beginning-Only")) {
+ src->seekable = 0.0;
+ }
+ }
+
+ g_strfreev (props);
+ }
+
+ done:
+ /* clean up our transport struct */
+ gst_rtsp_transport_init (&transport);
+ /* clean up used RTSP messages */
+ gst_rtsp_message_unset (response);
+
+ return GST_RTSP_OK;
+
+ no_transport:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("Server did not select transport."));
+
+ gst_rtsp_message_unset (response);
+ return GST_RTSP_ERROR;
+ }
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_setup_streams_end (GstRTSPSrc * src, gboolean async)
+ {
+ GList *tmp;
+ GstRTSPConnInfo *conninfo;
+
+ g_assert (src->version >= GST_RTSP_VERSION_2_0);
+
+ conninfo = &src->conninfo;
+ for (tmp = src->streams; tmp; tmp = tmp->next) {
+ GstRTSPStream *stream = (GstRTSPStream *) tmp->data;
+ GstRTSPMessage response = { 0, };
+
+ if (!stream->waiting_setup_response)
+ continue;
+
+ if (!src->conninfo.connection)
+ conninfo = &((GstRTSPStream *) tmp->data)->conninfo;
+
+ gst_rtsp_src_receive_response (src, conninfo, &response, NULL);
+
+ gst_rtsp_src_setup_stream_from_response (src, stream,
+ &response, NULL, 0, NULL, NULL);
+ }
+
+ return GST_RTSP_OK;
+ }
+
/* Perform the SETUP request for all the streams.
*
* We ask the server for a specific transport, which initially includes all the
* Once the server replied with a transport, we configure the other streams
* with the same transport.
*
- * This function will also configure the stream for the selected transport,
- * which basically means creating the pipeline.
+ * In case setup request are not pipelined, this function will also configure the
+ * stream for the selected transport, which basically means creating the pipeline.
+ * Otherwise, the first stream is setup right away from the reply and a
+ * CMD_FINALIZE_SETUP command is set for the stream pipelines to happen on the
+ * remaining streams from the RTSP thread.
*/
static GstRTSPResult
- gst_rtspsrc_setup_streams (GstRTSPSrc * src, gboolean async)
+ gst_rtspsrc_setup_streams_start (GstRTSPSrc * src, gboolean async)
{
GList *walk;
GstRTSPResult res = GST_RTSP_ERROR;
gint rtpport, rtcpport;
GstRTSPUrl *url;
gchar *hval;
+ gchar *pipelined_request_id = NULL;
if (src->conninfo.connection) {
url = gst_rtsp_connection_get_url (src->conninfo.connection);
caps = stream_get_caps_for_pt (stream, stream->default_pt);
if (caps == NULL) {
- GST_DEBUG_OBJECT (src, "skipping stream %p, no caps", stream);
+ GST_WARNING_OBJECT (src, "skipping stream %p, no caps", stream);
continue;
}
/* skip setup if we have no URL for it */
if (stream->conninfo.location == NULL) {
- GST_DEBUG_OBJECT (src, "skipping stream %p, no setup", stream);
+ GST_WARNING_OBJECT (src, "skipping stream %p, no setup", stream);
continue;
}
if (src->conninfo.connection == NULL) {
if (!gst_rtsp_conninfo_connect (src, &stream->conninfo, async)) {
- GST_DEBUG_OBJECT (src, "skipping stream %p, failed to connect", stream);
+ GST_WARNING_OBJECT (src, "skipping stream %p, failed to connect",
+ stream);
continue;
}
conninfo = &stream->conninfo;
}
GST_DEBUG_OBJECT (src, "transport is now %s", GST_STR_NULL (transports));
-
/* create SETUP request */
res =
gst_rtspsrc_init_request (src, &request, GST_RTSP_SETUP,
goto create_request_failed;
}
+ if (src->version >= GST_RTSP_VERSION_2_0) {
+ if (!pipelined_request_id)
+ pipelined_request_id = g_strdup_printf ("%d",
+ g_random_int_range (0, G_MAXINT32));
+
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_PIPELINED_REQUESTS,
+ pipelined_request_id);
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_ACCEPT_RANGES,
+ "npt, clock, smpte, clock");
+ }
+
/* select transport */
gst_rtsp_message_take_header (&request, GST_RTSP_HDR_TRANSPORT, transports);
+ if (stream->is_backchannel && src->backchannel == BACKCHANNEL_ONVIF)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
/* set up keys */
if (stream->profile == GST_RTSP_PROFILE_SAVP ||
stream->profile == GST_RTSP_PROFILE_SAVPF) {
stream->id));
/* handle the code ourselves */
- res = gst_rtspsrc_send (src, conninfo, &request, &response, &code);
+ res =
+ gst_rtspsrc_send (src, conninfo, &request,
+ pipelined_request_id ? NULL : &response, &code, NULL);
if (res < 0)
goto send_error;
goto response_error;
}
- /* parse response transport */
- {
- gchar *resptrans = NULL;
- GstRTSPTransport transport = { 0 };
-
- gst_rtsp_message_get_header (&response, GST_RTSP_HDR_TRANSPORT,
- &resptrans, 0);
- if (!resptrans) {
- gst_rtspsrc_stream_free_udp (stream);
- goto no_transport;
- }
-
- /* parse transport, go to next stream on parse error */
- if (gst_rtsp_transport_parse (resptrans, &transport) != GST_RTSP_OK) {
- GST_WARNING_OBJECT (src, "failed to parse transport %s", resptrans);
- goto next;
- }
- /* update allowed transports for other streams. once the transport of
- * one stream has been determined, we make sure that all other streams
- * are configured in the same way */
- switch (transport.lower_transport) {
- case GST_RTSP_LOWER_TRANS_TCP:
- GST_DEBUG_OBJECT (src, "stream %p as TCP interleaved", stream);
- protocols = GST_RTSP_LOWER_TRANS_TCP;
- src->interleaved = TRUE;
- /* update free channels */
- src->free_channel =
- MAX (transport.interleaved.min, src->free_channel);
- src->free_channel =
- MAX (transport.interleaved.max, src->free_channel);
- src->free_channel++;
- break;
- case GST_RTSP_LOWER_TRANS_UDP_MCAST:
- /* only allow multicast for other streams */
- GST_DEBUG_OBJECT (src, "stream %p as UDP multicast", stream);
- protocols = GST_RTSP_LOWER_TRANS_UDP_MCAST;
- /* if the server selected our ports, increment our counters so that
- * we select a new port later */
- if (src->next_port_num == transport.port.min &&
- src->next_port_num + 1 == transport.port.max) {
- src->next_port_num += 2;
- }
- break;
- case GST_RTSP_LOWER_TRANS_UDP:
- /* only allow unicast for other streams */
- GST_DEBUG_OBJECT (src, "stream %p as UDP unicast", stream);
- protocols = GST_RTSP_LOWER_TRANS_UDP;
- break;
+ if (!pipelined_request_id) {
+ /* parse response transport */
+ res = gst_rtsp_src_setup_stream_from_response (src, stream,
+ &response, &protocols, retry, &rtpport, &rtcpport);
+ switch (res) {
+ case GST_RTSP_ERROR:
+ goto cleanup_error;
+ case GST_RTSP_ELAST:
+ goto retry;
default:
- GST_DEBUG_OBJECT (src, "stream %p unknown transport %d", stream,
- transport.lower_transport);
break;
}
-
- if (!src->interleaved || !retry) {
- /* now configure the stream with the selected transport */
- if (!gst_rtspsrc_stream_configure_transport (stream, &transport)) {
- GST_DEBUG_OBJECT (src,
- "could not configure stream %p transport, skipping stream",
- stream);
- goto next;
- } else if (stream->udpsrc[0] && stream->udpsrc[1]) {
- /* retain the first allocated UDP port pair */
- g_object_get (G_OBJECT (stream->udpsrc[0]), "port", &rtpport, NULL);
- g_object_get (G_OBJECT (stream->udpsrc[1]), "port", &rtcpport, NULL);
- }
- }
- /* we need to activate at least one streams when we detect activity */
+ } else {
+ stream->waiting_setup_response = TRUE;
+ /* we need to activate at least one stream when we detect activity */
src->need_activate = TRUE;
+ }
- /* stream is setup now */
- stream->setup = TRUE;
- {
- GList *skip = walk;
+ {
+ GList *skip = walk;
- while (TRUE) {
- GstRTSPStream *sskip;
+ while (TRUE) {
+ GstRTSPStream *sskip;
- skip = g_list_next (skip);
- if (skip == NULL)
- break;
+ skip = g_list_next (skip);
+ if (skip == NULL)
+ break;
- sskip = (GstRTSPStream *) skip->data;
+ sskip = (GstRTSPStream *) skip->data;
- /* skip all streams with the same control url */
- if (g_str_equal (stream->conninfo.location, sskip->conninfo.location)) {
- GST_DEBUG_OBJECT (src, "found stream %p with same control %s",
- sskip, sskip->conninfo.location);
- sskip->skipped = TRUE;
- }
+ /* skip all streams with the same control url */
+ if (g_str_equal (stream->conninfo.location, sskip->conninfo.location)) {
+ GST_DEBUG_OBJECT (src, "found stream %p with same control %s",
+ sskip, sskip->conninfo.location);
+ sskip->skipped = TRUE;
}
}
- next:
- /* clean up our transport struct */
- gst_rtsp_transport_init (&transport);
- /* clean up used RTSP messages */
- gst_rtsp_message_unset (&request);
- gst_rtsp_message_unset (&response);
}
+ gst_rtsp_message_unset (&request);
+ }
+
+ if (pipelined_request_id) {
+ gst_rtspsrc_setup_streams_end (src, TRUE);
}
/* store the transport protocol that was configured */
gst_rtsp_ext_list_stream_select (src->extensions, url);
+ if (pipelined_request_id)
+ g_free (pipelined_request_id);
+
/* if there is nothing to activate, error out */
if (!src->need_activate)
goto nothing_to_activate;
/* ERRORS */
no_protocols:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_INVALID_PROTOCOL,
+ "Could not connect to server, no protocols left");
+#else
/* no transport possible, post an error and stop */
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("Could not connect to server, no protocols left"));
+#endif
return GST_RTSP_ERROR;
}
no_streams:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONTENT_NOT_FOUND,
+ "SDP contains no streams");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("SDP contains no streams"));
+#endif
return GST_RTSP_ERROR;
}
create_request_failed:
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto cleanup_error;
}
setup_transport_failed:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not setup transport.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Could not setup transport."));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
response_error:
{
+#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
const gchar *str = gst_rtsp_status_as_text (code);
+#endif
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_UNEXPECTED_MSG,
+ "Error from Server .");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Error (%d): %s", code, GST_STR_NULL (str)));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "send interrupted");
}
g_free (str);
goto cleanup_error;
}
- no_transport:
- {
- #ifdef TIZEN_FEATURE_RTSP_MODIFICATION
- gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_TRANSPORT,
- "Server did not select transport.");
- #else
- GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
- ("Server did not select transport."));
- #endif
- res = GST_RTSP_ERROR;
- goto cleanup_error;
- }
nothing_to_activate:
{
/* none of the available error codes is really right .. */
if (unsupported_real) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
+ "No supported stream was found. You might need to install a GStreamer RTSP extension plugin for Real media streams.");
+#else
GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
(_("No supported stream was found. You might need to install a "
"GStreamer RTSP extension plugin for Real media streams.")),
(NULL));
+#endif
} else {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
+ "No supported stream was found. You might need to allow more transport protocols or may otherwise be missing the right GStreamer RTSP extension plugin.");
+#else
GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
(_("No supported stream was found. You might need to allow "
"more transport protocols or may otherwise be missing "
"the right GStreamer RTSP extension plugin.")), (NULL));
+#endif
}
return GST_RTSP_ERROR;
}
cleanup_error:
{
+ if (pipelined_request_id)
+ g_free (pipelined_request_id);
gst_rtsp_message_unset (&request);
gst_rtsp_message_unset (&response);
return res;
/* we need to start playback without clipping from the position reported by
* the server */
segment->start = seconds;
+#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
+/*
+The range-min points to the start of the segment , not the current position.
+After getting the current position from MSL during normal pause/resume or during seek , we should not
+update the segment->position again with the rtp header npt timestamp.
+*/
segment->position = seconds;
+#endif
if (therange->max.type == GST_RTSP_TIME_NOW)
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ seconds = 0;
+#else
seconds = -1;
+#endif
else if (therange->max.type == GST_RTSP_TIME_END)
seconds = -1;
else
src->control = g_strdup (control);
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ src->is_audio_codec_supported = FALSE;
+ src->is_video_codec_supported = FALSE;
+#endif
+
/* create streams */
n_streams = gst_sdp_message_medias_len (sdp);
for (i = 0; i < n_streams; i++) {
}
src->state = GST_RTSP_STATE_INIT;
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* Check for the support for the Media codecs */
+ if ((!src->is_audio_codec_supported) && (!src->is_video_codec_supported)) {
+ GST_ERROR_OBJECT (src, "UnSupported Media Type !!!! \n");
+ goto unsupported_file_type;
+ } else {
+ GST_DEBUG_OBJECT (src, "Supported Media Type. \n");
+ }
+#endif
/* setup streams */
- if ((res = gst_rtspsrc_setup_streams (src, async)) < 0)
+ if ((res = gst_rtspsrc_setup_streams_start (src, async)) < 0)
goto setup_failed;
/* reset our state */
gst_rtspsrc_cleanup (src);
return res;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+unsupported_file_type:
+ {
+ gst_rtspsrc_post_error_message (src,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
+ "No supported stream was found");
+ res = GST_RTSP_ERROR;
+ gst_rtspsrc_cleanup (src);
+ return res;
+ }
+#endif
}
static GstRTSPResult
guint8 *data;
guint size;
gchar *respcont = NULL;
+ GstRTSPVersion versions[] =
+ { GST_RTSP_VERSION_2_0, GST_RTSP_VERSION_INVALID };
+
+ src->version = src->default_version;
+ if (src->default_version == GST_RTSP_VERSION_2_0) {
+ versions[0] = GST_RTSP_VERSION_1_0;
+ }
restart:
src->need_redirect = FALSE;
goto connect_failed;
/* create OPTIONS */
- GST_DEBUG_OBJECT (src, "create options...");
+ GST_DEBUG_OBJECT (src, "create options... (%s)", async ? "async" : "sync");
res =
gst_rtspsrc_init_request (src, &request, GST_RTSP_OPTIONS,
src->conninfo.url_str);
goto create_request_failed;
/* send OPTIONS */
+ request.type_data.request.version = src->version;
GST_DEBUG_OBJECT (src, "send options...");
if (async)
if ((res =
gst_rtspsrc_send (src, &src->conninfo, &request, &response,
- NULL)) < 0)
+ NULL, versions)) < 0) {
goto send_error;
+ }
+
+ src->version = request.type_data.request.version;
+ GST_INFO_OBJECT (src, "Now using version: %s",
+ gst_rtsp_version_as_text (src->version));
/* parse OPTIONS */
if (!gst_rtspsrc_parse_methods (src, &response))
gst_rtsp_message_add_header (&request, GST_RTSP_HDR_ACCEPT,
"application/sdp");
+ if (src->backchannel == BACKCHANNEL_ONVIF)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+ /* TODO: Handle the case when backchannel is unsupported and goto restart */
+
/* send DESCRIBE */
GST_DEBUG_OBJECT (src, "send describe...");
if ((res =
gst_rtspsrc_send (src, &src->conninfo, &request, &response,
- NULL)) < 0)
+ NULL, NULL)) < 0)
goto send_error;
/* we only perform redirect for describe and play, currently */
/* ERRORS */
no_url:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_INVALID_URL,
+ "No valid RTSP URL was provided");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, (NULL),
("No valid RTSP URL was provided"));
+#endif
goto cleanup_error;
}
connect_failed:
gchar *str = gst_rtsp_strresult (res);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Failed to connect.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
("Failed to connect. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "connect interrupted");
}
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto cleanup_error;
}
}
wrong_content_type:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_OPTION_NOT_SUPPORTED,
+ "Server does not support SDP. ");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Server does not support SDP, got %s.", respcont));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
no_describe:
{
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
+ "Server can not provide an SDP.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Server can not provide an SDP."));
+#endif
res = GST_RTSP_ERROR;
goto cleanup_error;
}
/* do TEARDOWN */
res =
gst_rtspsrc_init_request (src, &request, GST_RTSP_TEARDOWN, setup_url);
+ GST_LOG_OBJECT (src, "Teardown on %s", setup_url);
if (res < 0)
goto create_request_failed;
+ if (stream->is_backchannel && src->backchannel == BACKCHANNEL_ONVIF)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
if (async)
GST_ELEMENT_PROGRESS (src, CONTINUE, "close", ("Closing stream"));
- if ((res = gst_rtspsrc_send (src, info, &request, &response, NULL)) < 0)
+ if ((res =
+ gst_rtspsrc_send (src, info, &request, &response, NULL, NULL)) < 0)
goto send_error;
/* FIXME, parse result? */
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto close;
}
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "TEARDOWN interrupted");
}
gen_range_header (GstRTSPSrc * src, GstSegment * segment)
{
gchar val_str[G_ASCII_DTOSTR_BUF_SIZE] = { 0, };
-
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ if (src->start_position != 0 && segment->position == 0) {
+ segment->position = src->start_position;
+ src->start_position = 0;
+ }
+#endif
if (src->range && src->range->min.type == GST_RTSP_TIME_NOW) {
g_strlcpy (val_str, "now", sizeof (val_str));
} else {
((gdouble) segment->position) / GST_SECOND);
}
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_DEBUG_OBJECT (src, "Range Header Added : npt=%s-", val_str);
+#endif
return g_strdup_printf ("npt=%s-", val_str);
}
}
static GstRTSPResult
- gst_rtspsrc_play (GstRTSPSrc * src, GstSegment * segment, gboolean async)
+ gst_rtspsrc_play (GstRTSPSrc * src, GstSegment * segment, gboolean async,
+ const gchar * seek_style)
{
GstRTSPMessage request = { 0 };
GstRTSPMessage response = { 0 };
if (res < 0)
goto create_request_failed;
- if (src->need_range) {
+ if (src->need_range && src->seekable >= 0.0) {
+#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
hval = gen_range_header (src, segment);
gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
+#endif
/* store the newsegment event so it can be sent from the streaming thread. */
src->need_segment = TRUE;
}
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ else {
+/*
+ Update the position with the MSL current position, as gst_rtspsrc_get_position() does not return the correct position.
+*/
+ GST_DEBUG_OBJECT (src,
+ " During normal pause-resume , segment->position=%" GST_TIME_FORMAT
+ ",src->start_position=%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->position),
+ GST_TIME_ARGS (src->start_position));
+ segment->position = src->last_pos;
+ }
+
+/*
+ Send the NPT range header with each PLAY request so that the segment position is updated properly.
+*/
+ hval = gen_range_header (src, segment);
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
+#endif
if (segment->rate != 1.0) {
gchar hval[G_ASCII_DTOSTR_BUF_SIZE];
gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SPEED, hval);
}
+ if (seek_style)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SEEK_STYLE,
+ seek_style);
+
+ /* when we have an ONVIF audio backchannel, the PLAY request must have the
+ * Require: header when doing either aggregate or non-aggregate control */
+ if (src->backchannel == BACKCHANNEL_ONVIF &&
+ (control || stream->is_backchannel))
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
if (async)
GST_ELEMENT_PROGRESS (src, CONTINUE, "request", ("Sending PLAY request"));
- if ((res = gst_rtspsrc_send (src, conninfo, &request, &response, NULL)) < 0)
+ if ((res =
+ gst_rtspsrc_send (src, conninfo, &request, &response, NULL, NULL))
+ < 0)
goto send_error;
if (src->need_redirect) {
/* seek may have silently failed as it is not supported */
if (!(src->methods & GST_RTSP_PLAY)) {
GST_DEBUG_OBJECT (src, "PLAY Range not supported; re-enable PLAY");
+
+ if (src->version >= GST_RTSP_VERSION_2_0 && src->seekable >= 0.0) {
+ GST_WARNING_OBJECT (src, "Server declared stream as seekable but"
+ " playing with range failed... Ignoring information.");
+ }
/* obviously it is supported as we made it here */
src->methods |= GST_RTSP_PLAY;
- src->seekable = FALSE;
+ src->seekable = -1.0;
/* but there is nothing to parse in the response,
* so convey we have no idea and not to expect anything particular */
clear_rtp_base (src, stream);
/* ERRORS */
open_failed:
{
- GST_DEBUG_OBJECT (src, "failed to open stream");
+ GST_WARNING_OBJECT (src, "failed to open stream");
goto done;
}
not_supported:
{
- GST_DEBUG_OBJECT (src, "PLAY is not supported");
+ GST_WARNING_OBJECT (src, "PLAY is not supported");
goto done;
}
was_playing:
{
- GST_DEBUG_OBJECT (src, "we were already PLAYING");
+ GST_WARNING_OBJECT (src, "we were already PLAYING");
goto done;
}
create_request_failed:
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request. ");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto done;
}
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "PLAY interrupted");
}
setup_url)) < 0)
goto create_request_failed;
- if ((res = gst_rtspsrc_send (src, conninfo, &request, &response, NULL)) < 0)
+ /* when we have an ONVIF audio backchannel, the PAUSE request must have the
+ * Require: header when doing either aggregate or non-aggregate control */
+ if (src->backchannel == BACKCHANNEL_ONVIF &&
+ (control || stream->is_backchannel))
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
+ if ((res =
+ gst_rtspsrc_send (src, conninfo, &request, &response, NULL,
+ NULL)) < 0)
goto send_error;
gst_rtsp_message_unset (&request);
{
gchar *str = gst_rtsp_strresult (res);
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
+ "Could not create request.");
+#else
GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
("Could not create request. (%s)", str));
+#endif
g_free (str);
goto done;
}
gst_rtsp_message_unset (&request);
if (res != GST_RTSP_EINTR) {
- "Could not send message. ");
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not send message.");
+#else
GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
("Could not send message. (%s)", str));
+#endif
} else {
GST_WARNING_OBJECT (src, "PAUSE interrupted");
}
gst_rtspsrc_thread (GstRTSPSrc * src)
{
gint cmd;
+ ParameterRequest *req = NULL;
GST_OBJECT_LOCK (src);
cmd = src->pending_cmd;
if (cmd == CMD_RECONNECT || cmd == CMD_PLAY || cmd == CMD_PAUSE
- || cmd == CMD_LOOP || cmd == CMD_OPEN)
- src->pending_cmd = CMD_LOOP;
- else
+ || cmd == CMD_LOOP || cmd == CMD_OPEN || cmd == CMD_GET_PARAMETER
+ || cmd == CMD_SET_PARAMETER) {
+ if (g_queue_is_empty (&src->set_get_param_q)) {
+ src->pending_cmd = CMD_LOOP;
+ } else {
+ ParameterRequest *next_req;
+ req = g_queue_pop_head (&src->set_get_param_q);
+ next_req = g_queue_peek_head (&src->set_get_param_q);
+ src->pending_cmd = next_req ? next_req->cmd : CMD_LOOP;
+ }
+ } else
src->pending_cmd = CMD_WAIT;
GST_DEBUG_OBJECT (src, "got command %s", cmd_to_string (cmd));
gst_rtspsrc_open (src, TRUE);
break;
case CMD_PLAY:
- gst_rtspsrc_play (src, &src->segment, TRUE);
+ gst_rtspsrc_play (src, &src->segment, TRUE, NULL);
break;
case CMD_PAUSE:
gst_rtspsrc_pause (src, TRUE);
case CMD_CLOSE:
gst_rtspsrc_close (src, TRUE, FALSE);
break;
+ case CMD_GET_PARAMETER:
+ gst_rtspsrc_get_parameter (src, req);
+ break;
+ case CMD_SET_PARAMETER:
+ gst_rtspsrc_set_parameter (src, req);
+ break;
case CMD_LOOP:
gst_rtspsrc_loop (src);
break;
}
GST_OBJECT_LOCK (src);
+ /* No more cmds, wake any waiters */
+ g_cond_broadcast (&src->cmd_cond);
/* and go back to sleep */
if (src->pending_cmd == CMD_WAIT) {
if (src->task)
{
GstRTSPSrc *rtspsrc;
GstStateChangeReturn ret;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ guint64 end_time;
+#endif
rtspsrc = GST_RTSPSRC (element);
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GST_WARNING_OBJECT (rtspsrc, "State change transition: %d \n", transition);
++#endif
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
ret = GST_STATE_CHANGE_SUCCESS;
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+      /* don't change to the PAUSED state before the stream open has completed.
+         see gst_rtspsrc_loop_complete_cmd() */
+ g_mutex_lock (&(rtspsrc)->pause_lock);
+ end_time = g_get_monotonic_time () + 10 * G_TIME_SPAN_SECOND;
+ if (!g_cond_wait_until (&(rtspsrc)->open_end, &(rtspsrc)->pause_lock,
+ end_time)) {
+ GST_WARNING_OBJECT (rtspsrc,
+ "time out: stream opend is not completed yet..");
+ }
+ g_mutex_unlock (&(rtspsrc)->pause_lock);
+#endif
ret = GST_STATE_CHANGE_NO_PREROLL;
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
ret = GST_STATE_CHANGE_NO_PREROLL;
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_CLOSE, CMD_ALL);
+ gst_rtspsrc_loop_send_cmd_and_wait (rtspsrc, CMD_CLOSE, CMD_ALL,
+ rtspsrc->teardown_timeout);
ret = GST_STATE_CHANGE_SUCCESS;
break;
case GST_STATE_CHANGE_READY_TO_NULL:
iface->set_uri = gst_rtspsrc_uri_set_uri;
}
+
+ /* send GET_PARAMETER */
+ static GstRTSPResult
+ gst_rtspsrc_get_parameter (GstRTSPSrc * src, ParameterRequest * req)
+ {
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GstRTSPResult res;
+ GstRTSPStatusCode code = GST_RTSP_STS_OK;
+ const gchar *control;
+ gchar *recv_body = NULL;
+ guint recv_body_len;
+
+ GST_DEBUG_OBJECT (src, "creating server get_parameter");
+
+ if ((res = gst_rtspsrc_ensure_open (src, FALSE)) < 0)
+ goto open_failed;
+
+ control = get_aggregate_control (src);
+ if (control == NULL)
+ goto no_control;
+
+ if (!(src->methods & GST_RTSP_GET_PARAMETER))
+ goto not_supported;
+
+ gst_rtspsrc_connection_flush (src, FALSE);
+
+ res = gst_rtsp_message_init_request (&request, GST_RTSP_GET_PARAMETER,
+ control);
+ if (res < 0)
+ goto create_request_failed;
+
+ res = gst_rtsp_message_add_header (&request, GST_RTSP_HDR_CONTENT_TYPE,
+ req->content_type == NULL ? "text/parameters" : req->content_type);
+ if (res < 0)
+ goto add_content_hdr_failed;
+
+ if (req->body && req->body->len) {
+ res =
+ gst_rtsp_message_set_body (&request, (guint8 *) req->body->str,
+ req->body->len);
+ if (res < 0)
+ goto set_body_failed;
+ }
+
+ if ((res = gst_rtspsrc_send (src, &src->conninfo,
+ &request, &response, &code, NULL)) < 0)
+ goto send_error;
+
+ res = gst_rtsp_message_get_body (&response, (guint8 **) & recv_body,
+ &recv_body_len);
+ if (res < 0)
+ goto get_body_failed;
+
+ done:
+ {
+ gst_promise_reply (req->promise,
+ gst_structure_new ("get-parameter-reply",
+ "rtsp-result", G_TYPE_INT, res,
+ "rtsp-code", G_TYPE_INT, code,
+ "rtsp-reason", G_TYPE_STRING, gst_rtsp_status_as_text (code),
+ "body", G_TYPE_STRING, GST_STR_NULL (recv_body), NULL));
+ free_param_data (req);
+
+
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+
+ return res;
+ }
+
+ /* ERRORS */
+ open_failed:
+ {
+ GST_DEBUG_OBJECT (src, "failed to open stream");
+ goto done;
+ }
+ no_control:
+ {
+ GST_DEBUG_OBJECT (src, "no control url to send GET_PARAMETER");
+ res = GST_RTSP_ERROR;
+ goto done;
+ }
+ not_supported:
+ {
+ GST_DEBUG_OBJECT (src, "GET_PARAMETER is not supported");
+ res = GST_RTSP_ERROR;
+ goto done;
+ }
+ create_request_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not create GET_PARAMETER request");
+ goto done;
+ }
+ add_content_hdr_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not add content header");
+ goto done;
+ }
+ set_body_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not set body");
+ goto done;
+ }
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ GST_ELEMENT_WARNING (src, RESOURCE, WRITE, (NULL),
+ ("Could not send get-parameter. (%s)", str));
+ g_free (str);
+ goto done;
+ }
+ get_body_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not get body");
+ goto done;
+ }
+ }
+
+ /* send SET_PARAMETER */
+ static GstRTSPResult
+ gst_rtspsrc_set_parameter (GstRTSPSrc * src, ParameterRequest * req)
+ {
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GstRTSPResult res = GST_RTSP_OK;
+ GstRTSPStatusCode code = GST_RTSP_STS_OK;
+ const gchar *control;
+
+ GST_DEBUG_OBJECT (src, "creating server set_parameter");
+
+ if ((res = gst_rtspsrc_ensure_open (src, FALSE)) < 0)
+ goto open_failed;
+
+ control = get_aggregate_control (src);
+ if (control == NULL)
+ goto no_control;
+
+ if (!(src->methods & GST_RTSP_SET_PARAMETER))
+ goto not_supported;
+
+ gst_rtspsrc_connection_flush (src, FALSE);
+
+ res =
+ gst_rtsp_message_init_request (&request, GST_RTSP_SET_PARAMETER, control);
+ if (res < 0)
+ goto send_error;
+
+ res = gst_rtsp_message_add_header (&request, GST_RTSP_HDR_CONTENT_TYPE,
+ req->content_type == NULL ? "text/parameters" : req->content_type);
+ if (res < 0)
+ goto add_content_hdr_failed;
+
+ if (req->body && req->body->len) {
+ res =
+ gst_rtsp_message_set_body (&request, (guint8 *) req->body->str,
+ req->body->len);
+
+ if (res < 0)
+ goto set_body_failed;
+ }
+
+ if ((res = gst_rtspsrc_send (src, &src->conninfo,
+ &request, &response, &code, NULL)) < 0)
+ goto send_error;
+
+ done:
+ {
+ gst_promise_reply (req->promise, gst_structure_new ("set-parameter-reply",
+ "rtsp-result", G_TYPE_INT, res,
+ "rtsp-code", G_TYPE_INT, code,
+ "rtsp-reason", G_TYPE_STRING, gst_rtsp_status_as_text (code),
+ NULL));
+ free_param_data (req);
+
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+
+ return res;
+ }
+
+ /* ERRORS */
+ open_failed:
+ {
+ GST_DEBUG_OBJECT (src, "failed to open stream");
+ goto done;
+ }
+ no_control:
+ {
+ GST_DEBUG_OBJECT (src, "no control url to send SET_PARAMETER");
+ res = GST_RTSP_ERROR;
+ goto done;
+ }
+ not_supported:
+ {
+ GST_DEBUG_OBJECT (src, "SET_PARAMETER is not supported");
+ res = GST_RTSP_ERROR;
+ goto done;
+ }
+ add_content_hdr_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not add content header");
+ goto done;
+ }
+ set_body_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not set body");
+ goto done;
+ }
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ GST_ELEMENT_WARNING (src, RESOURCE, WRITE, (NULL),
+ ("Could not send set-parameter. (%s)", str));
+ g_free (str);
+ goto done;
+ }
+ }
+
typedef struct _RTSPKeyValue
{
GstRTSPHeaderField field;
else
key_string = gst_rtsp_header_as_text (key_value->field);
- GST_INFO_OBJECT (src, " key: '%s', value: '%s'", key_string,
+ GST_LOG_OBJECT (src, " key: '%s', value: '%s'", key_string,
key_value->value);
}
g_return_if_fail (src != NULL);
g_return_if_fail (msg != NULL);
- if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) < GST_LEVEL_INFO)
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) < GST_LEVEL_LOG)
return;
- GST_INFO_OBJECT (src, "--------------------------------------------");
+ GST_LOG_OBJECT (src, "--------------------------------------------");
switch (msg->type) {
case GST_RTSP_MESSAGE_REQUEST:
- GST_INFO_OBJECT (src, "RTSP request message %p", msg);
- GST_INFO_OBJECT (src, " request line:");
- GST_INFO_OBJECT (src, " method: '%s'",
+ GST_LOG_OBJECT (src, "RTSP request message %p", msg);
+ GST_LOG_OBJECT (src, " request line:");
+ GST_LOG_OBJECT (src, " method: '%s'",
gst_rtsp_method_as_text (msg->type_data.request.method));
- GST_INFO_OBJECT (src, " uri: '%s'", msg->type_data.request.uri);
- GST_INFO_OBJECT (src, " version: '%s'",
+ GST_LOG_OBJECT (src, " uri: '%s'", msg->type_data.request.uri);
+ GST_LOG_OBJECT (src, " version: '%s'",
gst_rtsp_version_as_text (msg->type_data.request.version));
- GST_INFO_OBJECT (src, " headers:");
+ GST_LOG_OBJECT (src, " headers:");
key_value_foreach (msg->hdr_fields, dump_key_value, src);
- GST_INFO_OBJECT (src, " body:");
+ GST_LOG_OBJECT (src, " body:");
gst_rtsp_message_get_body (msg, &data, &size);
if (size > 0) {
body_string = g_string_new_len ((const gchar *) data, size);
- GST_INFO_OBJECT (src, " %s(%d)", body_string->str, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
g_string_free (body_string, TRUE);
body_string = NULL;
}
break;
case GST_RTSP_MESSAGE_RESPONSE:
- GST_INFO_OBJECT (src, "RTSP response message %p", msg);
- GST_INFO_OBJECT (src, " status line:");
- GST_INFO_OBJECT (src, " code: '%d'", msg->type_data.response.code);
- GST_INFO_OBJECT (src, " reason: '%s'", msg->type_data.response.reason);
- GST_INFO_OBJECT (src, " version: '%s",
+ GST_LOG_OBJECT (src, "RTSP response message %p", msg);
+ GST_LOG_OBJECT (src, " status line:");
+ GST_LOG_OBJECT (src, " code: '%d'", msg->type_data.response.code);
+ GST_LOG_OBJECT (src, " reason: '%s'", msg->type_data.response.reason);
+ GST_LOG_OBJECT (src, " version: '%s",
gst_rtsp_version_as_text (msg->type_data.response.version));
- GST_INFO_OBJECT (src, " headers:");
+ GST_LOG_OBJECT (src, " headers:");
key_value_foreach (msg->hdr_fields, dump_key_value, src);
gst_rtsp_message_get_body (msg, &data, &size);
- GST_INFO_OBJECT (src, " body: length %d", size);
+ GST_LOG_OBJECT (src, " body: length %d", size);
if (size > 0) {
body_string = g_string_new_len ((const gchar *) data, size);
- GST_INFO_OBJECT (src, " %s(%d)", body_string->str, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
g_string_free (body_string, TRUE);
body_string = NULL;
}
break;
case GST_RTSP_MESSAGE_HTTP_REQUEST:
- GST_INFO_OBJECT (src, "HTTP request message %p", msg);
- GST_INFO_OBJECT (src, " request line:");
- GST_INFO_OBJECT (src, " method: '%s'",
+ GST_LOG_OBJECT (src, "HTTP request message %p", msg);
+ GST_LOG_OBJECT (src, " request line:");
+ GST_LOG_OBJECT (src, " method: '%s'",
gst_rtsp_method_as_text (msg->type_data.request.method));
- GST_INFO_OBJECT (src, " uri: '%s'", msg->type_data.request.uri);
- GST_INFO_OBJECT (src, " version: '%s'",
+ GST_LOG_OBJECT (src, " uri: '%s'", msg->type_data.request.uri);
+ GST_LOG_OBJECT (src, " version: '%s'",
gst_rtsp_version_as_text (msg->type_data.request.version));
- GST_INFO_OBJECT (src, " headers:");
+ GST_LOG_OBJECT (src, " headers:");
key_value_foreach (msg->hdr_fields, dump_key_value, src);
- GST_INFO_OBJECT (src, " body:");
+ GST_LOG_OBJECT (src, " body:");
gst_rtsp_message_get_body (msg, &data, &size);
if (size > 0) {
body_string = g_string_new_len ((const gchar *) data, size);
- GST_INFO_OBJECT (src, " %s(%d)", body_string->str, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
g_string_free (body_string, TRUE);
body_string = NULL;
}
break;
case GST_RTSP_MESSAGE_HTTP_RESPONSE:
- GST_INFO_OBJECT (src, "HTTP response message %p", msg);
- GST_INFO_OBJECT (src, " status line:");
- GST_INFO_OBJECT (src, " code: '%d'", msg->type_data.response.code);
- GST_INFO_OBJECT (src, " reason: '%s'", msg->type_data.response.reason);
- GST_INFO_OBJECT (src, " version: '%s'",
+ GST_LOG_OBJECT (src, "HTTP response message %p", msg);
+ GST_LOG_OBJECT (src, " status line:");
+ GST_LOG_OBJECT (src, " code: '%d'", msg->type_data.response.code);
+ GST_LOG_OBJECT (src, " reason: '%s'", msg->type_data.response.reason);
+ GST_LOG_OBJECT (src, " version: '%s'",
gst_rtsp_version_as_text (msg->type_data.response.version));
- GST_INFO_OBJECT (src, " headers:");
+ GST_LOG_OBJECT (src, " headers:");
key_value_foreach (msg->hdr_fields, dump_key_value, src);
gst_rtsp_message_get_body (msg, &data, &size);
- GST_INFO_OBJECT (src, " body: length %d", size);
+ GST_LOG_OBJECT (src, " body: length %d", size);
if (size > 0) {
body_string = g_string_new_len ((const gchar *) data, size);
- GST_INFO_OBJECT (src, " %s(%d)", body_string->str, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
g_string_free (body_string, TRUE);
body_string = NULL;
}
break;
case GST_RTSP_MESSAGE_DATA:
- GST_INFO_OBJECT (src, "RTSP data message %p", msg);
- GST_INFO_OBJECT (src, " channel: '%d'", msg->type_data.data.channel);
- GST_INFO_OBJECT (src, " size: '%d'", msg->body_size);
+ GST_LOG_OBJECT (src, "RTSP data message %p", msg);
+ GST_LOG_OBJECT (src, " channel: '%d'", msg->type_data.data.channel);
+ GST_LOG_OBJECT (src, " size: '%d'", msg->body_size);
gst_rtsp_message_get_body (msg, &data, &size);
if (size > 0) {
body_string = g_string_new_len ((const gchar *) data, size);
- GST_INFO_OBJECT (src, " %s(%d)", body_string->str, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
g_string_free (body_string, TRUE);
body_string = NULL;
}
break;
default:
- GST_INFO_OBJECT (src, "unsupported message type %d", msg->type);
+ GST_LOG_OBJECT (src, "unsupported message type %d", msg->type);
break;
}
- GST_INFO_OBJECT (src, "--------------------------------------------");
+ GST_LOG_OBJECT (src, "--------------------------------------------");
}
static void
#define GST_RTSP_STREAM_LOCK(rtsp) (g_rec_mutex_lock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
#define GST_RTSP_STREAM_UNLOCK(rtsp) (g_rec_mutex_unlock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+typedef enum {
+ GST_RTSPSRC_ERROR_NONE = 0,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_AUDIO,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_VIDEO,
+ GST_RTSPSRC_ERROR_CONNECTION_FAIL,
+ GST_RTSPSRC_ERROR_DNS_FAIL,
+ GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,
+ GST_RTSPSRC_ERROR_BAD_SERVER,
+ GST_RTSPSRC_ERROR_INVALID_PROTOCOL,
+ GST_RTSPSRC_ERROR_INVALID_URL,
+ GST_RTSPSRC_ERROR_UNEXPECTED_MSG,
+ GST_RTSPSRC_ERROR_OUT_OF_MEMORIES,
+ GST_RTSPSRC_ERROR_RTSP_TIMEOUT,
+ GST_RTSPSRC_ERROR_BAD_REQUEST,
+ GST_RTSPSRC_ERROR_NOT_AUTHORIZED,
+ GST_RTSPSRC_ERROR_PAYMENT_REQUIRED,
+ GST_RTSPSRC_ERROR_FORBIDDEN,
+ GST_RTSPSRC_ERROR_CONTENT_NOT_FOUND,
+ GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
+ GST_RTSPSRC_ERROR_NOT_ACCEPTABLE,
+ GST_RTSPSRC_ERROR_PROXY_AUTHENTICATION_REQUIRED,
+ GST_RTSPSRC_ERROR_SERVER_TIMEOUT,
+ GST_RTSPSRC_ERROR_GONE,
+ GST_RTSPSRC_ERROR_LENGTH_REQUIRED,
+ GST_RTSPSRC_ERROR_PRECONDITION_FAILED,
+ GST_RTSPSRC_ERROR_REQUEST_ENTITY_TOO_LARGE,
+ GST_RTSPSRC_ERROR_REQUEST_URI_TOO_LARGE,
+ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
+ GST_RTSPSRC_ERROR_PARAMETER_NOT_UNDERSTOOD,
+ GST_RTSPSRC_ERROR_CONFERENCE_NOT_FOUND,
+ GST_RTSPSRC_ERROR_NOT_ENOUGH_BANDWIDTH,
+ GST_RTSPSRC_ERROR_NO_SESSION_ID,
+ GST_RTSPSRC_ERROR_METHOD_NOT_VALID_IN_THIS_STATE,
+ GST_RTSPSRC_ERROR_HEADER_FIELD_NOT_VALID_FOR_SOURCE,
+ GST_RTSPSRC_ERROR_INVALID_RANGE,
+ GST_RTSPSRC_ERROR_PARAMETER_IS_READONLY,
+ GST_RTSPSRC_ERROR_AGGREGATE_OP_NOT_ALLOWED,
+ GST_RTSPSRC_ERROR_ONLY_AGGREGATE_OP_ALLOWED,
+ GST_RTSPSRC_ERROR_BAD_TRANSPORT,
+ GST_RTSPSRC_ERROR_DESTINATION_UNREACHABLE,
+ GST_RTSPSRC_ERROR_INTERNAL_SERVER_ERROR,
+ GST_RTSPSRC_ERROR_NOT_IMPLEMENTED,
+ GST_RTSPSRC_ERROR_BAD_GATEWAY,
+ GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,
+ GST_RTSPSRC_ERROR_GATEWAY_TIME_OUT ,
+ GST_RTSPSRC_ERROR_RTSP_VERSION_NOT_SUPPORTED,
+ GST_RTSPSRC_ERROR_OPTION_NOT_SUPPORTED,
+}_GstRTSPSrcError;
+typedef _GstRTSPSrcError GstRTSPSrcError;
+#endif
+
typedef struct _GstRTSPConnInfo GstRTSPConnInfo;
struct _GstRTSPConnInfo {
gboolean eos;
gboolean discont;
gboolean need_caps;
+ gboolean waiting_setup_response;
/* for interleaved mode */
guint8 channel[2];
GstElement *udpsink[2];
GstPad *rtcppad;
- /* fakesrc for sending dummy data */
- GstElement *fakesrc;
+ /* fakesrc for sending dummy data or appsrc for sending backchannel data */
+ GstElement *rtpsrc;
/* state */
guint port;
gchar *destination;
gboolean is_multicast;
guint ttl;
+ gboolean is_backchannel;
+
+ /* A unique and stable id we will use for the stream start event */
+ gchar *stream_id;
GstStructure *rtx_pt_map;
+
+ guint32 segment_seqnum[2];
};
/**
+ * GstRTSPSrcTimeoutCause:
+ * @GST_RTSP_SRC_TIMEOUT_CAUSE_RTCP: timeout triggered by RTCP
+ *
+ * Different causes to why the rtspsrc generated the GstRTSPSrcTimeout
+ * message.
+ */
+ typedef enum
+ {
+ GST_RTSP_SRC_TIMEOUT_CAUSE_RTCP
+ } GstRTSPSrcTimeoutCause;
+
+ /**
* GstRTSPNatMethod:
* @GST_RTSP_NAT_NONE: none
* @GST_RTSP_NAT_DUMMY: send dummy packets
GST_RTSP_NAT_DUMMY
} GstRTSPNatMethod;
+
struct _GstRTSPSrc {
GstBin parent;
/* UDP mode loop */
gint pending_cmd;
gint busy_cmd;
+ GCond cmd_cond;
gboolean ignore_timeout;
gboolean open_error;
gchar *user_agent;
GstClockTime max_rtcp_rtp_time_diff;
gboolean rfc7273_sync;
+ guint64 max_ts_offset_adjustment;
+ gint64 max_ts_offset;
+ gboolean max_ts_offset_is_set;
+ gint backchannel;
+ GstClockTime teardown_timeout;
/* state */
GstRTSPState state;
/* supported methods */
gint methods;
- gboolean seekable;
+ /* seekability
+ * -1.0 : Stream is not seekable
+ * 0.0 : seekable only to the beginning
+ * G_MAXFLOAT : Any value is possible
+ *
+ * Any other positive value indicates the longest duration
+ * between any two random access points
+ * */
+ gfloat seekable;
GstClockTime last_pos;
/* session management */
gulong manager_ptmap_id;
gboolean use_buffering;
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ /* media type */
+ gboolean is_audio_codec_supported;
+ gboolean is_video_codec_supported;
+ gchar *audio_codec;
+ gchar *video_codec;
+ gchar *video_frame_size;
+#endif
+
GstRTSPConnInfo conninfo;
+ /* SET/GET PARAMETER requests queue */
+ GQueue set_get_param_q;
+
/* a list of RTSP extensions as GstElement */
GstRTSPExtensionList *extensions;
- #endif
+ GstRTSPVersion default_version;
+ GstRTSPVersion version;
++
+#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
+ GCond open_end;
+ GMutex pause_lock;
+ guint64 start_position;
++#endif
};
struct _GstRTSPSrcClass {
GstBinClass parent_class;
+
+ /* action signals */
+ gboolean (*get_parameter) (GstRTSPSrc *rtsp, const gchar *parameter, const gchar *content_type, GstPromise *promise);
+ gboolean (*get_parameters) (GstRTSPSrc *rtsp, gchar **parameters, const gchar *content_type, GstPromise *promise);
+ gboolean (*set_parameter) (GstRTSPSrc *rtsp, const gchar *name, const gchar *value, const gchar *content_type, GstPromise *promise);
+ GstFlowReturn (*push_backchannel_buffer) (GstRTSPSrc *src, guint id, GstSample *sample);
};
GType gst_rtspsrc_get_type(void);
};
static GstStaticPadTemplate sink_template_factory =
- GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-wav")
+ GST_STATIC_CAPS ("audio/x-wav;audio/x-rf64")
);
#define DEBUG_INIT \
}
static void
+ gst_wavparse_notes_free (GstWavParseNote * note)
+ {
+ if (note)
+ g_free (note->text);
+ g_free (note);
+ }
+
+ static void
+ gst_wavparse_labls_free (GstWavParseLabl * labl)
+ {
+ if (labl)
+ g_free (labl->text);
+ g_free (labl);
+ }
+
+ static void
gst_wavparse_reset (GstWavParse * wav)
{
wav->state = GST_WAVPARSE_START;
wav->dataleft = 0;
wav->datasize = 0;
wav->datastart = 0;
+ wav->chunk_size = 0;
wav->duration = 0;
wav->got_fmt = FALSE;
wav->first = TRUE;
g_list_free_full (wav->cues, g_free);
wav->cues = NULL;
if (wav->labls)
- g_list_free_full (wav->labls, g_free);
+ g_list_free_full (wav->labls, (GDestroyNotify) gst_wavparse_labls_free);
wav->labls = NULL;
+ if (wav->notes)
+ g_list_free_full (wav->notes, (GDestroyNotify) gst_wavparse_notes_free);
+ wav->notes = NULL;
if (wav->caps)
gst_caps_unref (wav->caps);
wav->caps = NULL;
gboolean update;
GstSegment seeksegment = { 0, };
gint64 last_stop;
- guint32 seqnum = 0;
+ guint32 seqnum = GST_SEQNUM_INVALID;
if (event) {
GST_DEBUG_OBJECT (wav, "doing seek with event");
/* BYTE seek event */
event = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, cur,
stop_type, stop);
- gst_event_set_seqnum (event, seqnum);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, seqnum);
res = gst_pad_push_event (wav->sinkpad, event);
}
return res;
GST_DEBUG_OBJECT (wav, "sending flush start");
fevent = gst_event_new_flush_start ();
- gst_event_set_seqnum (fevent, seqnum);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (fevent, seqnum);
gst_pad_push_event (wav->sinkpad, gst_event_ref (fevent));
gst_pad_push_event (wav->srcpad, fevent);
} else {
GST_DEBUG_OBJECT (wav, "sending flush stop");
fevent = gst_event_new_flush_stop (TRUE);
- gst_event_set_seqnum (fevent, seqnum);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (fevent, seqnum);
gst_pad_push_event (wav->sinkpad, gst_event_ref (fevent));
gst_pad_push_event (wav->srcpad, fevent);
}
if (wav->start_segment)
gst_event_unref (wav->start_segment);
wav->start_segment = gst_event_new_segment (&wav->segment);
- gst_event_set_seqnum (wav->start_segment, seqnum);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (wav->start_segment, seqnum);
/* mark discont if we are going to stream from another position. */
if (last_stop != wav->segment.position) {
GST_DEBUG_OBJECT (wav, "Using ds64 datasize");
size64 = wav->datasize;
}
+ wav->chunk_size = size64;
+
/* If size is zero, then the data chunk probably actually extends to
the end of the file */
if (size64 == 0 && upstream_size) {
const gst_riff_acid *acid = NULL;
const guint data_size = sizeof (gst_riff_acid);
gfloat tempo;
+#ifdef TIZEN_FEATURE_WAVPARSE_MODIFICATION
+ const guint8 *data = NULL;
+#endif
GST_INFO_OBJECT (wav, "Have acid chunk");
if (size < data_size) {
goto exit;
}
gst_adapter_flush (wav->adapter, 8);
+#ifdef TIZEN_FEATURE_WAVPARSE_MODIFICATION
+ if (gst_adapter_available (wav->adapter) < 24) {
+ goto exit;
+ }
+ data = gst_adapter_map (wav->adapter, 24);
+ tempo = GST_READ_FLOAT_LE (data + 20);
+#else
acid = (const gst_riff_acid *) gst_adapter_map (wav->adapter,
data_size);
tempo = acid->tempo;
+#endif
gst_adapter_unmap (wav->adapter);
} else {
GstMapInfo map;
}
static GstFlowReturn
- gst_wavparse_stream_data (GstWavParse * wav)
+ gst_wavparse_stream_data (GstWavParse * wav, gboolean flushing)
{
GstBuffer *buf = NULL;
GstFlowReturn res = GST_FLOW_OK;
"offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT " , dataleft: %"
G_GINT64_FORMAT, wav->offset, wav->end_offset, wav->dataleft);
- /* Get the next n bytes and output them */
- if (wav->dataleft == 0 || wav->dataleft < wav->blockalign)
- goto found_eos;
+ if ((wav->dataleft == 0 || wav->dataleft < wav->blockalign)) {
+ /* In case the chunk size is not declared at the beginning, get the size from the
+ * file size directly */
+ if (wav->chunk_size == 0) {
+ gint64 upstream_size = 0;
+
+ /* Get the size of the file */
+ if (!gst_pad_peer_query_duration (wav->sinkpad, GST_FORMAT_BYTES,
+ &upstream_size))
+ goto found_eos;
+
+ if (upstream_size < wav->offset + wav->datastart)
+ goto found_eos;
+
+ /* If the file has been updated since the beginning, continue reading the file */
+ wav->dataleft = upstream_size - wav->offset - wav->datastart;
+ wav->end_offset = upstream_size;
+
+ /* Get the next n bytes and output them, if we can */
+ if (wav->dataleft == 0 || wav->dataleft < wav->blockalign)
+ goto found_eos;
+ } else {
+ goto found_eos;
+ }
+ }
/* scale the amount of data by the segment rate so we get equal
* amounts of data regardless of the playback rate */
if (avail < desired) {
GST_LOG_OBJECT (wav, "Got only %u bytes of data from the sinkpad", avail);
- return GST_FLOW_OK;
- }
- buf = gst_adapter_take_buffer (wav->adapter, desired);
+ /* If we are at the end of the stream, we need to flush whatever we have left */
+ if (avail > 0 && flushing) {
+ if (avail >= wav->blockalign && wav->blockalign > 0) {
+ avail -= (avail % wav->blockalign);
+ buf = gst_adapter_take_buffer (wav->adapter, avail);
+ } else {
+ return GST_FLOW_OK;
+ }
+ } else {
+ return GST_FLOW_OK;
+ }
+ } else {
+ buf = gst_adapter_take_buffer (wav->adapter, desired);
+ }
} else {
if ((res = gst_pad_pull_range (wav->sinkpad, wav->offset,
desired, &buf)) != GST_FLOW_OK)
/* fall-through */
case GST_WAVPARSE_DATA:
- if ((ret = gst_wavparse_stream_data (wav)) != GST_FLOW_OK)
+ if ((ret = gst_wavparse_stream_data (wav, FALSE)) != GST_FLOW_OK)
goto pause;
break;
default:
case GST_WAVPARSE_DATA:
if (buf && GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))
wav->discont = TRUE;
- if ((ret = gst_wavparse_stream_data (wav)) != GST_FLOW_OK)
+ if ((ret = gst_wavparse_stream_data (wav, FALSE)) != GST_FLOW_OK)
goto done;
break;
default:
guint av;
if ((av = gst_adapter_available (wav->adapter)) > 0) {
- ret = gst_wavparse_stream_data (wav);
+ ret = gst_wavparse_stream_data (wav, TRUE);
}
return ret;
if (G_UNLIKELY (wav->first)) {
wav->first = FALSE;
gst_wavparse_add_src_pad (wav, NULL);
- } else {
- /* stream leftover data in current segment */
- gst_wavparse_flush_data (wav);
}
+
+ /* stream leftover data in current segment */
+ gst_wavparse_flush_data (wav);
}
/* fall-through */
}
break;
}
+ case GST_QUERY_SEGMENT:
+ {
+ GstFormat format;
+ gint64 start, stop;
+
+ format = wav->segment.format;
+
+ start =
+ gst_segment_to_stream_time (&wav->segment, format,
+ wav->segment.start);
+ if ((stop = wav->segment.stop) == -1)
+ stop = wav->segment.duration;
+ else
+ stop = gst_segment_to_stream_time (&wav->segment, format, stop);
+
+ gst_query_set_segment (query, wav->segment.rate, format, start, stop);
+ res = TRUE;
+ break;
+ }
default:
res = gst_pad_query_default (pad, parent, query);
break;
--- /dev/null
- Version: 1.12.2
- Release: 2
+%bcond_with x
+%define gst_branch 1.0
+
+Name: gst-plugins-good
++Version: 1.16.2
++Release: 1
+License: LGPL-2.1+
+Summary: GStreamer Streaming-Media Framework Plug-Ins
+Url: http://gstreamer.freedesktop.org/
+Group: Multimedia/Framework
+Source: http://gstreamer.freedesktop.org/src/gst-plugins-good/gst-plugins-good-%{version}.tar.xz
+Source100: common.tar.gz
+BuildRequires: gcc-c++
+BuildRequires: gettext-tools
+BuildRequires: pkgconfig(glib-2.0) >= 2.32
+BuildRequires: pkgconfig(gstreamer-1.0)
+BuildRequires: pkgconfig(gstreamer-plugins-base-1.0)
+BuildRequires: libjpeg-devel
+BuildRequires: orc >= 0.4.16
+BuildRequires: python
+BuildRequires: xsltproc
+BuildRequires: pkgconfig(bzip2)
+BuildRequires: pkgconfig(libpng) >= 1.2
+BuildRequires: pkgconfig(libpulse) >= 1.0
+BuildRequires: pkgconfig(libsoup-2.4)
+BuildRequires: pkgconfig(libxml-2.0) >= 2.4.9
+# TODO find where process.h comes from, not kernel-devel and not wxWidgets so far.
+%if %{with x}
+BuildRequires: pkgconfig(ice)
+BuildRequires: pkgconfig(sm)
+BuildRequires: pkgconfig(xdamage)
+BuildRequires: pkgconfig(xfixes)
+# used by libgstvideo4linux2.so
+BuildRequires: pkgconfig(xv)
+%endif
+
+BuildRequires: pkgconfig(zlib)
+%if "%{tizen_profile_name}" != "tv"
+BuildRequires: pkgconfig(libv4l2)
+%endif
+BuildRequires: pkgconfig(vconf)
+BuildRequires: pkgconfig(gio-2.0)
+Requires: gst-plugins-base >= 1.0.0
+Requires: gstreamer >= 1.0.5
+
+%description
+GStreamer is a streaming media framework based on graphs of filters
+that operate on media data. Applications using this library can do
+anything media-related, from real-time sound processing to playing
+videos. Its plug-in-based architecture means that new data types or
+processing capabilities can be added simply by installing new plug-ins.
+
+%package extra
+Summary: Complementary plugins for %{name}
+Group: Productivity/Multimedia/Other
+Requires: %{name} = %{version}
+Enhances: gst-plugins-good
+
+%description extra
+This package provides complementary plugins for %{name} and
+plugins not included in official Tizen images, which may be used for development / experimental purposes.
+
+%prep
+%setup -q -n gst-plugins-good-%{version}
+%setup -q -T -D -a 100
+
+%build
+# FIXME:
+# warning: failed to load external entity "xml/element-v4l2src-details.xml"
+# warning: failed to load external entity "xml/plugin-video4linux2.xml"
+export V=1
+NOCONFIGURE=1 ./autogen.sh
+export CFLAGS+=" -DTIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE\
+ -DTIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID\
+ -DTIZEN_FEATURE_WAVPARSE_MODIFICATION\
+ -DTIZEN_FEATURE_MP3PARSE_MODIFICATION\
+ -DTIZEN_FEATURE_AACPARSE_MODIFICATION\
+ -DTIZEN_FEATURE_QTDEMUX_MODIFICATION\
+ -DTIZEN_FEATURE_FLVDEMUX_MODIFICATION\
+ -DTIZEN_FEATURE_GST_UPSTREAM\
+ -DTIZEN_FEATURE_RTSP_MODIFICATION\
+ -DTIZEN_FEATURE_GST_MUX_ENHANCEMENT\
+ -DTIZEN_FEATURE_SOUP_MODIFICATION\
+ -DTIZEN_FEATURE_RGVOLUME_MODIFICATION\
+ -DTIZEN_FEATURE_BASEPARSE_MODIFICATION\
+ -DTIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY\
+ -fstack-protector-strong\
+ -Wl,-z,relro\
+ -D_FORTIFY_SOURCE=2"
+%configure\
+%if ! 0%{?ENABLE_AALIB}
+ --disable-aalib\
+%endif
+%if "%{tizen_profile_name}" != "tv"
+ --with-libv4l2 \
+%endif
+ --disable-gtk-doc\
+ --with-gtk=3.0\
+ --disable-monoscope\
+ --disable-y4m\
+ --disable-taglib\
+ --disable-wavpack\
+ --enable-experimental\
+ --disable-equalizer\
+%if "%{tizen_profile_name}" == "tv"
+ --disable-flv\
+ --disable-videobox\
+ --disable-videomixer\
+%endif
+ --disable-effectv\
+ --disable-alpha\
+ --disable-auparse\
+ --disable-effectv\
+ --disable-flx\
+ --disable-goom\
+ --disable-goom2k1\
+ --disable-level\
+ --disable-multipart\
+ --disable-smpte\
+ --disable-spectrum\
+ --disable-cutter\
+ --disable-dtmf\
+ --disable-oss4\
+ --disable-oss\
+ --disable-shapewipe
+
+make %{?_smp_mflags} CFLAGS+="-Wno-error" CXXFLAGS+="-Wno-error"
+
+%install
+%make_install
+%find_lang %{name}-%{gst_branch}
+
+%lang_package -f %{name}-%{gst_branch}
+
+%files
+%manifest %{name}.manifest
+%defattr(-, root, root)
+%license COPYING
+%{_libdir}/gstreamer-%{gst_branch}/libgstalaw.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstalpha.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstalphacolor.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstapetag.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstaudiofx.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstaudioparsers.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstauparse.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstautodetect.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstavi.so
+# Not yet ported
+#%{_libdir}/gstreamer-%{gst_branch}/libgstcutter.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstdebug.so
+# FIXME(review): stale "Not yet ported" marker -- libgstdeinterlace.so below is enabled
+%{_libdir}/gstreamer-%{gst_branch}/libgstdeinterlace.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgsteffectv.so
+
+#%{_datadir}/gstreamer-%{gst_branch}/presets/GstVP8Enc.prs
+
+#%{_libdir}/gstreamer-%{gst_branch}/libgstflxdec.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstgoom.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstgoom2k1.so
+%{_libdir}/gstreamer-%{gst_branch}/libgsticydemux.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstid3demux.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstinterleave.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstisomp4.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstjpeg.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstlevel.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstmatroska.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstmonoscope.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstmulaw.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstmultifile.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstmultipart.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstnavigationtest.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstoss4audio.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstossaudio.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstpulseaudio.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstreplaygain.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstrtp.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstrtpmanager.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstrtsp.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstshapewipe.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstsmpte.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstspectrum.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstspeex.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstudp.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideo4linux2.so
+
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideocrop.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideofilter.so
+%if "%{tizen_profile_name}" != "tv"
+%{_libdir}/gstreamer-%{gst_branch}/libgstflv.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstequalizer.so
+#%{_datadir}/gstreamer-%{gst_branch}/presets/GstIirEqualizer10Bands.prs
+#%{_datadir}/gstreamer-%{gst_branch}/presets/GstIirEqualizer3Bands.prs
+%{_datadir}/gstreamer-%{gst_branch}/presets/GstQTMux.prs
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideobox.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstvideomixer.so
+%endif
+%{_libdir}/gstreamer-%{gst_branch}/libgstwavenc.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstwavparse.so
+%if %{with x}
+%{_libdir}/gstreamer-%{gst_branch}/libgstximagesrc.so
+%endif
+#%{_libdir}/gstreamer-%{gst_branch}/libgsty4menc.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstcairo.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstsoup.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstflac.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstvpx.so
+#%{_libdir}/gstreamer-%{gst_branch}/libgstdtmf.so
+
+
+%files extra
+%manifest %{name}.manifest
+%defattr(-, root, root)
+%if 0%{?ENABLE_AALIB}
+%{_libdir}/gstreamer-%{gst_branch}/libgstaasink.so
+%endif
+%{_libdir}/gstreamer-%{gst_branch}/libgstpng.so
+%{_libdir}/gstreamer-%{gst_branch}/libgstimagefreeze.so
- # Translation of gst-plugins-good to Croatian.
+ # Translation of gst-plugins-good messages to Croatian.
# This file is put in the public domain.
+ # Copyright (C) 2004-2010, 2019 GStreamer core team.
+ # This file is distributed under the same license as the gst-plugins-good package.
#
# Tomislav Krznar <tomislav.krznar@gmail.com>, 2012.
- # Božidar Putanec <bozidarp@yahoo.com>, 2016, 2017.
+ # Božidar Putanec <bozidarp@yahoo.com>, 2016, 2017, 2018, 2019.
msgid ""
msgstr ""
- "Project-Id-Version: gst-plugins-good-1.12.0\n"
+ "Project-Id-Version: gst-plugins-good-1.15.1\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
- "POT-Creation-Date: 2017-07-14 13:30+0300\n"
- "PO-Revision-Date: 2017-05-04 16:09-0800\n"
+ "POT-Creation-Date: 2019-02-26 11:47+0000\n"
+ "PO-Revision-Date: 2019-02-03 13:58-0800\n"
"Last-Translator: Božidar Putanec <bozidarp@yahoo.com>\n"
"Language-Team: Croatian <lokalizacija@linux.hr>\n"
"Language: hr\n"
"X-Bugs: Report translation errors to the Language-Team address.\n"
"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
- "X-Generator: Lokalize 2.0\n"
+ "X-Generator: Poedit 2.2.1\n"
msgid "Jack server not found"
- msgstr "Poslužitelj Jack nije pronađen"
+ msgstr "Server Jack nije pronađen"
msgid "Failed to decode JPEG image"
msgstr "Nije uspjelo dekodirati JPEG sliku"
+ msgid ""
+ "Failed to configure LAME mp3 audio encoder. Check your encoding parameters."
+ msgstr ""
+ "Nije uspjelo konfigurirati audio koder MP3 LAME. Provjerite parametre "
+ "kodiranja."
+
+ #, c-format
+ msgid ""
+ "The requested bitrate %d kbit/s for property '%s' is not allowed. The "
+ "bitrate was changed to %d kbit/s."
+ msgstr ""
+ "Zatražena brzina %d kbit/s za svojstvo „%s“ nije dopuštena -- postavljena je "
+ "na brzinu od %d kbit/s."
+
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
- msgstr "â\80\98%sâ\80\99 (â\80\98%sâ\80\99)"
+ msgstr "â\80\9e%sâ\80\9c od â\80\9e%sâ\80\9c"
msgid "Could not connect to server"
- msgstr "Spajanje na poslužitelj nije moguće"
+ msgstr "Spajanje na server nije moguće"
msgid "No URL set."
- msgstr "URL adresa nije imenovana."
+ msgstr "Nema URL-adrese (nije postavljena)."
msgid "Could not resolve server name."
- msgstr "Nije moguće razriješiti ime poslužitelja."
+ msgstr "Nije moguće razriješiti ime servera."
msgid "Could not establish connection to server."
- msgstr "Nije moguće uspostaviti vezu s poslužiteljem."
+ msgstr "Nije moguće uspostaviti vezu sa serverom."
msgid "Secure connection setup failed."
- msgstr "Uspostavljanje sigurne veze nije uspjelo."
+ msgstr "Nije uspjelo uspostaviti sigurnu vezu."
msgid ""
"A network error occurred, or the server closed the connection unexpectedly."
- msgstr ""
- "Dogodila se mrežna greška, ili je poslužitelj neočekivano zatvorio vezu."
+ msgstr "Dogodila se greška na mreži ili je server neočekivano prekinuo vezu."
msgid "Server sent bad data."
- msgstr "Poslužitelj je poslao neispravne podatke."
+ msgstr "Server je poslao loše podatke."
msgid "Server does not support seeking."
- msgstr "Poslužitelj ne podržava traženje."
+ msgstr "Server ne podržava skok traženje na poziciju."
+
+ msgid "Failed to configure TwoLAME encoder. Check your encoding parameters."
+ msgstr ""
+ "Nije uspjelo konfigurirati koder TwoLAME. Provjerite parametre kodiranja."
msgid "No or invalid input audio, AVI stream will be corrupt."
- msgstr "Nema audio ulaza ili je neispravan, AVI stream će biti iskvaren."
+ msgstr "Nema audio ulaza ili nije valjan -- AVI protok bit će oštećen."
msgid "This file contains no playable streams."
- msgstr "Ova datoteka ne sadrži nijedan upotrebljivi stream (tok podataka)."
+ msgstr "Ova datoteka ne sadrži upotrebljive protoke."
msgid "This file is invalid and cannot be played."
- msgstr "Datoteka je neispravna i ne može se reproducirati."
+ msgstr "Datoteka nije valjana i ne može se reproducirati."
msgid "Cannot play stream because it is encrypted with PlayReady DRM."
- msgstr ""
- "Ovaj stream nije moguće reproducirati jer je kriptiran s PlayReady DRM."
+ msgstr "Ovaj protok nije moguće reproducirati jer je šifriran s PlayReady DRM."
msgid "This file is corrupt and cannot be played."
- msgstr "Datoteka je iskvarena i ne može se reproducirati."
+ msgstr "Datoteka je oštećena i ne može se reproducirati."
msgid "Invalid atom size."
- msgstr "Veličina atoma je neispravna."
+ msgstr "Veličina atoma nije valjana."
msgid "This file is incomplete and cannot be played."
msgstr "Datoteka je nepotpuna i ne može se reproducirati."
msgid "The video in this file might not play correctly."
- msgstr "Video iz ove datoteke se možda neće ispravno reproducirati."
-
- #, c-format
- msgid "This file contains too many streams. Only playing first %d"
- msgstr ""
- "U ovoj datoteci ima previše streamova (streams). Samo prvih %d će se "
- "reproducirati"
+ msgstr "Video iz ove datoteke se možda neće korektno reproducirati."
# https://gstreamer.freedesktop.org/documentation/rtp.html
msgid ""
"No supported stream was found. You might need to install a GStreamer RTSP "
"extension plugin for Real media streams."
msgstr ""
- "Nijedan podržani stream nije nađen. Možda ćete morati instalirati GStreamer "
- "RTSP dodatni plugin za Real medijske streamove."
+ "Nije nađen nijedan podržani protok. Vjerojatno trebate instalirati plugin "
+ "GStreamera za proširenje RTSP na Real multimedijske protoke."
msgid ""
"No supported stream was found. You might need to allow more transport "
"protocols or may otherwise be missing the right GStreamer RTSP extension "
"plugin."
msgstr ""
- "Nijedan podržani stream nije nađen. Možda ćete morati dopustiti više "
- "prijenosnih protokola ili možda vam nedostaje odgovarajući GStreamer RTSP "
- "dodatni plugin."
+ "Nije nađen nijedan podržani protok. Vjerojatno trebate omogućiti još neke "
+ "prijenosne protokole ili vam možda nedostaje odgovarajući plugin GStreamera "
+ "za proširenje RTSP-a."
msgid ""
"Could not open audio device for playback. Device is being used by another "
"application."
msgstr ""
- "Audiouređaj nije moguće otvoriti za reprodukciju. Uređaj trenutačno koristi "
- "neka druga aplikacija."
+ "Audiouređaj nije moguće otvoriti za reprodukciju jer ga koristi neka druga "
+ "aplikacija."
msgid ""
"Could not open audio device for playback. You don't have permission to open "
"the device."
msgstr ""
- "Audiouređaj nije moguće otvoriti za reprodukciju. Nemate dopuštenje za "
+ "Audiouređaj nije moguće otvoriti za reprodukciju jer nemate dopuštenje za "
"otvaranje uređaja."
msgid "Could not open audio device for playback."
"Could not open audio device for playback. This version of the Open Sound "
"System is not supported by this element."
msgstr ""
- "Audiouređaj nije moguće otvoriti za reprodukciju. Ovaj element ne podržava "
- "ovu inačicu Open Sound System."
+ "Audiouređaj nije moguće otvoriti za reprodukciju jer ovaj element ne "
+ "podržava ovu inačicu Open Sound System."
msgid "Playback is not supported by this audio device."
msgstr "Ovaj audiouređaj ne podržava reprodukciju."
msgstr "Ovaj audiouređaj ne podržava snimanje."
msgid "Error recording from audio device."
- msgstr "Greška snimanja s audiouređaja."
+ msgstr "Greška pri snimanju s audiouređaja."
msgid ""
"Could not open audio device for recording. You don't have permission to open "
"the device."
msgstr ""
- "Audiouređaj nije moguće otvoriti za snimanje. Nemate dopuštenje za otvaranje "
- "uređaja."
+ "Audiouređaj nije moguće otvoriti za snimanje jer nemate dopuštenje za "
+ "otvaranje uređaja."
msgid "Could not open audio device for recording."
msgstr "Audiouređaj nije moguće otvoriti za snimanje."
msgid "CoreAudio device could not be opened"
msgstr "CoreAudio uređaj nije moguće otvoriti"
- msgid "Record Source"
- msgstr "Izvor snimanja"
-
- msgid "Microphone"
- msgstr "Mikrofon"
-
- msgid "Line In"
- msgstr "Linijski ulaz"
-
- msgid "Internal CD"
- msgstr "Interni CD"
-
- msgid "SPDIF In"
- msgstr "SPDIF ulaz"
-
- msgid "AUX 1 In"
- msgstr "AUX 1 ulaz"
-
- msgid "AUX 2 In"
- msgstr "AUX 2 ulaz"
-
- msgid "Codec Loopback"
- msgstr "Kodek-povratna petlja"
-
- msgid "SunVTS Loopback"
- msgstr "SunVTS-povratna petlja"
-
- msgid "Volume"
- msgstr "Glasnoća"
-
- msgid "Gain"
- msgstr "Pojačanje"
-
- msgid "Monitor"
- msgstr "Nadzor"
-
- msgid "Built-in Speaker"
- msgstr "Ugrađeni zvučnik"
-
- msgid "Headphone"
- msgstr "Slušalica"
-
- msgid "Line Out"
- msgstr "Linijski izlaz"
-
- msgid "SPDIF Out"
- msgstr "SPDIF izlaz"
-
- msgid "AUX 1 Out"
- msgstr "AUX 1 izlaz"
-
- msgid "AUX 2 Out"
- msgstr "AUX 2 izlaz"
-
#, c-format
msgid "Error reading %d bytes from device '%s'."
- msgstr "Greška čitanja %d bajtova s uređaja ‘%s’."
+ msgstr "Greška čitanja %d bajtova iz uređaja „%s“."
#, c-format
msgid "Failed to enumerate possible video formats device '%s' can work with"
msgstr ""
- "Nije uspjelo nabrojati sve moguće video formate s kojima uređaj ‘%s’ može "
- "raditi"
+ "Nije uspjelo prikazati sve video formate s kojima može raditi uređaj „%s“"
#, c-format
msgid "Could not map buffers from device '%s'"
- msgstr "Nije moguće preslikati (map) međuspremnike iz uređaja ‘%s’"
+ msgstr "Nije moguće mapirati međuspremnike uređaja „%s“"
#, c-format
msgid "The driver of device '%s' does not support the IO method %d"
- msgstr "UpravljaÄ\8dki program ureÄ\91aja â\80\98%sâ\80\99 ne podržava IO metodu %d"
+ msgstr "UpravljaÄ\8dki program ureÄ\91aja â\80\9e%sâ\80\9c ne podržava metodu U/I %d"
#, c-format
msgid "The driver of device '%s' does not support any known IO method."
msgstr ""
- "Upravljački program uređaja ‘%s’ ne podržava nijednu poznatu IO metodu."
+ "Upravljački program uređaja „%s“ ne podržava nijednu poznatu metodu U/I."
+
+ #, c-format
+ msgid "Device '%s' has no supported format"
+ msgstr "Uređaj „%s“ nema podržani format"
+
+ #, c-format
+ msgid "Device '%s' failed during initialization"
+ msgstr "Nije uspjela inicijalizacija uređaja „%s“"
#, c-format
msgid "Device '%s' is busy"
- msgstr "UreÄ\91aj â\80\98%sâ\80\99 je zauzet"
+ msgstr "UreÄ\91aj â\80\9e%sâ\80\9c je zauzet"
#, c-format
msgid "Device '%s' cannot capture at %dx%d"
- msgstr "UreÄ\91aj â\80\98%sâ\80\99 ne može snimati s razluÄ\8divosti od %dx%d"
+ msgstr "UreÄ\91aj â\80\9e%sâ\80\9c ne može snimati u rezoluciji %dx%d"
#, c-format
msgid "Device '%s' cannot capture in the specified format"
- msgstr "UreÄ\91aj â\80\98%sâ\80\99 ne može snimati u specificiranom formatu."
+ msgstr "UreÄ\91aj â\80\9e%sâ\80\9c ne može snimati u specificiranom formatu"
#, c-format
msgid "Device '%s' does support non-contiguous planes"
- msgstr "Device ‘%s’ ne podržava prekinute ravnine"
+ msgstr "Device „%s“ podržava nepovezane plohe/ravnine (non-contiguous planes)"
+
+ #, c-format
+ msgid "Device '%s' does not support %s interlacing"
+ msgstr "Device „%s“ ne podržava preplitanje (interlacing) %s"
+
+ #, c-format
+ msgid "Device '%s' does not support %s colorimetry"
+ msgstr "Uređaj „%s“ ne podržava kolorimetriju %s"
#, c-format
msgid "Could not get parameters on device '%s'"
- msgstr "Parametre uređaja ‘%s’ nije moguće dobiti"
+ msgstr "Nije moguće dobiti parametre uređaja „%s“"
msgid "Video device did not accept new frame rate setting."
- msgstr "Videouređaj nije prihvatio novu frekvenciju slika (ili poluslika)."
+ msgstr "Videouređaj nije prihvatio novu postavku frekvencije okvira (slika)."
msgid "Video device did not provide output format."
- msgstr "Videouređaj nije predočio izlazni format."
+ msgstr "Videouređaj nije dao/odredio izlazni format."
msgid "Video device returned invalid dimensions."
- msgstr "Videouređaj je uzvratio s neispravnim dimenzijama."
+ msgstr "Videouređaj nije vratio valjane dimenzije."
msgid "Video device uses an unsupported interlacing method."
- msgstr "Videouređaj koristi nepodržanu metodu poluslika."
+ msgstr "Videouređaj koristi nepodržanu metodu preplitanja (interlacing)."
msgid "Video device uses an unsupported pixel format."
-msgstr "Videouređaj koristi nepodržani format piksela."
+msgstr "Videouređaj koristi format piksela koji nije podržan."
msgid "Failed to configure internal buffer pool."
- msgstr "Nije uspjelo konfigurirati internu zalihu međuspremnika."
+ msgstr "Nije uspjelo konfigurirati interne međuspremnike (buffer pool)."
msgid "Video device did not suggest any buffer size."
- msgstr "Videouređaj nije predložio nijednu veličinu međuspremnika."
+ msgstr "Videouređaj nije naveo/zatražio bilo kakvu veličinu međuspremnika."
msgid "No downstream pool to import from."
- msgstr "Nema se od nikuda uvesti ‘downstream’ zaliha."
+ msgstr "Ne postoji mjesto (downstream pool) iz kojeg se može uvoziti."
# tuner > štelanje frekvencije, mijenjanje (biranje) kanala
#, c-format
msgid "Failed to get settings of tuner %d on device '%s'."
- msgstr "Nije uspjelo dobiti postavke tunera %d na uređaj ‘%s’."
+ msgstr "Nije uspjelo dobiti postavke tunera %d na uređaju „%s“."
#, c-format
msgid "Error getting capabilities for device '%s'."
- msgstr "Greška pri dobivanju podataka o mogućnostima uređaja ‘%s’."
+ msgstr "Greška pri dobivanju sposobnosti uređaja „%s“."
#, c-format
msgid "Device '%s' is not a tuner."
- msgstr "UreÄ\91aj â\80\98%sâ\80\99 nije tuner."
+ msgstr "UreÄ\91aj â\80\9e%sâ\80\9c nije tuner."
#, c-format
msgid "Failed to get radio input on device '%s'. "
- msgstr "Nije uspjelo dobiti radijski ulaz na uređaju ‘%s’. "
+ msgstr "Nije uspjelo dobiti radiosignal na uređaju „%s“."
#, c-format
msgid "Failed to set input %d on device %s."
- msgstr "Nije uspjelo postaviti ulaz %d na uređaj %s."
+ msgstr "Nije uspjelo postaviti ulaz %d na uređaju %s."
#, c-format
msgid "Failed to change mute state for device '%s'."
- msgstr "Nije uspjelo promijeniti status mute (Zvûk (da/ne) za uređaj ‘%s’."
+ msgstr "Nije uspjelo promijeniti stanje „mute“ na uređaju „%s“."
msgid "Failed to allocated required memory."
- msgstr "Nije uspjelo izdvojiti potrebnu memoriju."
+ msgstr "Nije uspjelo dodijeliti potrebnu memoriju."
msgid "Failed to allocate required memory."
msgstr "Nije uspjelo dodijeliti potrebnu memoriju."
#, c-format
msgid "Converter on device %s has no supported input format"
- msgstr "Konverter na uređaju %s nema podržani ulazni format"
+ msgstr "Pretvarač na uređaju %s nema podržani ulazni format"
#, c-format
msgid "Converter on device %s has no supported output format"
- msgstr "Konverter na uređaju %s nema podržani izlazni format"
+ msgstr "Pretvarač na uređaju %s nema podržani izlazni format"
#, c-format
- msgid "Encoder on device %s has no supported input format"
- msgstr "Koder na uređaju %s nema podržani ulazni format"
+ msgid "Decoder on device %s has no supported input format"
+ msgstr "Dekoder na uređaju %s nema podržani ulazni format"
#, c-format
- msgid "Encoder on device %s has no supported output format"
- msgstr "Koder na uređaju %s nema podržani izlazni format"
+ msgid "Decoder on device %s has no supported output format"
+ msgstr "Dekoder na uređaju %s nema podržani izlazni format"
msgid "Failed to start decoding thread."
- msgstr "Nije uspjelo započeti dekodiranje niti (thread)."
+ msgstr "Nije uspjelo započeti dekodiranje dretve."
msgid "Failed to process frame."
- msgstr "Nije uspjelo obraditi sliku (polusliku)."
+ msgstr "Nije uspjelo obraditi okvir (sliku)."
+
+ #, c-format
+ msgid "Encoder on device %s has no supported output format"
+ msgstr "Koder na uređaju %s nema podržani izlazni format"
+
+ #, c-format
+ msgid "Encoder on device %s has no supported input format"
+ msgstr "Koder na uređaju %s nema podržani ulazni format"
+
+ msgid "Failed to start encoding thread."
+ msgstr "Nije uspjelo započeti kodiranje dretve."
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"it is a v4l1 driver."
msgstr ""
- "Greška pri dobivanja mogućnosti za uređaj ‘%s’: To nije v4l2 upravljački "
- "program. Provjerite da li je v4l1 upravljački program."
+ "Greška pri dobivanju Caps (sposobnosti) za uređaj „%s“: To nije v4l2 "
+ "upravljački program. Provjerite je li to v4l1 upravljački program."
#, c-format
msgid "Failed to query attributes of input %d in device %s"
- msgstr "Nije uspjelo ispitati svojstva ulaza %d uređaja %s"
+ msgstr "Nije uspjelo ispitati atribute ulaza %d uređaja %s"
#, c-format
msgid "Failed to get setting of tuner %d on device '%s'."
- msgstr "Nije uspjelo dobiti postavke tunera %d ureÄ\91aja â\80\98%sâ\80\99."
+ msgstr "Nije uspjelo dobiti postavke tunera %d ureÄ\91aja â\80\9e%sâ\80\9c."
#, c-format
msgid "Failed to query norm on device '%s'."
- msgstr "Nije uspjelo ispitati normu (standard) na uređaju ‘%s’."
+ msgstr "Nije uspjelo ispitati „norm“ na uređaju „%s“."
#, c-format
msgid "Failed getting controls attributes on device '%s'."
- msgstr "Nije uspjelo dobiti upravljačka svojstva uređaja ‘%s’."
+ msgstr "Nije uspjelo dobiti atribute kontrola uređaja „%s“."
#, c-format
msgid "Cannot identify device '%s'."
- msgstr "Ne može se identificirati uređaj ‘%s’."
+ msgstr "Nije moguće identificirati uređaj „%s“."
#, c-format
msgid "This isn't a device '%s'."
- msgstr "Ovo nije uređaj ‘%s’."
+ msgstr "To nije uređaj „%s“."
#, c-format
msgid "Could not open device '%s' for reading and writing."
- msgstr "Uređaj ‘%s’nije moguće otvoriti za čitanje i pisanje."
+ msgstr "Nije bilo moguće otvoriti uređaj „%s“ za čitanje i pisanje."
#, c-format
msgid "Device '%s' is not a capture device."
- msgstr "UreÄ\91aj â\80\98%sâ\80\99 nije uređaj za snimanje."
+ msgstr "UreÄ\91aj â\80\9e%sâ\80\9c nije uređaj za snimanje."
#, c-format
msgid "Device '%s' is not a output device."
- msgstr "UreÄ\91aj â\80\98%sâ\80\99 nije izlazni uređaj."
+ msgstr "UreÄ\91aj â\80\9e%sâ\80\9c nije izlazni uređaj."
#, c-format
msgid "Device '%s' is not a M2M device."
- msgstr "UreÄ\91aj â\80\98%sâ\80\99 nije M2M ureÄ\91aj."
+ msgstr "UreÄ\91aj â\80\9e%sâ\80\9c nije ureÄ\91aj M2M."
#, c-format
msgid "Could not dup device '%s' for reading and writing."
- msgstr "Uređaj ‘%s’nije moguće duplicirati za čitanje i pisanje."
+ msgstr "Nije uspjelo dup() uređaj „%s“ za čitanje i pisanje."
#, c-format
msgid "Failed to set norm for device '%s'."
- msgstr "Nije uspjelo postaviti normu (standard) za uređaj ‘%s’."
+ msgstr "Nije uspjelo postaviti „norm“ za uređaj „%s“."
#, c-format
msgid "Failed to get current tuner frequency for device '%s'."
- msgstr "Nije uspjelo dobiti trenutačnu frekvenciju tunera za uređaj ‘%s’."
+ msgstr "Nije uspjelo dobiti aktualnu frekvenciju tunera za uređaj „%s“."
#, c-format
msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
msgstr ""
- "Nije uspjelo postaviti trenutačnu frekvenciju tunera za uređaj ‘%s’ na %lu "
- "Hz."
+ "Nije uspjelo postaviti aktualnu frekvenciju tunera za uređaj „%s“ na %lu Hz."
#, c-format
msgid "Failed to get signal strength for device '%s'."
- msgstr "Nije uspjelo dobiti snagu signala za ureÄ\91aj â\80\98%sâ\80\99."
+ msgstr "Nije uspjelo dobiti snagu signala za ureÄ\91aj â\80\9e%sâ\80\9c."
#, c-format
msgid "Failed to get value for control %d on device '%s'."
- msgstr "Nije uspjelo dobiti vrijednost za kontrolu %d na ureÄ\91aju â\80\98%sâ\80\99."
+ msgstr "Nije uspjelo dobiti vrijednost za kontrolu %d na ureÄ\91aju â\80\9e%sâ\80\9c."
#, c-format
msgid "Failed to set value %d for control %d on device '%s'."
- msgstr "Nije uspjelo postaviti vrijednost %d za kontrolu %d na uređaju ‘%s’."
+ msgstr "Nije uspjelo postaviti na vrijednost %d kontrolu %d na uređaju „%s“."
#, c-format
msgid "Failed to get current input on device '%s'. May be it is a radio device"
msgstr ""
- "Nije uspjelo dobiti trenutačni ulaz na uređaju ‘%s’. Možda je to radiouređaj"
+ "Nije uspjelo dobiti aktualni ulaz na uređaju „%s“ -- možda je to radiouređaj."
#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
- "Nije uspjelo dobiti trenutačni izlaz na uređaju ‘%s’. Možda je to radiouređaj"
+ "Nije uspjelo dobiti aktualni izlaz na uređaju „%s“ -- možda je to "
+ "radiouređaj."
#, c-format
msgid "Failed to set output %d on device %s."
- msgstr "Nije uspjelo postaviti izlaz %d na uređaj %s."
+ msgstr "Nije uspjelo postaviti izlaz %d na uređaju %s."
msgid "Changing resolution at runtime is not yet supported."
msgstr "Promjena rezolucije u tijeku rada (runtime) još nije podržana."
msgid "Cannot operate without a clock"
- msgstr "Ne mogu raditi bez takta (sata)"
+ msgstr "Nije moguće raditi bez sata (clock)"
+
+ #~ msgid "This file contains too many streams. Only playing first %d"
+ #~ msgstr ""
+ #~ "U ovoj datoteci ima previše struja. Samo prvih %d će se reproducirati"
+
+ #~ msgid "Record Source"
+ #~ msgstr "Izvor snimanja"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Mikrofon"
+
+ #~ msgid "Line In"
+ #~ msgstr "Linijski ulaz"
+
+ #~ msgid "Internal CD"
+ #~ msgstr "Interni CD"
+
+ #~ msgid "SPDIF In"
+ #~ msgstr "SPDIF ulaz"
+
+ #~ msgid "AUX 1 In"
+ #~ msgstr "AUX 1 ulaz"
+
+ #~ msgid "AUX 2 In"
+ #~ msgstr "AUX 2 ulaz"
+
+ #~ msgid "Codec Loopback"
+ #~ msgstr "Kodekova povratna petlja"
+
+ #~ msgid "SunVTS Loopback"
+ #~ msgstr "SunVTS povratna petlja"
+
+ #~ msgid "Volume"
+ #~ msgstr "Glasnoća"
+
+ #~ msgid "Gain"
+ #~ msgstr "Pojačanje"
+
+ #~ msgid "Monitor"
+ #~ msgstr "Monitor"
+
+ #~ msgid "Built-in Speaker"
+ #~ msgstr "Ugrađeni zvučnik"
+
+ #~ msgid "Headphone"
+ #~ msgstr "Slušalica"
+
+ #~ msgid "Line Out"
+ #~ msgstr "Linijski izlaz"
+
+ #~ msgid "SPDIF Out"
+ #~ msgstr "SPDIF izlaz"
+
+ #~ msgid "AUX 1 Out"
+ #~ msgstr "AUX 1 izlaz"
+
+ #~ msgid "AUX 2 Out"
+ #~ msgstr "AUX 2 izlaz"
#~ msgid "Internal data stream error."
#~ msgstr "Interna greška toka (stream) podataka."
+ /* SPDX-License-Identifier: ((GPL-2.0+ WITH Linux-syscall-note) OR BSD-3-Clause) */
/*
* Video for Linux Two header file
*
* All kernel-specific stuff were moved to media/v4l2-dev.h, so
* no #if __KERNEL tests are allowed here
*
- * See http://linuxtv.org for more info
+ * See https://linuxtv.org for more info
*
* Author: Bill Dirks <bill@thedirks.org>
* Justin Schoeman
transmitted first */
};
#define V4L2_FIELD_HAS_TOP(field) \
- ((field) == V4L2_FIELD_TOP ||\
+ ((field) == V4L2_FIELD_TOP ||\
(field) == V4L2_FIELD_INTERLACED ||\
(field) == V4L2_FIELD_INTERLACED_TB ||\
(field) == V4L2_FIELD_INTERLACED_BT ||\
(field) == V4L2_FIELD_SEQ_TB ||\
(field) == V4L2_FIELD_SEQ_BT)
#define V4L2_FIELD_HAS_BOTTOM(field) \
- ((field) == V4L2_FIELD_BOTTOM ||\
+ ((field) == V4L2_FIELD_BOTTOM ||\
(field) == V4L2_FIELD_INTERLACED ||\
(field) == V4L2_FIELD_INTERLACED_TB ||\
(field) == V4L2_FIELD_INTERLACED_BT ||\
((field) == V4L2_FIELD_BOTTOM ||\
(field) == V4L2_FIELD_TOP ||\
(field) == V4L2_FIELD_ALTERNATE)
+ #define V4L2_FIELD_IS_INTERLACED(field) \
+ ((field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT)
+ #define V4L2_FIELD_IS_SEQUENTIAL(field) \
+ ((field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
enum v4l2_buf_type {
V4L2_BUF_TYPE_VIDEO_CAPTURE = 1,
V4L2_BUF_TYPE_VBI_OUTPUT = 5,
V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6,
V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7,
- #if 1
- /* Experimental */
V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8,
- #endif
V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9,
V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10,
V4L2_BUF_TYPE_SDR_CAPTURE = 11,
+ V4L2_BUF_TYPE_SDR_OUTPUT = 12,
+ V4L2_BUF_TYPE_META_CAPTURE = 13,
+ V4L2_BUF_TYPE_META_OUTPUT = 14,
/* Deprecated, do not use */
V4L2_BUF_TYPE_PRIVATE = 0x80,
};
|| (type) == V4L2_BUF_TYPE_VIDEO_OVERLAY \
|| (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY \
|| (type) == V4L2_BUF_TYPE_VBI_OUTPUT \
- || (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT)
+ || (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_SDR_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_META_OUTPUT)
enum v4l2_tuner_type {
V4L2_TUNER_RADIO = 1,
V4L2_TUNER_ANALOG_TV = 2,
V4L2_TUNER_DIGITAL_TV = 3,
- V4L2_TUNER_ADC = 4,
+ V4L2_TUNER_SDR = 4,
V4L2_TUNER_RF = 5,
};
+ /* Deprecated, do not use */
+ #define V4L2_TUNER_ADC V4L2_TUNER_SDR
+
enum v4l2_memory {
V4L2_MEMORY_MMAP = 1,
V4L2_MEMORY_USERPTR = 2,
/* For RGB colorspaces such as produces by most webcams. */
V4L2_COLORSPACE_SRGB = 8,
- /* AdobeRGB colorspace */
- V4L2_COLORSPACE_ADOBERGB = 9,
+ /* opRGB colorspace */
+ V4L2_COLORSPACE_OPRGB = 9,
/* BT.2020 colorspace, used for UHDTV. */
V4L2_COLORSPACE_BT2020 = 10,
/* Raw colorspace: for RAW unprocessed images */
V4L2_COLORSPACE_RAW = 11,
+
+ /* DCI-P3 colorspace, used by cinema projectors */
+ V4L2_COLORSPACE_DCI_P3 = 12,
};
/*
*
* V4L2_COLORSPACE_SRGB, V4L2_COLORSPACE_JPEG: V4L2_XFER_FUNC_SRGB
*
- * V4L2_COLORSPACE_ADOBERGB: V4L2_XFER_FUNC_ADOBERGB
+ * V4L2_COLORSPACE_OPRGB: V4L2_XFER_FUNC_OPRGB
*
* V4L2_COLORSPACE_SMPTE240M: V4L2_XFER_FUNC_SMPTE240M
*
* V4L2_COLORSPACE_RAW: V4L2_XFER_FUNC_NONE
+ *
+ * V4L2_COLORSPACE_DCI_P3: V4L2_XFER_FUNC_DCI_P3
*/
V4L2_XFER_FUNC_DEFAULT = 0,
V4L2_XFER_FUNC_709 = 1,
V4L2_XFER_FUNC_SRGB = 2,
- V4L2_XFER_FUNC_ADOBERGB = 3,
+ V4L2_XFER_FUNC_OPRGB = 3,
V4L2_XFER_FUNC_SMPTE240M = 4,
V4L2_XFER_FUNC_NONE = 5,
+ V4L2_XFER_FUNC_DCI_P3 = 6,
+ V4L2_XFER_FUNC_SMPTE2084 = 7,
};
/*
* This depends on the colorspace.
*/
#define V4L2_MAP_XFER_FUNC_DEFAULT(colsp) \
- ((colsp) == V4L2_XFER_FUNC_ADOBERGB ? V4L2_XFER_FUNC_ADOBERGB : \
+ ((colsp) == V4L2_COLORSPACE_OPRGB ? V4L2_XFER_FUNC_OPRGB : \
((colsp) == V4L2_COLORSPACE_SMPTE240M ? V4L2_XFER_FUNC_SMPTE240M : \
- ((colsp) == V4L2_COLORSPACE_RAW ? V4L2_XFER_FUNC_NONE : \
- ((colsp) == V4L2_COLORSPACE_SRGB || (colsp) == V4L2_COLORSPACE_JPEG ? \
- V4L2_XFER_FUNC_SRGB : V4L2_XFER_FUNC_709))))
+ ((colsp) == V4L2_COLORSPACE_DCI_P3 ? V4L2_XFER_FUNC_DCI_P3 : \
+ ((colsp) == V4L2_COLORSPACE_RAW ? V4L2_XFER_FUNC_NONE : \
+ ((colsp) == V4L2_COLORSPACE_SRGB || (colsp) == V4L2_COLORSPACE_JPEG ? \
+ V4L2_XFER_FUNC_SRGB : V4L2_XFER_FUNC_709)))))
enum v4l2_ycbcr_encoding {
/*
* various colorspaces:
*
* V4L2_COLORSPACE_SMPTE170M, V4L2_COLORSPACE_470_SYSTEM_M,
- * V4L2_COLORSPACE_470_SYSTEM_BG, V4L2_COLORSPACE_ADOBERGB and
- * V4L2_COLORSPACE_JPEG: V4L2_YCBCR_ENC_601
- *
- * V4L2_COLORSPACE_REC709: V4L2_YCBCR_ENC_709
+ * V4L2_COLORSPACE_470_SYSTEM_BG, V4L2_COLORSPACE_SRGB,
+ * V4L2_COLORSPACE_OPRGB and V4L2_COLORSPACE_JPEG: V4L2_YCBCR_ENC_601
*
- * V4L2_COLORSPACE_SRGB: V4L2_YCBCR_ENC_SYCC
+ * V4L2_COLORSPACE_REC709 and V4L2_COLORSPACE_DCI_P3: V4L2_YCBCR_ENC_709
*
* V4L2_COLORSPACE_BT2020: V4L2_YCBCR_ENC_BT2020
*
/* Rec. 709/EN 61966-2-4 Extended Gamut -- HDTV */
V4L2_YCBCR_ENC_XV709 = 4,
- /* sYCC (Y'CbCr encoding of sRGB) */
+ /*
+ * sYCC (Y'CbCr encoding of sRGB), identical to ENC_601. It was added
+ * originally due to a misunderstanding of the sYCC standard. It should
+ * not be used, instead use V4L2_YCBCR_ENC_601.
+ */
V4L2_YCBCR_ENC_SYCC = 5,
/* BT.2020 Non-constant Luminance Y'CbCr */
};
/*
+ * enum v4l2_hsv_encoding values should not collide with the ones from
+ * enum v4l2_ycbcr_encoding.
+ */
+ enum v4l2_hsv_encoding {
+
+ /* Hue mapped to 0 - 179 */
+ V4L2_HSV_ENC_180 = 128,
+
+ /* Hue mapped to 0-255 */
+ V4L2_HSV_ENC_256 = 129,
+ };
+
+ /*
* Determine how YCBCR_ENC_DEFAULT should map to a proper Y'CbCr encoding.
* This depends on the colorspace.
*/
#define V4L2_MAP_YCBCR_ENC_DEFAULT(colsp) \
- ((colsp) == V4L2_COLORSPACE_REC709 ? V4L2_YCBCR_ENC_709 : \
+ (((colsp) == V4L2_COLORSPACE_REC709 || \
+ (colsp) == V4L2_COLORSPACE_DCI_P3) ? V4L2_YCBCR_ENC_709 : \
((colsp) == V4L2_COLORSPACE_BT2020 ? V4L2_YCBCR_ENC_BT2020 : \
((colsp) == V4L2_COLORSPACE_SMPTE240M ? V4L2_YCBCR_ENC_SMPTE240M : \
V4L2_YCBCR_ENC_601)))
/*
* The default for R'G'B' quantization is always full range, except
* for the BT2020 colorspace. For Y'CbCr the quantization is always
- * limited range, except for COLORSPACE_JPEG, SYCC, XV601 or XV709:
- * those are full range.
+ * limited range, except for COLORSPACE_JPEG: this is full range.
*/
V4L2_QUANTIZATION_DEFAULT = 0,
V4L2_QUANTIZATION_FULL_RANGE = 1,
* This depends on whether the image is RGB or not, the colorspace and the
* Y'CbCr encoding.
*/
- #define V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb, colsp, ycbcr_enc) \
- (((is_rgb) && (colsp) == V4L2_COLORSPACE_BT2020) ? V4L2_QUANTIZATION_LIM_RANGE : \
- (((is_rgb) || (ycbcr_enc) == V4L2_YCBCR_ENC_XV601 || \
- (ycbcr_enc) == V4L2_YCBCR_ENC_XV709 || (colsp) == V4L2_COLORSPACE_JPEG) ? \
+ #define V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb_or_hsv, colsp, ycbcr_enc) \
+ (((is_rgb_or_hsv) && (colsp) == V4L2_COLORSPACE_BT2020) ? \
+ V4L2_QUANTIZATION_LIM_RANGE : \
+ (((is_rgb_or_hsv) || (colsp) == V4L2_COLORSPACE_JPEG) ? \
V4L2_QUANTIZATION_FULL_RANGE : V4L2_QUANTIZATION_LIM_RANGE))
+ /*
+ * Deprecated names for opRGB colorspace (IEC 61966-2-5)
+ *
+ * WARNING: Please don't use these deprecated defines in your code, as
+ * there is a chance we have to remove them in the future.
+ */
+ #define V4L2_COLORSPACE_ADOBERGB V4L2_COLORSPACE_OPRGB
+ #define V4L2_XFER_FUNC_ADOBERGB V4L2_XFER_FUNC_OPRGB
+
enum v4l2_priority {
V4L2_PRIORITY_UNSET = 0, /* not initialized */
V4L2_PRIORITY_BACKGROUND = 1,
#define V4L2_CAP_SDR_CAPTURE 0x00100000 /* Is a SDR capture device */
#define V4L2_CAP_EXT_PIX_FORMAT 0x00200000 /* Supports the extended pixel format */
+ #define V4L2_CAP_SDR_OUTPUT 0x00400000 /* Is a SDR output device */
+ #define V4L2_CAP_META_CAPTURE 0x00800000 /* Is a metadata capture device */
#define V4L2_CAP_READWRITE 0x01000000 /* read/write systemcalls */
#define V4L2_CAP_ASYNCIO 0x02000000 /* async I/O */
#define V4L2_CAP_STREAMING 0x04000000 /* streaming I/O ioctls */
+ #define V4L2_CAP_META_OUTPUT 0x08000000 /* Is a metadata output device */
+
+ #define V4L2_CAP_TOUCH 0x10000000 /* Is a touch device */
#define V4L2_CAP_DEVICE_CAPS 0x80000000 /* sets device capabilities field */
* V I D E O I M A G E F O R M A T
*/
struct v4l2_pix_format {
- __u32 width;
+ __u32 width;
__u32 height;
__u32 pixelformat;
__u32 field; /* enum v4l2_field */
- __u32 bytesperline; /* for padding, zero if unused */
- __u32 sizeimage;
+ __u32 bytesperline; /* for padding, zero if unused */
+ __u32 sizeimage;
__u32 colorspace; /* enum v4l2_colorspace */
__u32 priv; /* private data, depends on pixelformat */
__u32 flags; /* format flags (V4L2_PIX_FMT_FLAG_*) */
- __u32 ycbcr_enc; /* enum v4l2_ycbcr_encoding */
+ union {
+ /* enum v4l2_ycbcr_encoding */
+ __u32 ycbcr_enc;
+ /* enum v4l2_hsv_encoding */
+ __u32 hsv_enc;
+ };
__u32 quantization; /* enum v4l2_quantization */
__u32 xfer_func; /* enum v4l2_xfer_func */
};
/* Grey bit-packed formats */
#define V4L2_PIX_FMT_Y10BPACK v4l2_fourcc('Y', '1', '0', 'B') /* 10 Greyscale bit-packed */
+ #define V4L2_PIX_FMT_Y10P v4l2_fourcc('Y', '1', '0', 'P') /* 10 Greyscale, MIPI RAW10 packed */
/* Palette formats */
#define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') /* 8 8-bit palette */
#define V4L2_PIX_FMT_UV8 v4l2_fourcc('U', 'V', '8', ' ') /* 8 UV 4:4 */
/* Luminance+Chrominance formats */
- #define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') /* 9 YVU 4:1:0 */
- #define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') /* 12 YVU 4:2:0 */
#define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */
#define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */
#define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') /* 16 YVU 4:2:2 */
#define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */
#define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */
- #define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') /* 16 YVU422 planar */
- #define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') /* 16 YVU411 planar */
#define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */
#define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') /* 16 xxxxyyyy uuuuvvvv */
#define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */
#define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */
#define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */
- #define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */
- #define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 */
+ #define V4L2_PIX_FMT_AYUV32 v4l2_fourcc('A', 'Y', 'U', 'V') /* 32 AYUV-8-8-8-8 */
+ #define V4L2_PIX_FMT_XYUV32 v4l2_fourcc('X', 'Y', 'U', 'V') /* 32 XYUV-8-8-8-8 */
+ #define V4L2_PIX_FMT_VUYA32 v4l2_fourcc('V', 'U', 'Y', 'A') /* 32 VUYA-8-8-8-8 */
+ #define V4L2_PIX_FMT_VUYX32 v4l2_fourcc('V', 'U', 'Y', 'X') /* 32 VUYX-8-8-8-8 */
#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') /* 8 8-bit color */
#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') /* 8 YUV 4:2:0 16x16 macroblocks */
#define V4L2_PIX_FMT_M420 v4l2_fourcc('M', '4', '2', '0') /* 12 YUV 4:2:0 2 lines y, 1 line uv interleaved */
#define V4L2_PIX_FMT_NV12MT v4l2_fourcc('T', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 64x32 macroblocks */
#define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 16x16 macroblocks */
+ /* three planes - Y Cb, Cr */
+ #define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */
+ #define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') /* 9 YVU 4:1:0 */
+ #define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') /* 12 YVU411 planar */
+ #define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 */
+ #define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') /* 12 YVU 4:2:0 */
+ #define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') /* 16 YVU422 planar */
+
/* three non contiguous planes - Y, Cb, Cr */
#define V4L2_PIX_FMT_YUV420M v4l2_fourcc('Y', 'M', '1', '2') /* 12 YUV420 planar */
#define V4L2_PIX_FMT_YVU420M v4l2_fourcc('Y', 'M', '2', '1') /* 12 YVU420 planar */
+ #define V4L2_PIX_FMT_YUV422M v4l2_fourcc('Y', 'M', '1', '6') /* 16 YUV422 planar */
+ #define V4L2_PIX_FMT_YVU422M v4l2_fourcc('Y', 'M', '6', '1') /* 16 YVU422 planar */
+ #define V4L2_PIX_FMT_YUV444M v4l2_fourcc('Y', 'M', '2', '4') /* 24 YUV444 planar */
+ #define V4L2_PIX_FMT_YVU444M v4l2_fourcc('Y', 'M', '4', '2') /* 24 YVU444 planar */
/* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
#define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') /* 8 BGBG.. GRGR.. */
#define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') /* 12 GBGB.. RGRG.. */
#define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') /* 12 GRGR.. BGBG.. */
#define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') /* 12 RGRG.. GBGB.. */
+ /* 12bit raw bayer packed, 6 bytes for every 4 pixels */
+ #define V4L2_PIX_FMT_SBGGR12P v4l2_fourcc('p', 'B', 'C', 'C')
+ #define V4L2_PIX_FMT_SGBRG12P v4l2_fourcc('p', 'G', 'C', 'C')
+ #define V4L2_PIX_FMT_SGRBG12P v4l2_fourcc('p', 'g', 'C', 'C')
+ #define V4L2_PIX_FMT_SRGGB12P v4l2_fourcc('p', 'R', 'C', 'C')
+ /* 14bit raw bayer packed, 7 bytes for every 4 pixels */
+ #define V4L2_PIX_FMT_SBGGR14P v4l2_fourcc('p', 'B', 'E', 'E')
+ #define V4L2_PIX_FMT_SGBRG14P v4l2_fourcc('p', 'G', 'E', 'E')
+ #define V4L2_PIX_FMT_SGRBG14P v4l2_fourcc('p', 'g', 'E', 'E')
+ #define V4L2_PIX_FMT_SRGGB14P v4l2_fourcc('p', 'R', 'E', 'E')
#define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') /* 16 BGBG.. GRGR.. */
+ #define V4L2_PIX_FMT_SGBRG16 v4l2_fourcc('G', 'B', '1', '6') /* 16 GBGB.. RGRG.. */
+ #define V4L2_PIX_FMT_SGRBG16 v4l2_fourcc('G', 'R', '1', '6') /* 16 GRGR.. BGBG.. */
+ #define V4L2_PIX_FMT_SRGGB16 v4l2_fourcc('R', 'G', '1', '6') /* 16 RGRG.. GBGB.. */
+
+ /* HSV formats */
+ #define V4L2_PIX_FMT_HSV24 v4l2_fourcc('H', 'S', 'V', '3')
+ #define V4L2_PIX_FMT_HSV32 v4l2_fourcc('H', 'S', 'V', '4')
/* compressed formats */
#define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') /* Motion-JPEG */
#define V4L2_PIX_FMT_H263 v4l2_fourcc('H', '2', '6', '3') /* H263 */
#define V4L2_PIX_FMT_MPEG1 v4l2_fourcc('M', 'P', 'G', '1') /* MPEG-1 ES */
#define V4L2_PIX_FMT_MPEG2 v4l2_fourcc('M', 'P', 'G', '2') /* MPEG-2 ES */
+ #define V4L2_PIX_FMT_MPEG2_SLICE v4l2_fourcc('M', 'G', '2', 'S') /* MPEG-2 parsed slice data */
#define V4L2_PIX_FMT_MPEG4 v4l2_fourcc('M', 'P', 'G', '4') /* MPEG-4 part 2 ES */
#define V4L2_PIX_FMT_XVID v4l2_fourcc('X', 'V', 'I', 'D') /* Xvid */
#define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') /* SMPTE 421M Annex G compliant stream */
#define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') /* SMPTE 421M Annex L compliant stream */
#define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') /* VP8 */
+ #define V4L2_PIX_FMT_VP9 v4l2_fourcc('V', 'P', '9', '0') /* VP9 */
+ #define V4L2_PIX_FMT_HEVC v4l2_fourcc('H', 'E', 'V', 'C') /* HEVC aka H.265 */
+ #define V4L2_PIX_FMT_FWHT v4l2_fourcc('F', 'W', 'H', 'T') /* Fast Walsh Hadamard Transform (vicodec) */
/* Vendor-specific formats */
#define V4L2_PIX_FMT_CPIA1 v4l2_fourcc('C', 'P', 'I', 'A') /* cpia1 YUV */
#define V4L2_PIX_FMT_JPGL v4l2_fourcc('J', 'P', 'G', 'L') /* JPEG-Lite */
#define V4L2_PIX_FMT_SE401 v4l2_fourcc('S', '4', '0', '1') /* se401 janggu compressed rgb */
#define V4L2_PIX_FMT_S5C_UYVY_JPG v4l2_fourcc('S', '5', 'C', 'I') /* S5C73M3 interleaved UYVY/JPEG */
+ #define V4L2_PIX_FMT_Y8I v4l2_fourcc('Y', '8', 'I', ' ') /* Greyscale 8-bit L/R interleaved */
+ #define V4L2_PIX_FMT_Y12I v4l2_fourcc('Y', '1', '2', 'I') /* Greyscale 12-bit L/R interleaved */
+ #define V4L2_PIX_FMT_Z16 v4l2_fourcc('Z', '1', '6', ' ') /* Depth data 16-bit */
+ #define V4L2_PIX_FMT_MT21C v4l2_fourcc('M', 'T', '2', '1') /* Mediatek compressed block mode */
+ #define V4L2_PIX_FMT_INZI v4l2_fourcc('I', 'N', 'Z', 'I') /* Intel Planar Greyscale 10-bit and Depth 16-bit */
+ #define V4L2_PIX_FMT_SUNXI_TILED_NV12 v4l2_fourcc('S', 'T', '1', '2') /* Sunxi Tiled NV12 Format */
+ #define V4L2_PIX_FMT_CNF4 v4l2_fourcc('C', 'N', 'F', '4') /* Intel 4-bit packed depth confidence information */
+
+ /* 10bit raw bayer packed, 32 bytes for every 25 pixels, last LSB 6 bits unused */
+ #define V4L2_PIX_FMT_IPU3_SBGGR10 v4l2_fourcc('i', 'p', '3', 'b') /* IPU3 packed 10-bit BGGR bayer */
+ #define V4L2_PIX_FMT_IPU3_SGBRG10 v4l2_fourcc('i', 'p', '3', 'g') /* IPU3 packed 10-bit GBRG bayer */
+ #define V4L2_PIX_FMT_IPU3_SGRBG10 v4l2_fourcc('i', 'p', '3', 'G') /* IPU3 packed 10-bit GRBG bayer */
+ #define V4L2_PIX_FMT_IPU3_SRGGB10 v4l2_fourcc('i', 'p', '3', 'r') /* IPU3 packed 10-bit RGGB bayer */
/* SDR formats - used only for Software Defined Radio devices */
#define V4L2_SDR_FMT_CU8 v4l2_fourcc('C', 'U', '0', '8') /* IQ u8 */
#define V4L2_SDR_FMT_CS8 v4l2_fourcc('C', 'S', '0', '8') /* complex s8 */
#define V4L2_SDR_FMT_CS14LE v4l2_fourcc('C', 'S', '1', '4') /* complex s14le */
#define V4L2_SDR_FMT_RU12LE v4l2_fourcc('R', 'U', '1', '2') /* real u12le */
+ #define V4L2_SDR_FMT_PCU16BE v4l2_fourcc('P', 'C', '1', '6') /* planar complex u16be */
+ #define V4L2_SDR_FMT_PCU18BE v4l2_fourcc('P', 'C', '1', '8') /* planar complex u18be */
+ #define V4L2_SDR_FMT_PCU20BE v4l2_fourcc('P', 'C', '2', '0') /* planar complex u20be */
+
+ /* Touch formats - used for Touch devices */
+ #define V4L2_TCH_FMT_DELTA_TD16 v4l2_fourcc('T', 'D', '1', '6') /* 16-bit signed deltas */
+ #define V4L2_TCH_FMT_DELTA_TD08 v4l2_fourcc('T', 'D', '0', '8') /* 8-bit signed deltas */
+ #define V4L2_TCH_FMT_TU16 v4l2_fourcc('T', 'U', '1', '6') /* 16-bit unsigned touch data */
+ #define V4L2_TCH_FMT_TU08 v4l2_fourcc('T', 'U', '0', '8') /* 8-bit unsigned touch data */
+
+ /* Meta-data formats */
+ #define V4L2_META_FMT_VSP1_HGO v4l2_fourcc('V', 'S', 'P', 'H') /* R-Car VSP1 1-D Histogram */
+ #define V4L2_META_FMT_VSP1_HGT v4l2_fourcc('V', 'S', 'P', 'T') /* R-Car VSP1 2-D Histogram */
+ #define V4L2_META_FMT_UVC v4l2_fourcc('U', 'V', 'C', 'H') /* UVC Payload Header metadata */
+ #define V4L2_META_FMT_D4XX v4l2_fourcc('D', '4', 'X', 'X') /* D4XX Payload Header metadata */
+
/* priv field value to indicates that subsequent fields are valid. */
#define V4L2_PIX_FMT_PRIV_MAGIC 0xfeedcafe
#define V4L2_FMT_FLAG_COMPRESSED 0x0001
#define V4L2_FMT_FLAG_EMULATED 0x0002
- #if 1
- /* Experimental Frame Size and frame rate enumeration */
+ /* Frame Size and frame rate enumeration */
/*
* F R A M E S I Z E E N U M E R A T I O N
*/
__u32 reserved[2]; /* Reserved space for future use */
};
- #endif
/*
* T I M E C O D E
__u32 count;
__u32 type; /* enum v4l2_buf_type */
__u32 memory; /* enum v4l2_memory */
- __u32 reserved[2];
+ __u32 capabilities;
+ __u32 reserved[1];
};
+ /* capabilities for struct v4l2_requestbuffers and v4l2_create_buffers */
+ #define V4L2_BUF_CAP_SUPPORTS_MMAP (1 << 0)
+ #define V4L2_BUF_CAP_SUPPORTS_USERPTR (1 << 1)
+ #define V4L2_BUF_CAP_SUPPORTS_DMABUF (1 << 2)
+ #define V4L2_BUF_CAP_SUPPORTS_REQUESTS (1 << 3)
+ #define V4L2_BUF_CAP_SUPPORTS_ORPHANED_BUFS (1 << 4)
+
/**
* struct v4l2_plane - plane info for multi-planar buffers
* @bytesused: number of bytes occupied by data in the plane (payload)
* @length: size in bytes of the buffer (NOT its payload) for single-plane
* buffers (when type != *_MPLANE); number of elements in the
* planes array for multi-plane buffers
+ * @request_fd: fd of the request that this buffer should use
*
* Contains data exchanged by application and driver using one of the Streaming
* I/O methods.
} m;
__u32 length;
__u32 reserved2;
- __u32 reserved;
+ union {
+ __s32 request_fd;
+ __u32 reserved;
+ };
};
+ /**
+ * v4l2_timeval_to_ns - Convert timeval to nanoseconds
+ * @ts: pointer to the timeval variable to be converted
+ *
+ * Returns the scalar nanosecond representation of the timeval
+ * parameter.
+ */
+ static __inline__ __u64 v4l2_timeval_to_ns(const struct timeval *tv)
+ {
+ return (__u64)tv->tv_sec * 1000000000ULL + tv->tv_usec * 1000;
+ }
+
/* Flags for 'flags' field */
/* Buffer is mapped (flag) */
#define V4L2_BUF_FLAG_MAPPED 0x00000001
#define V4L2_BUF_FLAG_BFRAME 0x00000020
/* Buffer is ready, but the data contained within is corrupted. */
#define V4L2_BUF_FLAG_ERROR 0x00000040
+ /* Buffer is added to an unqueued request */
+ #define V4L2_BUF_FLAG_IN_REQUEST 0x00000080
/* timecode field is valid */
#define V4L2_BUF_FLAG_TIMECODE 0x00000100
/* Buffer is prepared for queuing */
#define V4L2_BUF_FLAG_TSTAMP_SRC_SOE 0x00010000
/* mem2mem encoder/decoder */
#define V4L2_BUF_FLAG_LAST 0x00100000
+ /* request_fd is valid */
+ #define V4L2_BUF_FLAG_REQUEST_FD 0x00800000
/**
* struct v4l2_exportbuffer - export of video buffer as DMABUF file descriptor
struct v4l2_clip {
struct v4l2_rect c;
- struct v4l2_clip __user *next;
+ struct v4l2_clip *next;
};
struct v4l2_window {
struct v4l2_rect w;
__u32 field; /* enum v4l2_field */
__u32 chromakey;
- struct v4l2_clip __user *clips;
+ struct v4l2_clip *clips;
__u32 clipcount;
- void __user *bitmap;
+ void *bitmap;
__u8 global_alpha;
};
V4L2_STD_NTSC_M_JP |\
V4L2_STD_NTSC_M_KR)
/* Secam macros */
- #define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D |\
+ #define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D |\
V4L2_STD_SECAM_K |\
V4L2_STD_SECAM_K1)
/* All Secam Standards */
};
/*
- * D V B T T I M I N G S
+ * D V B T T I M I N G S
*/
/** struct v4l2_bt_timings - BT.656/BT.1120 timing data
* (aka field 2) of interlaced field formats
* @standards: Standards the timing belongs to
* @flags: Flags
+ * @picture_aspect: The picture aspect ratio (hor/vert).
+ * @cea861_vic: VIC code as per the CEA-861 standard.
+ * @hdmi_vic: VIC code as per the HDMI standard.
* @reserved: Reserved fields, must be zeroed.
*
* A note regarding vertical interlaced timings: height refers to the total
__u32 il_vbackporch;
__u32 standards;
__u32 flags;
- __u32 reserved[14];
+ struct v4l2_fract picture_aspect;
+ __u8 cea861_vic;
+ __u8 hdmi_vic;
+ __u8 reserved[46];
} __attribute__ ((packed));
/* Interlaced or progressive format */
#define V4L2_DV_BT_STD_DMT (1 << 1) /* VESA Discrete Monitor Timings */
#define V4L2_DV_BT_STD_CVT (1 << 2) /* VESA Coordinated Video Timings */
#define V4L2_DV_BT_STD_GTF (1 << 3) /* VESA Generalized Timings Formula */
+ #define V4L2_DV_BT_STD_SDI (1 << 4) /* SDI Timings */
/* Flags */
- /* CVT/GTF specific: timing uses reduced blanking (CVT) or the 'Secondary
- GTF' curve (GTF). In both cases the horizontal and/or vertical blanking
- intervals are reduced, allowing a higher resolution over the same
- bandwidth. This is a read-only flag. */
+ /*
+ * CVT/GTF specific: timing uses reduced blanking (CVT) or the 'Secondary
+ * GTF' curve (GTF). In both cases the horizontal and/or vertical blanking
+ * intervals are reduced, allowing a higher resolution over the same
+ * bandwidth. This is a read-only flag.
+ */
#define V4L2_DV_FL_REDUCED_BLANKING (1 << 0)
- /* CEA-861 specific: set for CEA-861 formats with a framerate of a multiple
- of six. These formats can be optionally played at 1 / 1.001 speed.
- This is a read-only flag. */
+ /*
+ * CEA-861 specific: set for CEA-861 formats with a framerate of a multiple
+ * of six. These formats can be optionally played at 1 / 1.001 speed.
+ * This is a read-only flag.
+ */
#define V4L2_DV_FL_CAN_REDUCE_FPS (1 << 1)
- /* CEA-861 specific: only valid for video transmitters, the flag is cleared
- by receivers.
- If the framerate of the format is a multiple of six, then the pixelclock
- used to set up the transmitter is divided by 1.001 to make it compatible
- with 60 Hz based standards such as NTSC and PAL-M that use a framerate of
- 29.97 Hz. Otherwise this flag is cleared. If the transmitter can't generate
- such frequencies, then the flag will also be cleared. */
+ /*
+ * CEA-861 specific: only valid for video transmitters, the flag is cleared
+ * by receivers.
+ * If the framerate of the format is a multiple of six, then the pixelclock
+ * used to set up the transmitter is divided by 1.001 to make it compatible
+ * with 60 Hz based standards such as NTSC and PAL-M that use a framerate of
+ * 29.97 Hz. Otherwise this flag is cleared. If the transmitter can't generate
+ * such frequencies, then the flag will also be cleared.
+ */
#define V4L2_DV_FL_REDUCED_FPS (1 << 2)
- /* Specific to interlaced formats: if set, then field 1 is really one half-line
- longer and field 2 is really one half-line shorter, so each field has
- exactly the same number of half-lines. Whether half-lines can be detected
- or used depends on the hardware. */
+ /*
+ * Specific to interlaced formats: if set, then field 1 is really one half-line
+ * longer and field 2 is really one half-line shorter, so each field has
+ * exactly the same number of half-lines. Whether half-lines can be detected
+ * or used depends on the hardware.
+ */
#define V4L2_DV_FL_HALF_LINE (1 << 3)
- /* If set, then this is a Consumer Electronics (CE) video format. Such formats
+ /*
+ * If set, then this is a Consumer Electronics (CE) video format. Such formats
* differ from other formats (commonly called IT formats) in that if RGB
* encoding is used then by default the RGB values use limited range (i.e.
* use the range 16-235) as opposed to 0-255. All formats defined in CEA-861
- * except for the 640x480 format are CE formats. */
+ * except for the 640x480 format are CE formats.
+ */
#define V4L2_DV_FL_IS_CE_VIDEO (1 << 4)
+ /* Some formats like SMPTE-125M have an interlaced signal with a odd
+ * total height. For these formats, if this flag is set, the first
+ * field has the extra line. If not, it is the second field.
+ */
+ #define V4L2_DV_FL_FIRST_FIELD_EXTRA_LINE (1 << 5)
+ /*
+ * If set, then the picture_aspect field is valid. Otherwise assume that the
+ * pixels are square, so the picture aspect ratio is the same as the width to
+ * height ratio.
+ */
+ #define V4L2_DV_FL_HAS_PICTURE_ASPECT (1 << 6)
+ /*
+ * If set, then the cea861_vic field is valid and contains the Video
+ * Identification Code as per the CEA-861 standard.
+ */
+ #define V4L2_DV_FL_HAS_CEA861_VIC (1 << 7)
+ /*
+ * If set, then the hdmi_vic field is valid and contains the Video
+ * Identification Code as per the HDMI standard (HDMI Vendor Specific
+ * InfoFrame).
+ */
+ #define V4L2_DV_FL_HAS_HDMI_VIC (1 << 8)
+ /*
+ * CEA-861 specific: only valid for video receivers.
+ * If set, then HW can detect the difference between regular FPS and
+ * 1000/1001 FPS. Note: This flag is only valid for HDMI VIC codes with
+ * the V4L2_DV_FL_CAN_REDUCE_FPS flag set.
+ */
+ #define V4L2_DV_FL_CAN_DETECT_REDUCED_FPS (1 << 9)
/* A few useful defines to calculate the total blanking and frame sizes */
#define V4L2_DV_BT_BLANKING_WIDTH(bt) \
/* Values for the 'type' field */
#define V4L2_INPUT_TYPE_TUNER 1
#define V4L2_INPUT_TYPE_CAMERA 2
+ #define V4L2_INPUT_TYPE_TOUCH 3
/* field 'status' - general */
#define V4L2_IN_ST_NO_POWER 0x00000001 /* Attached device is off */
/* field 'status' - analog */
#define V4L2_IN_ST_NO_H_LOCK 0x00000100 /* No horizontal sync lock */
#define V4L2_IN_ST_COLOR_KILL 0x00000200 /* Color killer is active */
+ #define V4L2_IN_ST_NO_V_LOCK 0x00000400 /* No vertical sync lock */
+ #define V4L2_IN_ST_NO_STD_LOCK 0x00000800 /* No standard format lock */
/* field 'status' - digital */
#define V4L2_IN_ST_NO_SYNC 0x00010000 /* No synchronization lock */
union {
__s32 value;
__s64 value64;
- char __user *string;
- __u8 __user *p_u8;
- __u16 __user *p_u16;
- __u32 __user *p_u32;
- void __user *ptr;
+ char *string;
+ __u8 *p_u8;
+ __u16 *p_u16;
+ __u32 *p_u32;
+ void *ptr;
};
} __attribute__ ((packed));
struct v4l2_ext_controls {
- __u32 ctrl_class;
+ union {
+ __u32 ctrl_class;
+ __u32 which;
+ };
__u32 count;
__u32 error_idx;
- __u32 reserved[2];
+ __s32 request_fd;
+ __u32 reserved[1];
struct v4l2_ext_control *controls;
};
- #define V4L2_CTRL_ID_MASK (0x0fffffff)
+ #define V4L2_CTRL_ID_MASK (0x0fffffff)
#define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL)
+ #define V4L2_CTRL_ID2WHICH(id) ((id) & 0x0fff0000UL)
#define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000)
#define V4L2_CTRL_MAX_DIMS (4)
+ #define V4L2_CTRL_WHICH_CUR_VAL 0
+ #define V4L2_CTRL_WHICH_DEF_VAL 0x0f000000
+ #define V4L2_CTRL_WHICH_REQUEST_VAL 0x0f010000
enum v4l2_ctrl_type {
V4L2_CTRL_TYPE_INTEGER = 1,
/* Control flags */
#define V4L2_CTRL_FLAG_DISABLED 0x0001
#define V4L2_CTRL_FLAG_GRABBED 0x0002
- #define V4L2_CTRL_FLAG_READ_ONLY 0x0004
- #define V4L2_CTRL_FLAG_UPDATE 0x0008
- #define V4L2_CTRL_FLAG_INACTIVE 0x0010
- #define V4L2_CTRL_FLAG_SLIDER 0x0020
- #define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040
+ #define V4L2_CTRL_FLAG_READ_ONLY 0x0004
+ #define V4L2_CTRL_FLAG_UPDATE 0x0008
+ #define V4L2_CTRL_FLAG_INACTIVE 0x0010
+ #define V4L2_CTRL_FLAG_SLIDER 0x0020
+ #define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040
#define V4L2_CTRL_FLAG_VOLATILE 0x0080
#define V4L2_CTRL_FLAG_HAS_PAYLOAD 0x0100
#define V4L2_CTRL_FLAG_EXECUTE_ON_WRITE 0x0200
+ #define V4L2_CTRL_FLAG_MODIFY_LAYOUT 0x0400
/* Query flags, to be ORed with the control ID */
#define V4L2_CTRL_FLAG_NEXT_CTRL 0x80000000
__u32 rangelow;
__u32 rangehigh;
__u32 txsubchans;
- __u32 reserved[4];
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 reserved[3];
};
/* Flags for the 'capability' field */
*/
struct v4l2_rds_data {
- __u8 lsb;
- __u8 msb;
- __u8 block;
+ __u8 lsb;
+ __u8 msb;
+ __u8 block;
} __attribute__ ((packed));
- #define V4L2_RDS_BLOCK_MSK 0x7
- #define V4L2_RDS_BLOCK_A 0
- #define V4L2_RDS_BLOCK_B 1
- #define V4L2_RDS_BLOCK_C 2
- #define V4L2_RDS_BLOCK_D 3
- #define V4L2_RDS_BLOCK_C_ALT 4
- #define V4L2_RDS_BLOCK_INVALID 7
+ #define V4L2_RDS_BLOCK_MSK 0x7
+ #define V4L2_RDS_BLOCK_A 0
+ #define V4L2_RDS_BLOCK_B 1
+ #define V4L2_RDS_BLOCK_C 2
+ #define V4L2_RDS_BLOCK_D 3
+ #define V4L2_RDS_BLOCK_C_ALT 4
+ #define V4L2_RDS_BLOCK_INVALID 7
#define V4L2_RDS_BLOCK_CORRECTED 0x40
- #define V4L2_RDS_BLOCK_ERROR 0x80
+ #define V4L2_RDS_BLOCK_ERROR 0x80
/*
* A U D I O
/*
* M P E G S E R V I C E S
- *
- * NOTE: EXPERIMENTAL API
*/
#if 1
#define V4L2_ENC_IDX_FRAME_I (0)
struct v4l2_plane_pix_format plane_fmt[VIDEO_MAX_PLANES];
__u8 num_planes;
__u8 flags;
- __u8 ycbcr_enc;
+ union {
+ __u8 ycbcr_enc;
+ __u8 hsv_enc;
+ };
__u8 quantization;
__u8 xfer_func;
__u8 reserved[7];
} __attribute__ ((packed));
/**
+ * struct v4l2_meta_format - metadata format definition
+ * @dataformat: little endian four character code (fourcc)
+ * @buffersize: maximum size in bytes required for data
+ */
+ struct v4l2_meta_format {
+ __u32 dataformat;
+ __u32 buffersize;
+ } __attribute__ ((packed));
+
+ /**
* struct v4l2_format - stream data format
* @type: enum v4l2_buf_type; type of the data stream
* @pix: definition of an image format
struct v4l2_vbi_format vbi; /* V4L2_BUF_TYPE_VBI_CAPTURE */
struct v4l2_sliced_vbi_format sliced; /* V4L2_BUF_TYPE_SLICED_VBI_CAPTURE */
struct v4l2_sdr_format sdr; /* V4L2_BUF_TYPE_SDR_CAPTURE */
+ struct v4l2_meta_format meta; /* V4L2_BUF_TYPE_META_CAPTURE */
__u8 raw_data[200]; /* user-defined */
} fmt;
};
* return: number of created buffers
* @memory: enum v4l2_memory; buffer memory type
* @format: frame format, for which buffers are requested
+ * @capabilities: capabilities of this buffer type.
* @reserved: future extensions
*/
struct v4l2_create_buffers {
__u32 count;
__u32 memory;
struct v4l2_format format;
- __u32 reserved[8];
+ __u32 capabilities;
+ __u32 reserved[7];
};
/*
*
*/
#define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability)
- #define VIDIOC_RESERVED _IO('V', 1)
#define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc)
#define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format)
#define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format)
#define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop)
#define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression)
#define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression)
- #define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id)
- #define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format)
+ #define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id)
+ #define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format)
#define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio)
#define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout)
#define VIDIOC_G_PRIORITY _IOR('V', 67, __u32) /* enum v4l2_priority */
#define VIDIOC_ENCODER_CMD _IOWR('V', 77, struct v4l2_encoder_cmd)
#define VIDIOC_TRY_ENCODER_CMD _IOWR('V', 78, struct v4l2_encoder_cmd)
- /* Experimental, meant for debugging, testing and internal use.
- Only implemented if CONFIG_VIDEO_ADV_DEBUG is defined.
- You must be root to use these ioctls. Never use these in applications! */
- #define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register)
- #define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register)
+ /*
+ * Experimental, meant for debugging, testing and internal use.
+ * Only implemented if CONFIG_VIDEO_ADV_DEBUG is defined.
+ * You must be root to use these ioctls. Never use these in applications!
+ */
+ #define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register)
+ #define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register)
#define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek)
-
#define VIDIOC_S_DV_TIMINGS _IOWR('V', 87, struct v4l2_dv_timings)
#define VIDIOC_G_DV_TIMINGS _IOWR('V', 88, struct v4l2_dv_timings)
#define VIDIOC_DQEVENT _IOR('V', 89, struct v4l2_event)
#define VIDIOC_SUBSCRIBE_EVENT _IOW('V', 90, struct v4l2_event_subscription)
#define VIDIOC_UNSUBSCRIBE_EVENT _IOW('V', 91, struct v4l2_event_subscription)
-
- /* Experimental, the below two ioctls may change over the next couple of kernel
- versions */
#define VIDIOC_CREATE_BUFS _IOWR('V', 92, struct v4l2_create_buffers)
#define VIDIOC_PREPARE_BUF _IOWR('V', 93, struct v4l2_buffer)
-
- /* Experimental selection API */
#define VIDIOC_G_SELECTION _IOWR('V', 94, struct v4l2_selection)
#define VIDIOC_S_SELECTION _IOWR('V', 95, struct v4l2_selection)
-
- /* Experimental, these two ioctls may change over the next couple of kernel
- versions. */
#define VIDIOC_DECODER_CMD _IOWR('V', 96, struct v4l2_decoder_cmd)
#define VIDIOC_TRY_DECODER_CMD _IOWR('V', 97, struct v4l2_decoder_cmd)
-
- /* Experimental, these three ioctls may change over the next couple of kernel
- versions. */
#define VIDIOC_ENUM_DV_TIMINGS _IOWR('V', 98, struct v4l2_enum_dv_timings)
#define VIDIOC_QUERY_DV_TIMINGS _IOR('V', 99, struct v4l2_dv_timings)
#define VIDIOC_DV_TIMINGS_CAP _IOWR('V', 100, struct v4l2_dv_timings_cap)
-
- /* Experimental, this ioctl may change over the next couple of kernel
- versions. */
#define VIDIOC_ENUM_FREQ_BANDS _IOWR('V', 101, struct v4l2_frequency_band)
- /* Experimental, meant for debugging, testing and internal use.
- Never use these in applications! */
+ /*
+ * Experimental, meant for debugging, testing and internal use.
+ * Never use this in applications!
+ */
#define VIDIOC_DBG_G_CHIP_INFO _IOWR('V', 102, struct v4l2_dbg_chip_info)
#define VIDIOC_QUERY_EXT_CTRL _IOWR('V', 103, struct v4l2_query_ext_ctrl)
/* Reminder: when adding new ioctls please add support for them to
- drivers/media/video/v4l2-compat-ioctl32.c as well! */
+ drivers/media/v4l2-core/v4l2-compat-ioctl32.c as well! */
#define BASE_VIDIOC_PRIVATE 192 /* 192-255 are private */
#include "gstv4l2sink.h"
#include "gstv4l2radio.h"
#include "gstv4l2videodec.h"
+ #include "gstv4l2fwhtenc.h"
+ #include "gstv4l2h263enc.h"
+ #include "gstv4l2h264enc.h"
+ #include "gstv4l2h265enc.h"
+ #include "gstv4l2jpegenc.h"
+ #include "gstv4l2mpeg4enc.h"
+ #include "gstv4l2vp8enc.h"
+ #include "gstv4l2vp9enc.h"
#include "gstv4l2deviceprovider.h"
#include "gstv4l2transform.h"
- /* used in v4l2_calls.c and v4l2src_calls.c */
+ /* used in gstv4l2object.c and v4l2_calls.c */
GST_DEBUG_CATEGORY (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
GstV4l2Iterator *it;
gint video_fd = -1;
struct v4l2_capability vcap;
- gboolean ret = TRUE;
guint32 device_caps;
+ GST_DEBUG ("Probing devices");
+
it = gst_v4l2_iterator_new ();
while (gst_v4l2_iterator_next (it)) {
else
device_caps = vcap.capabilities;
- if (!((device_caps & (V4L2_CAP_VIDEO_M2M | V4L2_CAP_VIDEO_M2M_MPLANE)) ||
- /* But legacy driver may expose both CAPTURE and OUTPUT */
- ((device_caps &
- (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) &&
- (device_caps &
- (V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE)))))
+ if (!GST_V4L2_IS_M2M (device_caps))
continue;
GST_DEBUG ("Probing '%s' located at '%s'",
basename = g_path_get_basename (it->device_path);
- if (gst_v4l2_is_video_dec (sink_caps, src_caps))
- ret = gst_v4l2_video_dec_register (plugin, basename, it->device_path,
+ /* Caps won't be freed if the subclass is not instantiated */
+ GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+ GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+
+ if (gst_v4l2_is_video_dec (sink_caps, src_caps)) {
+ gst_v4l2_video_dec_register (plugin, basename, it->device_path,
sink_caps, src_caps);
- else if (gst_v4l2_is_transform (sink_caps, src_caps))
- ret = gst_v4l2_transform_register (plugin, basename, it->device_path,
+ } else if (gst_v4l2_is_video_enc (sink_caps, src_caps, NULL)) {
+ if (gst_v4l2_is_fwht_enc (sink_caps, src_caps))
+ gst_v4l2_fwht_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_h264_enc (sink_caps, src_caps))
+ gst_v4l2_h264_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_h265_enc (sink_caps, src_caps))
+ gst_v4l2_h265_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_mpeg4_enc (sink_caps, src_caps))
+ gst_v4l2_mpeg4_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_h263_enc (sink_caps, src_caps))
+ gst_v4l2_h263_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_jpeg_enc (sink_caps, src_caps))
+ gst_v4l2_jpeg_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_vp8_enc (sink_caps, src_caps))
+ gst_v4l2_vp8_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_vp9_enc (sink_caps, src_caps))
+ gst_v4l2_vp9_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+ } else if (gst_v4l2_is_transform (sink_caps, src_caps)) {
+ gst_v4l2_transform_register (plugin, basename, it->device_path,
sink_caps, src_caps);
+ }
/* else if ( ... etc. */
gst_caps_unref (sink_caps);
gst_caps_unref (src_caps);
g_free (basename);
-
- if (!ret)
- break;
}
if (video_fd >= 0)
gst_v4l2_iterator_free (it);
- return ret;
+ return TRUE;
}
#endif
static gboolean
plugin_init (GstPlugin * plugin)
{
+#ifndef TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY
const gchar *paths[] = { "/dev", "/dev/v4l2", NULL };
const gchar *names[] = { "video", NULL };
+#endif /* TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY */
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
-
+#ifndef TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY
 /* Add some dependency, so the dynamic features get updated upon changes in
  * /dev/video* */
gst_plugin_add_dependency (plugin,
NULL, paths, names, GST_PLUGIN_DEPENDENCY_FLAG_FILE_NAME_IS_PREFIX);
+#endif /* TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY */
if (!gst_element_register (plugin, "v4l2src", GST_RANK_PRIMARY,
GST_TYPE_V4L2SRC) ||
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
- #include <unistd.h>
#include <string.h>
+ #include <sys/mman.h>
+ #include <sys/ioctl.h>
+
#ifdef HAVE_GUDEV
#include <gudev/gudev.h>
#endif
- #include "v4l2_calls.h"
+ #include "ext/videodev2.h"
+ #include "gstv4l2object.h"
#include "gstv4l2tuner.h"
#include "gstv4l2colorbalance.h"
#include "gst/gst-i18n-plugin.h"
#include <gst/video/video.h>
+ #include <gst/allocators/gstdmabuf.h>
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
#define DEFAULT_PROP_DEVICE_FD -1
#define DEFAULT_PROP_FLAGS 0
#define DEFAULT_PROP_TV_NORM 0
- #define DEFAULT_PROP_CHANNEL NULL
- #define DEFAULT_PROP_FREQUENCY 0
#define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
- #define ENCODED_BUFFER_SIZE (1 * 1024 * 1024)
+ #define ENCODED_BUFFER_SIZE (2 * 1024 * 1024)
enum
{
{V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_INVZ, TRUE, GST_V4L2_RAW},
/* Palette formats */
{V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
/* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
- {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_CODEC},
- {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_CODEC},
- {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_CODEC},
- {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
/* compressed formats */
{V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
{V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
+ {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_VC1_ANNEX_G, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_VC1_ANNEX_L, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
+ {V4L2_PIX_FMT_VP9, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
/* Vendor-specific formats */
{V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
+ /* Support for 32bit off_t, this wrapper is casting off_t to gint64 */
+ #ifdef HAVE_LIBV4L2
+ #if SIZEOF_OFF_T < 8
+
+ static gpointer
+ v4l2_mmap_wrapper (gpointer start, gsize length, gint prot, gint flags, gint fd,
+ off_t offset)
+ {
+ return v4l2_mmap (start, length, prot, flags, fd, (gint64) offset);
+ }
+
+ #define v4l2_mmap v4l2_mmap_wrapper
+
+ #endif /* SIZEOF_OFF_T < 8 */
+ #endif /* HAVE_LIBV4L2 */
+
GstV4l2Object *
gst_v4l2_object_new (GstElement * element,
+ GstObject * debug_object,
enum v4l2_buf_type type,
const char *default_device,
GstV4l2GetInOutFunction get_in_out_func,
v4l2object->formats = NULL;
v4l2object->element = element;
+ v4l2object->dbg_obj = debug_object;
v4l2object->get_in_out_func = get_in_out_func;
v4l2object->set_in_out_func = set_in_out_func;
v4l2object->update_fps_func = update_fps_func;
v4l2object->channels = NULL;
v4l2object->colors = NULL;
- v4l2object->xwindow_id = 0;
-
v4l2object->keep_aspect = TRUE;
v4l2object->n_v4l2_planes = 0;
v4l2object->no_initial_format = FALSE;
+ /* We now disable libv4l2 by default, but have an env to enable it. */
+ #ifdef HAVE_LIBV4L2
+ if (g_getenv ("GST_V4L2_USE_LIBV4L2")) {
+ v4l2object->fd_open = v4l2_fd_open;
+ v4l2object->close = v4l2_close;
+ v4l2object->dup = v4l2_dup;
+ v4l2object->ioctl = v4l2_ioctl;
+ v4l2object->read = v4l2_read;
+ v4l2object->mmap = v4l2_mmap;
+ v4l2object->munmap = v4l2_munmap;
+ } else
+ #endif
+ {
+ v4l2object->fd_open = NULL;
+ v4l2object->close = close;
+ v4l2object->dup = dup;
+ v4l2object->ioctl = ioctl;
+ v4l2object->read = read;
+ v4l2object->mmap = mmap;
+ v4l2object->munmap = munmap;
+ }
+
return v4l2object;
}
break;
}
case PROP_PIXEL_ASPECT_RATIO:
- g_free (v4l2object->par);
+ if (v4l2object->par) {
+ g_value_unset (v4l2object->par);
+ g_free (v4l2object->par);
+ }
v4l2object->par = g_new0 (GValue, 1);
g_value_init (v4l2object->par, GST_TYPE_FRACTION);
if (!g_value_transform (value, v4l2object->par)) {
g_warning ("Could not transform string to aspect ratio");
gst_value_set_fraction (v4l2object->par, 1, 1);
}
- GST_DEBUG_OBJECT (v4l2object->element, "set PAR to %d/%d",
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "set PAR to %d/%d",
gst_value_get_fraction_numerator (v4l2object->par),
gst_value_get_fraction_denominator (v4l2object->par));
break;
break;
case PROP_DEVICE_NAME:
{
- const guchar *new = NULL;
+ const guchar *name = NULL;
- if (GST_V4L2_IS_OPEN (v4l2object)) {
- new = v4l2object->vcap.card;
- } else if (gst_v4l2_open (v4l2object)) {
- new = v4l2object->vcap.card;
- gst_v4l2_close (v4l2object);
- }
- g_value_set_string (value, (gchar *) new);
+ if (GST_V4L2_IS_OPEN (v4l2object))
+ name = v4l2object->vcap.card;
+
+ g_value_set_string (value, (gchar *) name);
break;
}
case PROP_DEVICE_FD:
else
control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) {
- GST_DEBUG_OBJECT (v4l2object->element,
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"driver requires a minimum of %d buffers", control.value);
v4l2object->min_buffers = control.value;
} else {
if (v4l2object->tv_norm)
norm = gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
- GST_DEBUG_OBJECT (v4l2object->element, "tv_norm=0x%" G_GINT64_MODIFIER "x, "
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "tv_norm=0x%" G_GINT64_MODIFIER "x, "
"norm=%p", (guint64) v4l2object->tv_norm, norm);
if (norm) {
gst_tuner_set_norm (tuner, norm);
gst_v4l2_object_clear_format_list (v4l2object);
}
+ if (v4l2object->par) {
+ g_value_unset (v4l2object->par);
+ g_free (v4l2object->par);
+ v4l2object->par = NULL;
+ }
+
+ if (v4l2object->channel) {
+ g_free (v4l2object->channel);
+ v4l2object->channel = NULL;
+ }
+
return TRUE;
}
break;
case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ case V4L2_PIX_FMT_INVZ:
rank = GREY_BASE_RANK;
break;
gint n;
struct v4l2_fmtdesc *format;
- GST_DEBUG_OBJECT (v4l2object->element, "getting src format enumerations");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting src format enumerations");
/* format enumeration */
for (n = 0;; n++) {
format->index = n;
format->type = type;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
if (errno == EINVAL) {
g_free (format);
break; /* end of enumeration */
}
}
- GST_LOG_OBJECT (v4l2object->element, "index: %u", format->index);
- GST_LOG_OBJECT (v4l2object->element, "type: %d", format->type);
- GST_LOG_OBJECT (v4l2object->element, "flags: %08x", format->flags);
- GST_LOG_OBJECT (v4l2object->element, "description: '%s'",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "index: %u", format->index);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "type: %d", format->type);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "flags: %08x", format->flags);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "description: '%s'",
format->description);
- GST_LOG_OBJECT (v4l2object->element, "pixelformat: %" GST_FOURCC_FORMAT,
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (format->pixelformat));
/* sort formats according to our preference; we do this, because caps
{
GSList *l;
- GST_INFO_OBJECT (v4l2object->element, "got %d format(s):", n);
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "got %d format(s):", n);
for (l = v4l2object->formats; l != NULL; l = l->next) {
format = l->data;
- GST_INFO_OBJECT (v4l2object->element,
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
" %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS (format->pixelformat),
((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
}
{
g_free (format);
- if (!GST_IS_ELEMENT (v4l2object->element))
+ if (v4l2object->element)
return FALSE;
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
case V4L2_PIX_FMT_NV24:
format = GST_VIDEO_FORMAT_NV24;
break;
+ case V4L2_PIX_FMT_INVZ:
+ format = GST_VIDEO_FORMAT_INVZ;
+ break;
default:
format = GST_VIDEO_FORMAT_UNKNOWN;
break;
case V4L2_PIX_FMT_BGR32:
case V4L2_PIX_FMT_ABGR32:
case V4L2_PIX_FMT_ARGB32:
+ case V4L2_PIX_FMT_SBGGR8:
+ case V4L2_PIX_FMT_SGBRG8:
+ case V4L2_PIX_FMT_SGRBG8:
+ case V4L2_PIX_FMT_SRGGB8:
ret = TRUE;
break;
default:
break;
case V4L2_PIX_FMT_MPEG1:
structure = gst_structure_new ("video/mpeg",
- "mpegversion", G_TYPE_INT, 2, NULL);
+ "mpegversion", G_TYPE_INT, 1, NULL);
break;
case V4L2_PIX_FMT_MPEG2:
structure = gst_structure_new ("video/mpeg",
"mpegversion", G_TYPE_INT, 4, "systemstream",
G_TYPE_BOOLEAN, FALSE, NULL);
break;
+ case V4L2_PIX_FMT_FWHT:
+ structure = gst_structure_new_empty ("video/x-fwht");
+ break;
case V4L2_PIX_FMT_H263:
structure = gst_structure_new ("video/x-h263",
"variant", G_TYPE_STRING, "itu", NULL);
"stream-format", G_TYPE_STRING, "avc", "alignment",
G_TYPE_STRING, "au", NULL);
break;
+ case V4L2_PIX_FMT_HEVC: /* H.265 */
+ structure = gst_structure_new ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
+ G_TYPE_STRING, "au", NULL);
+ break;
case V4L2_PIX_FMT_VC1_ANNEX_G:
case V4L2_PIX_FMT_VC1_ANNEX_L:
structure = gst_structure_new ("video/x-wmv",
case V4L2_PIX_FMT_VP8:
structure = gst_structure_new_empty ("video/x-vp8");
break;
+ case V4L2_PIX_FMT_VP9:
+ structure = gst_structure_new_empty ("video/x-vp9");
+ break;
case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
case V4L2_PIX_FMT_Y16:
case V4L2_PIX_FMT_Y16_BE:
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_YUV422P:
case V4L2_PIX_FMT_YVYU:
- case V4L2_PIX_FMT_YUV411P:{
+ case V4L2_PIX_FMT_YUV411P:
+ case V4L2_PIX_FMT_INVZ:{
GstVideoFormat format;
format = gst_v4l2_object_v4l2fourcc_to_video_format (fourcc);
if (format != GST_VIDEO_FORMAT_UNKNOWN)
case GST_VIDEO_FORMAT_GRAY16_BE:
fourcc = V4L2_PIX_FMT_Y16_BE;
break;
+ case GST_VIDEO_FORMAT_INVZ:
+ fourcc = V4L2_PIX_FMT_INVZ;
+ break;
default:
break;
}
break;
}
}
+ } else if (g_str_equal (mimetype, "video/x-fwht")) {
+ fourcc = V4L2_PIX_FMT_FWHT;
} else if (g_str_equal (mimetype, "video/x-h263")) {
fourcc = V4L2_PIX_FMT_H263;
} else if (g_str_equal (mimetype, "video/x-h264")) {
fourcc = V4L2_PIX_FMT_H264_NO_SC;
else
fourcc = V4L2_PIX_FMT_H264;
+ } else if (g_str_equal (mimetype, "video/x-h265")) {
+ fourcc = V4L2_PIX_FMT_HEVC;
} else if (g_str_equal (mimetype, "video/x-vp8")) {
fourcc = V4L2_PIX_FMT_VP8;
+ } else if (g_str_equal (mimetype, "video/x-vp9")) {
+ fourcc = V4L2_PIX_FMT_VP9;
} else if (g_str_equal (mimetype, "video/x-bayer")) {
const gchar *format = gst_structure_get_string (structure, "format");
if (format) {
static void
gst_v4l2_object_add_aspect_ratio (GstV4l2Object * v4l2object, GstStructure * s)
{
- struct v4l2_cropcap cropcap;
- int num = 1, den = 1;
-
- if (!v4l2object->keep_aspect)
- return;
-
- if (v4l2object->par) {
- num = gst_value_get_fraction_numerator (v4l2object->par);
- den = gst_value_get_fraction_denominator (v4l2object->par);
- goto done;
- }
-
- memset (&cropcap, 0, sizeof (cropcap));
-
- cropcap.type = v4l2object->type;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0)
- goto cropcap_failed;
-
- num = cropcap.pixelaspect.numerator;
- den = cropcap.pixelaspect.denominator;
-
- /* Ignore PAR that are 0/0 */
- if (den == 0)
- return;
-
- done:
- gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, num, den,
- NULL);
- return;
-
- cropcap_failed:
- if (errno != ENOTTY)
- GST_WARNING_OBJECT (v4l2object->element,
- "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
- g_strerror (errno));
- goto done;
+ if (v4l2object->keep_aspect && v4l2object->par)
+ gst_structure_set_value (s, "pixel-aspect-ratio", v4l2object->par);
}
/* returns TRUE if the value was changed in place, otherwise FALSE */
gst_v4l2_object_get_interlace_mode (enum v4l2_field field,
GstVideoInterlaceMode * interlace_mode)
{
- /* NB: If you add new return values, please fix mode_strings in
- * gst_v4l2_object_add_interlace_mode */
switch (field) {
case V4L2_FIELD_ANY:
GST_ERROR
cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
break;
- case V4L2_COLORSPACE_ADOBERGB:
+ case V4L2_COLORSPACE_OPRGB:
cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
case V4L2_COLORSPACE_BT2020:
cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
- cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
break;
case V4L2_COLORSPACE_SMPTE240M:
cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
break;
case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
- GST_FIXME ("BT2020 with constant lumma is not defined, assuming BT2020");
+ GST_FIXME ("BT2020 with constant luma is not defined, assuming BT2020");
/* fallthrough */
case V4L2_YCBCR_ENC_BT2020:
cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
switch (transfer) {
case V4L2_XFER_FUNC_709:
- cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
+ if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
+ else
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
break;
case V4L2_XFER_FUNC_SRGB:
cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
break;
- case V4L2_XFER_FUNC_ADOBERGB:
+ case V4L2_XFER_FUNC_OPRGB:
cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
break;
case V4L2_XFER_FUNC_SMPTE240M:
int r;
memcpy (&fmt, try_fmt, sizeof (fmt));
- r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt);
+ r = v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &fmt);
if (r < 0 && errno == ENOTTY) {
/* The driver might not implement TRY_FMT, in which case we will try
goto error;
memcpy (&fmt, try_fmt, sizeof (fmt));
- r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt);
+ r = v4l2object->ioctl (fd, VIDIOC_S_FMT, &fmt);
}
memcpy (try_fmt, &fmt, sizeof (fmt));
+
return r;
error:
memcpy (try_fmt, &fmt, sizeof (fmt));
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unable to try format: %s", g_strerror (errno));
return r;
}
{
struct v4l2_format fmt;
GValue interlace_formats = { 0, };
+ enum v4l2_field formats[] = { V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED };
+ gsize i;
GstVideoInterlaceMode interlace_mode, prev = -1;
- const gchar *mode_strings[] = { "progressive",
- "interleaved",
- "mixed"
- };
-
if (!g_str_equal (gst_structure_get_name (s), "video/x-raw"))
return;
g_value_init (&interlace_formats, GST_TYPE_LIST);
/* Try twice - once for NONE, once for INTERLACED. */
- memset (&fmt, 0, sizeof (fmt));
- fmt.type = v4l2object->type;
- fmt.fmt.pix.width = width;
- fmt.fmt.pix.height = height;
- fmt.fmt.pix.pixelformat = pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_NONE;
-
- if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 &&
- gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) {
- GValue interlace_enum = { 0, };
- g_value_init (&interlace_enum, G_TYPE_STRING);
- g_value_set_string (&interlace_enum, mode_strings[interlace_mode]);
- gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum);
- prev = interlace_mode;
- }
-
- memset (&fmt, 0, sizeof (fmt));
- fmt.type = v4l2object->type;
- fmt.fmt.pix.width = width;
- fmt.fmt.pix.height = height;
- fmt.fmt.pix.pixelformat = pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
-
- if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 &&
- gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode) &&
- prev != interlace_mode) {
- GValue interlace_enum = { 0, };
- g_value_init (&interlace_enum, G_TYPE_STRING);
- g_value_set_string (&interlace_enum, mode_strings[interlace_mode]);
- gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum);
+ for (i = 0; i < G_N_ELEMENTS (formats); i++) {
+ memset (&fmt, 0, sizeof (fmt));
+ fmt.type = v4l2object->type;
+ fmt.fmt.pix.width = width;
+ fmt.fmt.pix.height = height;
+ fmt.fmt.pix.pixelformat = pixelformat;
+ fmt.fmt.pix.field = formats[i];
+
+ if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 &&
+ gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)
+ && prev != interlace_mode) {
+ GValue interlace_enum = { 0, };
+ const gchar *mode_string;
+ g_value_init (&interlace_enum, G_TYPE_STRING);
+ mode_string = gst_video_interlace_mode_to_string (interlace_mode);
+ g_value_set_string (&interlace_enum, mode_string);
+ gst_value_list_append_and_take_value (&interlace_formats,
+ &interlace_enum);
+ prev = interlace_mode;
+ }
}
if (gst_v4l2src_value_simplify (&interlace_formats)
ival.width = width;
ival.height = height;
- GST_LOG_OBJECT (v4l2object->element,
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
"get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
GST_FOURCC_ARGS (pixelformat));
/* keep in mind that v4l2 gives us frame intervals (durations); we invert the
* fraction to get framerate */
- if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
+ if (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
goto enum_frameintervals_failed;
if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
denom >>= 1;
}
- GST_LOG_OBJECT (v4l2object->element, "adding discrete framerate: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "adding discrete framerate: %d/%d",
denom, num);
/* swap to get the framerate */
gst_value_list_append_value (&rates, &rate);
ival.index++;
- } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
+ } while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
} else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
GValue min = { 0, };
GValue step = { 0, };
minnum >>= 1;
mindenom >>= 1;
}
- GST_LOG_OBJECT (v4l2object->element, "stepwise min frame interval: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise min frame interval: %d/%d",
minnum, mindenom);
gst_value_set_fraction (&min, minnum, mindenom);
maxdenom >>= 1;
}
- GST_LOG_OBJECT (v4l2object->element, "stepwise max frame interval: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise max frame interval: %d/%d",
maxnum, maxdenom);
gst_value_set_fraction (&max, maxnum, maxdenom);
/* since we only have gst_value_fraction_subtract and not add, negate the
* numerator */
- GST_LOG_OBJECT (v4l2object->element, "stepwise step frame interval: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise step frame interval: %d/%d",
num, denom);
gst_value_set_fraction (&step, -num, denom);
num = gst_value_get_fraction_numerator (&min);
denom = gst_value_get_fraction_denominator (&min);
- GST_LOG_OBJECT (v4l2object->element, "adding stepwise framerate: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "adding stepwise framerate: %d/%d",
denom, num);
/* invert to get the framerate */
/* we're actually adding because step was negated above. This is because
* there is no _add function... */
if (!gst_value_fraction_subtract (&min, &min, &step)) {
- GST_WARNING_OBJECT (v4l2object->element, "could not step fraction!");
+ GST_WARNING_OBJECT (v4l2object->dbg_obj, "could not step fraction!");
break;
}
}
if (!added) {
/* no range was added, leave the default range from the template */
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"no range added, leaving default");
g_value_unset (&rates);
}
maxdenom >>= 1;
}
- GST_LOG_OBJECT (v4l2object->element,
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
"continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
num);
s = gst_structure_copy (template);
gst_structure_set (s, "width", G_TYPE_INT, (gint) width,
"height", G_TYPE_INT, (gint) height, NULL);
+
gst_v4l2_object_add_aspect_ratio (v4l2object, s);
- gst_v4l2_object_add_interlace_mode (v4l2object, s, width, height,
- pixelformat);
- gst_v4l2_object_add_colorspace (v4l2object, s, width, height, pixelformat);
+
+ if (!v4l2object->skip_try_fmt_probes) {
+ gst_v4l2_object_add_interlace_mode (v4l2object, s, width, height,
+ pixelformat);
+ gst_v4l2_object_add_colorspace (v4l2object, s, width, height, pixelformat);
+ }
if (G_IS_VALUE (&rates)) {
gst_v4l2src_value_simplify (&rates);
/* ERRORS */
enum_frameintervals_failed:
{
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
GST_FOURCC_ARGS (pixelformat), width, height);
goto return_data;
unknown_type:
{
/* I don't see how this is actually an error, we ignore the format then */
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
GST_FOURCC_ARGS (pixelformat), width, height, ival.type);
return NULL;
size.index = 0;
size.pixel_format = pixelformat;
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Enumerating frame sizes for %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (pixelformat));
- if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
+ if (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
goto enum_framesizes_failed;
if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
do {
- GST_LOG_OBJECT (v4l2object->element, "got discrete frame size %dx%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "got discrete frame size %dx%d",
size.discrete.width, size.discrete.height);
w = MIN (size.discrete.width, G_MAXINT);
}
size.index++;
- } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
- GST_DEBUG_OBJECT (v4l2object->element,
+ } while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"done iterating discrete frame sizes");
} else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
guint32 maxw, maxh, step_w, step_h;
- GST_DEBUG_OBJECT (v4l2object->element, "we have stepwise frame sizes:");
- GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have stepwise frame sizes:");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d",
size.stepwise.min_width);
- GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
size.stepwise.min_height);
- GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d",
size.stepwise.max_width);
- GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
size.stepwise.max_height);
- GST_DEBUG_OBJECT (v4l2object->element, "step width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step width: %d",
size.stepwise.step_width);
- GST_DEBUG_OBJECT (v4l2object->element, "step height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step height: %d",
size.stepwise.step_height);
w = MAX (size.stepwise.min_width, 1);
} else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
guint32 maxw, maxh;
- GST_DEBUG_OBJECT (v4l2object->element, "we have continuous frame sizes:");
- GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have continuous frame sizes:");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d",
size.stepwise.min_width);
- GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
size.stepwise.min_height);
- GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d",
size.stepwise.max_width);
- GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
size.stepwise.max_height);
w = MAX (size.stepwise.min_width, 1);
enum_framesizes_failed:
{
/* I don't see how this is actually an error */
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
" (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno));
goto default_frame_sizes;
{
/* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
* question doesn't actually support it yet */
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"No results for pixelformat %" GST_FOURCC_FORMAT
" enumerating frame sizes, trying fallback",
GST_FOURCC_ARGS (pixelformat));
}
unknown_type:
{
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT
": %u", GST_FOURCC_ARGS (pixelformat), size.type);
goto default_frame_sizes;
max_w = max_h = GST_V4L2_MAX_SIZE;
if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &min_w,
&min_h)) {
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Could not probe minimum capture size for pixelformat %"
GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
}
if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &max_w,
&max_h)) {
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Could not probe maximum capture size for pixelformat %"
GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
}
else
gst_structure_set (tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
- /* We could consider setting interlace mode from min and max. */
- gst_v4l2_object_add_interlace_mode (v4l2object, tmp, max_w, max_h,
- pixelformat);
gst_v4l2_object_add_aspect_ratio (v4l2object, tmp);
- /* We could consider to check colorspace for min too, in case it depends on
- * the size. But in this case, min and max could not be enough */
- gst_v4l2_object_add_colorspace (v4l2object, tmp, max_w, max_h, pixelformat);
+
+ if (!v4l2object->skip_try_fmt_probes) {
+ /* We could consider setting interlace mode from min and max. */
+ gst_v4l2_object_add_interlace_mode (v4l2object, tmp, max_w, max_h,
+ pixelformat);
+ /* We could consider to check colorspace for min too, in case it depends on
+ * the size. But in this case, min and max could not be enough */
+ gst_v4l2_object_add_colorspace (v4l2object, tmp, max_w, max_h,
+ pixelformat);
+ }
gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
return ret;
g_return_val_if_fail (width != NULL, FALSE);
g_return_val_if_fail (height != NULL, FALSE);
- GST_LOG_OBJECT (v4l2object->element,
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
"getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
*width, *height, GST_FOURCC_ARGS (pixelformat));
if (gst_v4l2_object_try_fmt (v4l2object, &fmt) < 0)
goto error;
- GST_LOG_OBJECT (v4l2object->element,
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
"got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
*width = fmt.fmt.pix.width;
*height = fmt.fmt.pix.height;
if (!gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) {
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
GST_FOURCC_ARGS (pixelformat), *width, *height, fmt.fmt.pix.field);
goto error;
error:
if (!ret) {
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unable to try format: %s", g_strerror (errno));
}
}
static gboolean
+ /* Checks whether the opened V4L2 device can export its buffers as DMABuf.
+  * Two conditions disable DMABuf:
+  *   - the current format is emulated (V4L2_FMT_FLAG_EMULATED), i.e. produced
+  *     by the libv4l2 conversion layer rather than the driver itself;
+  *   - the VIDIOC_EXPBUF ioctl is not implemented by the driver, detected by
+  *     issuing a deliberately invalid export request (index/plane = -1) and
+  *     checking for ENOTTY.
+  * Returns TRUE when DMABuf export appears to be supported.
+  * NOTE(review): errno is not reset before the probe ioctl; if the call were
+  * ever to succeed, a stale ENOTTY from an earlier syscall could be read —
+  * confirm the ioctl is guaranteed to fail (and thus set errno) here. */
+ gst_v4l2_object_is_dmabuf_supported (GstV4l2Object * v4l2object)
+ {
+ gboolean ret = TRUE;
+ struct v4l2_exportbuffer expbuf = {
+ .type = v4l2object->type,
+ .index = -1,
+ .plane = -1,
+ .flags = O_CLOEXEC | O_RDWR,
+ };
+
+ if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "libv4l2 converter detected, disabling DMABuf");
+ ret = FALSE;
+ }
+
+ /* Expected to fail, but ENOTTY tells us that it is not implemented. */
+ v4l2object->ioctl (v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
+ if (errno == ENOTTY)
+ ret = FALSE;
+
+ return ret;
+ }
+
+ static gboolean
gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps)
{
GstV4l2IOMode mode;
- GST_DEBUG_OBJECT (v4l2object->element, "initializing the %s system",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "initializing the %s system",
V4L2_TYPE_IS_OUTPUT (v4l2object->type) ? "output" : "capture");
GST_V4L2_CHECK_OPEN (v4l2object);
goto method_not_supported;
if (v4l2object->device_caps & V4L2_CAP_STREAMING) {
- if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
- mode = GST_V4L2_IO_MMAP;
- } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP)
+ if (v4l2object->req_mode == GST_V4L2_IO_AUTO) {
+ if (!V4L2_TYPE_IS_OUTPUT (v4l2object->type) &&
+ gst_v4l2_object_is_dmabuf_supported (v4l2object)) {
+ mode = GST_V4L2_IO_DMABUF;
+ } else {
+ mode = GST_V4L2_IO_MMAP;
+ }
+ }
+ } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
+ v4l2object->req_mode == GST_V4L2_IO_DMABUF)
goto method_not_supported;
/* if still no transport selected, error out */
if (mode == GST_V4L2_IO_AUTO)
goto no_supported_capture_method;
- GST_INFO_OBJECT (v4l2object->element, "accessing buffers via mode %d", mode);
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "accessing buffers via mode %d", mode);
v4l2object->mode = mode;
/* If min_buffers is not set, the driver either does not support the control or
gst_v4l2_get_driver_min_buffers (v4l2object);
/* Map the buffers */
- GST_LOG_OBJECT (v4l2object->element, "initiating buffer pool");
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "initiating buffer pool");
if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps)))
goto buffer_pool_new_failed;
offs += estride *
GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, padded_height);
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Extrapolated for plane %d with base stride %d: "
"stride %d, offset %" G_GSIZE_FORMAT, i, stride, info->stride[i],
info->offset[i]);
{
const GstVideoFormatInfo *finfo = info->finfo;
gboolean standard_stride = TRUE;
- gint stride, padded_width, padded_height, i;
+ gint stride, pstride, padded_width, padded_height, i;
if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_ENCODED) {
v4l2object->n_v4l2_planes = 1;
else
stride = format->fmt.pix.bytesperline;
- padded_width = stride / GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0);
+ pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0);
+ if (pstride) {
+ padded_width = stride / pstride;
+ } else {
+ /* pstride can be 0 for complex formats */
+ GST_WARNING_OBJECT (v4l2object->element,
+ "format %s has a pstride of 0, cannot compute padded with",
+ gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (info)));
+ padded_width = stride;
+ }
if (padded_width < format->fmt.pix.width)
- GST_WARNING_OBJECT (v4l2object->element,
- "Driver bug detected, stride is too small for the width");
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Driver bug detected, stride (%d) is too small for the width (%d)",
+ padded_width, format->fmt.pix.width);
align->padding_right = padded_width - info->width - align->padding_left;
/* adjust the offset to take into account left and top */
if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
if ((align->padding_left + align->padding_top) > 0)
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Left and top padding is not permitted for tiled formats");
} else {
for (i = 0; i < finfo->n_planes; i++) {
}
store_info:
- GST_DEBUG_OBJECT (v4l2object->element, "Got sizeimage %" G_GSIZE_FORMAT,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT,
info->size);
/* to avoid copies we need video meta if there is padding */
}
static gboolean
+ /* Compares the colorimetry negotiated with the device (@cinfo) against the
+  * colorimetry string found in the caps (@color).
+  * Returns TRUE when @color parses and is equal to @cinfo, with one special
+  * case: a "1:4:0:0" colorimetry (0..255 range, BT.601 matrix, unknown
+  * transfer/primaries — what jpegdec produces) is accepted when the device
+  * reports "1:4:7:1" (0..255, BT.601, sRGB transfer, BT.709 primaries).
+  * Returns FALSE if @color cannot be parsed or does not match. */
+ gst_v4l2_video_colorimetry_matches (const GstVideoColorimetry * cinfo,
+ const gchar * color)
+ {
+ GstVideoColorimetry ci;
+ /* 1:4:0:0 — the colorimetry jpegdec typically outputs */
+ static const GstVideoColorimetry ci_likely_jpeg = {
+ GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
+ GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN
+ };
+ /* 1:4:7:1 — the fully-specified JPEG colorimetry */
+ static const GstVideoColorimetry ci_jpeg = {
+ GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
+ GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709
+ };
+
+ if (!gst_video_colorimetry_from_string (&ci, color))
+ return FALSE;
+
+ if (gst_video_colorimetry_is_equal (&ci, cinfo))
+ return TRUE;
+
+ /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */
+ if (gst_video_colorimetry_is_equal (&ci, &ci_likely_jpeg)
+ && gst_video_colorimetry_is_equal (cinfo, &ci_jpeg))
+ return TRUE;
+
+ return FALSE;
+ }
+
+ static gboolean
gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
gboolean try_only, GstV4l2Error * error)
{
enum v4l2_quantization range = 0;
enum v4l2_ycbcr_encoding matrix = 0;
enum v4l2_xfer_func transfer = 0;
+ GstStructure *s;
+ gboolean disable_colorimetry = FALSE;
+
+ g_return_val_if_fail (!v4l2object->skip_try_fmt_probes ||
+ gst_caps_is_writable (caps), FALSE);
GST_V4L2_CHECK_OPEN (v4l2object);
if (!try_only)
n_v4l_planes = 1;
if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
- GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "interlaced video");
/* ideally we would differentiate between types of interlaced video
* but there is not sufficient information in the caps..
*/
field = V4L2_FIELD_INTERLACED;
} else {
- GST_DEBUG_OBJECT (v4l2object->element, "progressive video");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "progressive video");
field = V4L2_FIELD_NONE;
}
- if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
- /* We first pick th main colorspace from the primaries */
- switch (info.colorimetry.primaries) {
- case GST_VIDEO_COLOR_PRIMARIES_BT709:
- /* There is two colorspaces using these primaries, use the range to
- * differentiate */
- if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
- colorspace = V4L2_COLORSPACE_REC709;
- else
- colorspace = V4L2_COLORSPACE_SRGB;
- break;
- case GST_VIDEO_COLOR_PRIMARIES_BT470M:
- colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
- break;
- case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
- colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
- break;
- case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
- colorspace = V4L2_COLORSPACE_SMPTE170M;
- break;
- case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
- colorspace = V4L2_COLORSPACE_SMPTE240M;
- break;
+ /* We first pick the main colorspace from the primaries */
+ switch (info.colorimetry.primaries) {
+ case GST_VIDEO_COLOR_PRIMARIES_BT709:
+ /* There is two colorspaces using these primaries, use the range to
+ * differentiate */
+ if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
+ colorspace = V4L2_COLORSPACE_REC709;
+ else
+ colorspace = V4L2_COLORSPACE_SRGB;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT2020:
+ colorspace = V4L2_COLORSPACE_BT2020;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT470M:
+ colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
+ colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
+ colorspace = V4L2_COLORSPACE_SMPTE170M;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
+ colorspace = V4L2_COLORSPACE_SMPTE240M;
+ break;
- case GST_VIDEO_COLOR_PRIMARIES_FILM:
- case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
- /* We don't know, we will guess */
- break;
+ case GST_VIDEO_COLOR_PRIMARIES_FILM:
+ case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
+ /* We don't know, we will guess */
+ break;
- default:
- GST_WARNING_OBJECT (v4l2object->element,
- "Unknown colorimetry primaries %d", info.colorimetry.primaries);
- break;
- }
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry primaries %d", info.colorimetry.primaries);
+ break;
+ }
- switch (info.colorimetry.range) {
- case GST_VIDEO_COLOR_RANGE_0_255:
- range = V4L2_QUANTIZATION_FULL_RANGE;
- break;
- case GST_VIDEO_COLOR_RANGE_16_235:
- range = V4L2_QUANTIZATION_LIM_RANGE;
- break;
- case GST_VIDEO_COLOR_RANGE_UNKNOWN:
- /* We let the driver pick a default one */
- break;
- default:
- GST_WARNING_OBJECT (v4l2object->element,
- "Unknown colorimetry range %d", info.colorimetry.range);
- break;
- }
+ switch (info.colorimetry.range) {
+ case GST_VIDEO_COLOR_RANGE_0_255:
+ range = V4L2_QUANTIZATION_FULL_RANGE;
+ break;
+ case GST_VIDEO_COLOR_RANGE_16_235:
+ range = V4L2_QUANTIZATION_LIM_RANGE;
+ break;
+ case GST_VIDEO_COLOR_RANGE_UNKNOWN:
+ /* We let the driver pick a default one */
+ break;
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry range %d", info.colorimetry.range);
+ break;
+ }
- switch (info.colorimetry.matrix) {
- case GST_VIDEO_COLOR_MATRIX_RGB:
- /* Unspecified, leave to default */
- break;
- /* FCC is about the same as BT601 with less digit */
- case GST_VIDEO_COLOR_MATRIX_FCC:
- case GST_VIDEO_COLOR_MATRIX_BT601:
- matrix = V4L2_YCBCR_ENC_601;
- break;
- case GST_VIDEO_COLOR_MATRIX_BT709:
- matrix = V4L2_YCBCR_ENC_709;
- break;
- case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
- matrix = V4L2_YCBCR_ENC_SMPTE240M;
- break;
- case GST_VIDEO_COLOR_MATRIX_BT2020:
- matrix = V4L2_YCBCR_ENC_BT2020;
- break;
- case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
- /* We let the driver pick a default one */
- break;
- default:
- GST_WARNING_OBJECT (v4l2object->element,
- "Unknown colorimetry matrix %d", info.colorimetry.matrix);
- break;
- }
+ switch (info.colorimetry.matrix) {
+ case GST_VIDEO_COLOR_MATRIX_RGB:
+ /* Unspecified, leave to default */
+ break;
+ /* FCC is about the same as BT601 with less digit */
+ case GST_VIDEO_COLOR_MATRIX_FCC:
+ case GST_VIDEO_COLOR_MATRIX_BT601:
+ matrix = V4L2_YCBCR_ENC_601;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT709:
+ matrix = V4L2_YCBCR_ENC_709;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
+ matrix = V4L2_YCBCR_ENC_SMPTE240M;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT2020:
+ matrix = V4L2_YCBCR_ENC_BT2020;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
+ /* We let the driver pick a default one */
+ break;
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry matrix %d", info.colorimetry.matrix);
+ break;
+ }
- switch (info.colorimetry.transfer) {
- case GST_VIDEO_TRANSFER_GAMMA18:
- case GST_VIDEO_TRANSFER_GAMMA20:
- case GST_VIDEO_TRANSFER_GAMMA22:
- case GST_VIDEO_TRANSFER_GAMMA28:
- GST_WARNING_OBJECT (v4l2object->element,
- "GAMMA 18, 20, 22, 28 transfer functions not supported");
- /* fallthrough */
- case GST_VIDEO_TRANSFER_GAMMA10:
- transfer = V4L2_XFER_FUNC_NONE;
- break;
- case GST_VIDEO_TRANSFER_BT709:
- transfer = V4L2_XFER_FUNC_709;
- break;
- case GST_VIDEO_TRANSFER_SMPTE240M:
- transfer = V4L2_XFER_FUNC_SMPTE240M;
- break;
- case GST_VIDEO_TRANSFER_SRGB:
- transfer = V4L2_XFER_FUNC_SRGB;
- break;
- case GST_VIDEO_TRANSFER_LOG100:
- case GST_VIDEO_TRANSFER_LOG316:
- GST_WARNING_OBJECT (v4l2object->element,
- "LOG 100, 316 transfer functions not supported");
- /* FIXME No known sensible default, maybe AdobeRGB ? */
- break;
- case GST_VIDEO_TRANSFER_UNKNOWN:
- /* We let the driver pick a default one */
- break;
- default:
- GST_WARNING_OBJECT (v4l2object->element,
- "Unknown colorimetry tranfer %d", info.colorimetry.transfer);
- break;
- }
+ switch (info.colorimetry.transfer) {
+ case GST_VIDEO_TRANSFER_GAMMA18:
+ case GST_VIDEO_TRANSFER_GAMMA20:
+ case GST_VIDEO_TRANSFER_GAMMA22:
+ case GST_VIDEO_TRANSFER_GAMMA28:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "GAMMA 18, 20, 22, 28 transfer functions not supported");
+ /* fallthrough */
+ case GST_VIDEO_TRANSFER_GAMMA10:
+ transfer = V4L2_XFER_FUNC_NONE;
+ break;
+ case GST_VIDEO_TRANSFER_BT2020_12:
+ case GST_VIDEO_TRANSFER_BT709:
+ transfer = V4L2_XFER_FUNC_709;
+ break;
+ case GST_VIDEO_TRANSFER_SMPTE240M:
+ transfer = V4L2_XFER_FUNC_SMPTE240M;
+ break;
+ case GST_VIDEO_TRANSFER_SRGB:
+ transfer = V4L2_XFER_FUNC_SRGB;
+ break;
+ case GST_VIDEO_TRANSFER_LOG100:
+ case GST_VIDEO_TRANSFER_LOG316:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "LOG 100, 316 transfer functions not supported");
+ /* FIXME No known sensible default, maybe AdobeRGB ? */
+ break;
+ case GST_VIDEO_TRANSFER_UNKNOWN:
+ /* We let the driver pick a default one */
+ break;
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry tranfer %d", info.colorimetry.transfer);
+ break;
+ }
- if (colorspace == 0) {
- /* Try to guess colorspace according to pixelformat and size */
- if (GST_VIDEO_INFO_IS_YUV (&info)) {
+ if (colorspace == 0) {
+ /* Try to guess colorspace according to pixelformat and size */
+ if (GST_VIDEO_INFO_IS_YUV (&info)) {
+ if (range == V4L2_QUANTIZATION_FULL_RANGE
+ && matrix == V4L2_YCBCR_ENC_601 && transfer == 0) {
+ /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
+ * function most likely is JPEG */
+ colorspace = V4L2_COLORSPACE_JPEG;
+ transfer = V4L2_XFER_FUNC_SRGB;
+ } else {
/* SD streams likely use SMPTE170M and HD streams REC709 */
if (width <= 720 && height <= 576)
colorspace = V4L2_COLORSPACE_SMPTE170M;
else
colorspace = V4L2_COLORSPACE_REC709;
- } else if (GST_VIDEO_INFO_IS_RGB (&info)) {
- colorspace = V4L2_COLORSPACE_SRGB;
- transfer = V4L2_XFER_FUNC_NONE;
}
+ } else if (GST_VIDEO_INFO_IS_RGB (&info)) {
+ colorspace = V4L2_COLORSPACE_SRGB;
+ transfer = V4L2_XFER_FUNC_NONE;
}
}
- GST_DEBUG_OBJECT (v4l2object->element, "Desired format %dx%d, format "
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format %dx%d, format "
"%" GST_FOURCC_FORMAT " stride: %d", width, height,
GST_FOURCC_ARGS (pixelformat), GST_VIDEO_INFO_PLANE_STRIDE (&info, 0));
gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0);
format.type = v4l2object->type;
+
format.fmt.pix.width = width;
format.fmt.pix.height = height;
format.fmt.pix.pixelformat = pixelformat;
format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
}
- GST_DEBUG_OBJECT (v4l2object->element, "Desired format is %dx%d, format "
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format is %dx%d, format "
"%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width,
format.fmt.pix_mp.height,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
#ifndef GST_DISABLE_GST_DEBUG
if (is_mplane) {
for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
- GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d",
format.fmt.pix_mp.plane_fmt[i].bytesperline);
} else {
- GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d",
format.fmt.pix.bytesperline);
}
#endif
- if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
- if (is_mplane) {
- format.fmt.pix_mp.colorspace = colorspace;
- format.fmt.pix_mp.quantization = range;
- format.fmt.pix_mp.ycbcr_enc = matrix;
- format.fmt.pix_mp.xfer_func = transfer;
- } else {
- format.fmt.pix.colorspace = colorspace;
- format.fmt.pix.quantization = range;
- format.fmt.pix.ycbcr_enc = matrix;
- format.fmt.pix.xfer_func = transfer;
- }
-
- GST_DEBUG_OBJECT (v4l2object->element, "Desired colorspace is %d:%d:%d:%d",
- colorspace, range, matrix, transfer);
+ if (is_mplane) {
+ format.fmt.pix_mp.colorspace = colorspace;
+ format.fmt.pix_mp.quantization = range;
+ format.fmt.pix_mp.ycbcr_enc = matrix;
+ format.fmt.pix_mp.xfer_func = transfer;
+ } else {
+ format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
+ format.fmt.pix.colorspace = colorspace;
+ format.fmt.pix.quantization = range;
+ format.fmt.pix.ycbcr_enc = matrix;
+ format.fmt.pix.xfer_func = transfer;
}
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
+ colorspace, range, matrix, transfer);
+
if (try_only) {
- if (v4l2_ioctl (fd, VIDIOC_TRY_FMT, &format) < 0)
+ if (v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &format) < 0)
goto try_fmt_failed;
} else {
- if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0)
+ if (v4l2object->ioctl (fd, VIDIOC_S_FMT, &format) < 0)
goto set_fmt_failed;
}
- GST_DEBUG_OBJECT (v4l2object->element, "Got format of %dx%d, format "
- "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d",
+ if (is_mplane) {
+ colorspace = format.fmt.pix_mp.colorspace;
+ range = format.fmt.pix_mp.quantization;
+ matrix = format.fmt.pix_mp.ycbcr_enc;
+ transfer = format.fmt.pix_mp.xfer_func;
+ } else {
+ colorspace = format.fmt.pix.colorspace;
+ range = format.fmt.pix.quantization;
+ matrix = format.fmt.pix.ycbcr_enc;
+ transfer = format.fmt.pix.xfer_func;
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got format of %dx%d, format "
+ "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d",
format.fmt.pix.width, format.fmt.pix_mp.height,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
is_mplane ? format.fmt.pix_mp.num_planes : 1,
- is_mplane ? format.fmt.pix_mp.colorspace : format.fmt.pix.colorspace);
+ colorspace, range, matrix, transfer);
#ifndef GST_DISABLE_GST_DEBUG
if (is_mplane) {
for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
- GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d",
format.fmt.pix_mp.plane_fmt[i].bytesperline,
format.fmt.pix_mp.plane_fmt[i].sizeimage);
} else {
- GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d",
format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
}
#endif
if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
goto invalid_planes;
+ /* used to check colorimetry and interlace mode fields presence */
+ s = gst_caps_get_structure (caps, 0);
+
+ if (!gst_v4l2_object_get_interlace_mode (format.fmt.pix.field,
+ &info.interlace_mode))
+ goto invalid_field;
+ if (gst_structure_has_field (s, "interlace-mode")) {
+ if (format.fmt.pix.field != field)
+ goto invalid_field;
+ }
+
+ if (gst_v4l2_object_get_colorspace (&format, &info.colorimetry)) {
+ if (gst_structure_has_field (s, "colorimetry")) {
+ if (!gst_v4l2_video_colorimetry_matches (&info.colorimetry,
+ gst_structure_get_string (s, "colorimetry")))
+ goto invalid_colorimetry;
+ }
+ } else {
+ /* The driver (or libv4l2) is miss-behaving, just ignore colorimetry from
+ * the TRY_FMT */
+ disable_colorimetry = TRUE;
+ if (gst_structure_has_field (s, "colorimetry"))
+ gst_structure_remove_field (s, "colorimetry");
+ }
+
+ /* In case we have skipped the try_fmt probes, we'll need to set the
+ * colorimetry and interlace-mode back into the caps. */
+ if (v4l2object->skip_try_fmt_probes) {
+ if (!disable_colorimetry && !gst_structure_has_field (s, "colorimetry")) {
+ gchar *str = gst_video_colorimetry_to_string (&info.colorimetry);
+ gst_structure_set (s, "colorimetry", G_TYPE_STRING, str, NULL);
+ g_free (str);
+ }
+
+ if (!gst_structure_has_field (s, "interlace-mode"))
+ gst_structure_set (s, "interlace-mode", G_TYPE_STRING,
+ gst_video_interlace_mode_to_string (info.interlace_mode), NULL);
+ }
+
if (try_only) /* good enough for trying only */
return TRUE;
ctl.id = V4L2_CID_ALPHA_COMPONENT;
ctl.value = 0xff;
- if (v4l2_ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0)
- GST_WARNING_OBJECT (v4l2object->element,
+ if (v4l2object->ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0)
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Failed to set alpha component value");
}
/* Is there a reason we require the caller to always specify a framerate? */
- GST_DEBUG_OBJECT (v4l2object->element, "Desired framerate: %u/%u", fps_n,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n,
fps_d);
memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
streamparm.type = v4l2object->type;
- if (v4l2_ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0)
+ if (v4l2object->ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0)
goto get_parm_failed;
if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
GST_VIDEO_INFO_FPS_D (&info) =
streamparm.parm.capture.timeperframe.numerator;
- GST_DEBUG_OBJECT (v4l2object->element, "Got capture framerate: %u/%u",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got capture framerate: %u/%u",
streamparm.parm.capture.timeperframe.denominator,
streamparm.parm.capture.timeperframe.numerator);
* causing them to not output data (several models of Thinkpad cameras
* have this problem at least).
* So, don't skip. */
- GST_LOG_OBJECT (v4l2object->element, "Setting capture framerate to %u/%u",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting capture framerate to %u/%u",
fps_n, fps_d);
/* We want to change the frame rate, so check whether we can. Some cheap USB
* cameras don't have the capability */
if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Not setting capture framerate (not supported)");
goto done;
}
streamparm.parm.capture.timeperframe.denominator = fps_n;
/* some cheap USB cam's won't accept any change */
- if (v4l2_ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
+ if (v4l2object->ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
goto set_parm_failed;
if (streamparm.parm.capture.timeperframe.numerator > 0 &&
fps_d = streamparm.parm.capture.timeperframe.numerator;
fps_n = streamparm.parm.capture.timeperframe.denominator;
- GST_INFO_OBJECT (v4l2object->element, "Set capture framerate to %u/%u",
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "Set capture framerate to %u/%u",
fps_n, fps_d);
} else {
/* fix v4l2 capture driver to provide framerate values */
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d);
}
GST_VIDEO_INFO_FPS_D (&info) =
streamparm.parm.output.timeperframe.numerator;
- GST_DEBUG_OBJECT (v4l2object->element, "Got output framerate: %u/%u",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got output framerate: %u/%u",
streamparm.parm.output.timeperframe.denominator,
streamparm.parm.output.timeperframe.numerator);
- GST_LOG_OBJECT (v4l2object->element, "Setting output framerate to %u/%u",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting output framerate to %u/%u",
fps_n, fps_d);
if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Not setting output framerate (not supported)");
goto done;
}
streamparm.parm.output.timeperframe.numerator = fps_d;
streamparm.parm.output.timeperframe.denominator = fps_n;
- if (v4l2_ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
+ if (v4l2object->ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
goto set_parm_failed;
if (streamparm.parm.output.timeperframe.numerator > 0 &&
fps_d = streamparm.parm.output.timeperframe.numerator;
fps_n = streamparm.parm.output.timeperframe.denominator;
- GST_INFO_OBJECT (v4l2object->element, "Set output framerate to %u/%u",
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "Set output framerate to %u/%u",
fps_n, fps_d);
} else {
/* fix v4l2 output driver to provide framerate values */
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d);
}
/* ERRORS */
invalid_caps:
{
- GST_DEBUG_OBJECT (v4l2object->element, "can't parse caps %" GST_PTR_FORMAT,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT,
caps);
return FALSE;
}
try_fmt_failed:
{
- if (errno == EBUSY) {
- GST_V4L2_ERROR (error, RESOURCE, BUSY,
- (_("Device '%s' is busy"), v4l2object->videodev),
+ if (errno == EINVAL) {
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' has no supported format"), v4l2object->videodev),
+ ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+ GST_FOURCC_ARGS (pixelformat), width, height,
+ g_strerror (errno)));
+ } else {
+ GST_V4L2_ERROR (error, RESOURCE, FAILED,
+ (_("Device '%s' failed during initialization"),
+ v4l2object->videodev),
("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
GST_FOURCC_ARGS (pixelformat), width, height,
g_strerror (errno)));
("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
GST_FOURCC_ARGS (pixelformat), width, height,
g_strerror (errno)));
- } else {
+ } else if (errno == EINVAL) {
GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
- (_("Device '%s' cannot capture at %dx%d"),
- v4l2object->videodev, width, height),
+ (_("Device '%s' has no supported format"), v4l2object->videodev),
+ ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+ GST_FOURCC_ARGS (pixelformat), width, height,
+ g_strerror (errno)));
+ } else {
+ GST_V4L2_ERROR (error, RESOURCE, FAILED,
+ (_("Device '%s' failed during initialization"),
+ v4l2object->videodev),
("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
GST_FOURCC_ARGS (pixelformat), width, height,
g_strerror (errno)));
}
invalid_dimensions:
{
- if (!try_only) {
- GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
- (_("Device '%s' cannot capture at %dx%d"),
- v4l2object->videodev, width, height),
- ("Tried to capture at %dx%d, but device returned size %dx%d",
- width, height, format.fmt.pix.width, format.fmt.pix.height));
- }
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' cannot capture at %dx%d"),
+ v4l2object->videodev, width, height),
+ ("Tried to capture at %dx%d, but device returned size %dx%d",
+ width, height, format.fmt.pix.width, format.fmt.pix.height));
return FALSE;
}
invalid_pixelformat:
{
- if (!try_only) {
- GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
- (_("Device '%s' cannot capture in the specified format"),
- v4l2object->videodev),
- ("Tried to capture in %" GST_FOURCC_FORMAT
- ", but device returned format" " %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (pixelformat),
- GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
- }
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' cannot capture in the specified format"),
+ v4l2object->videodev),
+ ("Tried to capture in %" GST_FOURCC_FORMAT
+ ", but device returned format" " %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (pixelformat),
+ GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
return FALSE;
}
invalid_planes:
{
- if (!try_only) {
- GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
- (_("Device '%s' does support non-contiguous planes"),
- v4l2object->videodev),
- ("Device wants %d planes", format.fmt.pix_mp.num_planes));
- }
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' does support non-contiguous planes"),
+ v4l2object->videodev),
+ ("Device wants %d planes", format.fmt.pix_mp.num_planes));
+ return FALSE;
+ }
+ invalid_field:
+ {
+ enum v4l2_field wanted_field;
+
+ if (is_mplane)
+ wanted_field = format.fmt.pix_mp.field;
+ else
+ wanted_field = format.fmt.pix.field;
+
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' does not support %s interlacing"),
+ v4l2object->videodev,
+ field == V4L2_FIELD_NONE ? "progressive" : "interleaved"),
+ ("Device wants %s interlacing",
+ wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
+ return FALSE;
+ }
+ invalid_colorimetry:
+ {
+ gchar *wanted_colorimetry;
+
+ wanted_colorimetry = gst_video_colorimetry_to_string (&info.colorimetry);
+
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' does not support %s colorimetry"),
+ v4l2object->videodev, gst_structure_get_string (s, "colorimetry")),
+ ("Device wants %s colorimetry", wanted_colorimetry));
+
+ g_free (wanted_colorimetry);
return FALSE;
}
get_parm_failed:
gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps,
GstV4l2Error * error)
{
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT,
+ caps);
return gst_v4l2_object_set_format_full (v4l2object, caps, FALSE, error);
}
gst_v4l2_object_try_format (GstV4l2Object * v4l2object, GstCaps * caps,
GstV4l2Error * error)
{
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT,
+ caps);
return gst_v4l2_object_set_format_full (v4l2object, caps, TRUE, error);
}
memset (&fmt, 0x00, sizeof (struct v4l2_format));
fmt.type = v4l2object->type;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
goto get_fmt_failed;
fmtdesc = gst_v4l2_object_get_format_from_fourcc (v4l2object,
memset (&sel, 0, sizeof (struct v4l2_selection));
sel.type = v4l2object->type;
sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0) {
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0) {
r = &sel.r;
} else {
/* For ancient kernels, fall back to G_CROP */
memset (&crop, 0, sizeof (struct v4l2_crop));
crop.type = v4l2object->type;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0)
r = &crop.c;
}
if (r) {
gboolean
gst_v4l2_object_set_crop (GstV4l2Object * obj)
{
+ struct v4l2_selection sel = { 0 };
struct v4l2_crop crop = { 0 };
+ sel.type = obj->type;
+ sel.target = V4L2_SEL_TGT_CROP;
+ sel.flags = 0;
+ sel.r.left = obj->align.padding_left;
+ sel.r.top = obj->align.padding_top;
+ sel.r.width = obj->info.width;
+ sel.r.height = obj->info.height;
+
crop.type = obj->type;
- crop.c.left = obj->align.padding_left;
- crop.c.top = obj->align.padding_top;
- crop.c.width = obj->info.width;
- crop.c.height = obj->info.height;
+ crop.c = sel.r;
if (obj->align.padding_left + obj->align.padding_top +
obj->align.padding_right + obj->align.padding_bottom == 0) {
- GST_DEBUG_OBJECT (obj->element, "no cropping needed");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "no cropping needed");
return TRUE;
}
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
crop.c.width, crop.c.height);
- if (v4l2_ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) {
- GST_WARNING_OBJECT (obj->element, "VIDIOC_S_CROP failed");
- return FALSE;
- }
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0) {
+ if (errno != ENOTTY) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
+ g_strerror (errno));
+ return FALSE;
+ } else {
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_S_CROP failed");
+ return FALSE;
+ }
- if (v4l2_ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
- GST_WARNING_OBJECT (obj->element, "VIDIOC_G_CROP failed");
- return FALSE;
+ if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_G_CROP failed");
+ return FALSE;
+ }
+
+ sel.r = crop.c;
+ }
}
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
crop.c.width, crop.c.height);
}
gboolean
+ /* Returns TRUE if the caps currently configured on the object's buffer pool
+  * are a subset of @caps, i.e. the pool's existing negotiation already
+  * satisfies the new caps. Returns FALSE when there is no pool yet or the
+  * pool config carries no caps. */
+ gst_v4l2_object_caps_is_subset (GstV4l2Object * v4l2object, GstCaps * caps)
+ {
+ GstStructure *config;
+ GstCaps *oldcaps;
+ gboolean ret;
+
+ if (!v4l2object->pool)
+ return FALSE;
+
+ config = gst_buffer_pool_get_config (v4l2object->pool);
+ gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+ /* oldcaps is only borrowed from config, so use it before freeing config */
+ ret = oldcaps && gst_caps_is_subset (oldcaps, caps);
+
+ gst_structure_free (config);
+
+ return ret;
+ }
+
+ /* Returns the caps currently set on the object's buffer pool config, or NULL
+  * if no pool exists or the config has no caps. The caller receives its own
+  * reference: the caps are ref'd before the config (which holds them) is
+  * freed. Caller must gst_caps_unref() the result. */
+ GstCaps *
+ gst_v4l2_object_get_current_caps (GstV4l2Object * v4l2object)
+ {
+ GstStructure *config;
+ GstCaps *oldcaps;
+
+ if (!v4l2object->pool)
+ return NULL;
+
+ config = gst_buffer_pool_get_config (v4l2object->pool);
+ gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+ /* take our own ref before the config holding the caps is freed */
+ if (oldcaps)
+ gst_caps_ref (oldcaps);
+
+ gst_structure_free (config);
+
+ return oldcaps;
+ }
+
+ gboolean
gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
{
gboolean ret = TRUE;
- GST_LOG_OBJECT (v4l2object->element, "start flushing");
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "start flushing");
if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
gst_buffer_pool_set_flushing (v4l2object->pool, TRUE);
{
gboolean ret = TRUE;
- GST_LOG_OBJECT (v4l2object->element, "stop flushing");
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stop flushing");
if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
gst_buffer_pool_set_flushing (v4l2object->pool, FALSE);
gboolean
gst_v4l2_object_stop (GstV4l2Object * v4l2object)
{
- GST_DEBUG_OBJECT (v4l2object->element, "stopping");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "stopping");
if (!GST_V4L2_IS_OPEN (v4l2object))
goto done;
goto done;
if (v4l2object->pool) {
- GST_DEBUG_OBJECT (v4l2object->element, "deactivating pool");
- gst_buffer_pool_set_active (v4l2object->pool, FALSE);
- gst_object_unref (v4l2object->pool);
+ if (!gst_v4l2_buffer_pool_orphan (&v4l2object->pool)) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deactivating pool");
+ gst_buffer_pool_set_active (v4l2object->pool, FALSE);
+ gst_object_unref (v4l2object->pool);
+ }
v4l2object->pool = NULL;
}
ret = gst_caps_new_empty ();
+ if (v4l2object->keep_aspect && !v4l2object->par) {
+ struct v4l2_cropcap cropcap;
+
+ memset (&cropcap, 0, sizeof (cropcap));
+
+ cropcap.type = v4l2object->type;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0) {
+ if (errno != ENOTTY)
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
+ g_strerror (errno));
+ } else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator) {
+ v4l2object->par = g_new0 (GValue, 1);
+ g_value_init (v4l2object->par, GST_TYPE_FRACTION);
+ gst_value_set_fraction (v4l2object->par, cropcap.pixelaspect.numerator,
+ cropcap.pixelaspect.denominator);
+ }
+ }
+
for (walk = formats; walk; walk = walk->next) {
struct v4l2_fmtdesc *format;
GstStructure *template;
+ GstCaps *tmp;
format = (struct v4l2_fmtdesc *) walk->data;
template = gst_v4l2_object_v4l2fourcc_to_bare_struct (format->pixelformat);
- if (template) {
- GstCaps *tmp;
+ if (!template) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "unknown format %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (format->pixelformat));
+ continue;
+ }
+
+ /* If we have a filter, check if we need to probe this format or not */
+ if (filter) {
+ GstCaps *format_caps = gst_caps_new_empty ();
- tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
- format->pixelformat, template);
- if (tmp)
- gst_caps_append (ret, tmp);
+ gst_caps_append_structure (format_caps, gst_structure_copy (template));
- gst_structure_free (template);
- } else {
- GST_DEBUG_OBJECT (v4l2object->element, "unknown format %u",
- format->pixelformat);
+ if (!gst_caps_can_intersect (format_caps, filter)) {
+ gst_caps_unref (format_caps);
+ gst_structure_free (template);
+ continue;
+ }
+
+ gst_caps_unref (format_caps);
}
+
+ tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
+ format->pixelformat, template);
+ if (tmp)
+ gst_caps_append (ret, tmp);
+
+ gst_structure_free (template);
}
if (filter) {
gst_caps_unref (tmp);
}
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
+
return ret;
}
ret = gst_caps_ref (v4l2object->probed_caps);
}
- GST_INFO_OBJECT (v4l2object->element, "probed caps: %" GST_PTR_FORMAT, ret);
-
return ret;
}
GstAllocator *allocator = NULL;
GstAllocationParams params = { 0 };
- GST_DEBUG_OBJECT (obj->element, "decide allocation");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "decide allocation");
g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE);
update = FALSE;
}
- GST_DEBUG_OBJECT (obj->element, "allocation: size:%u min:%u max:%u pool:%"
+ GST_DEBUG_OBJECT (obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%"
GST_PTR_FORMAT, size, min, max, pool);
has_video_meta =
if (pool) {
/* in READ/WRITE mode, prefer a downstream pool because our own pool
* doesn't help much, we have to write to it as well */
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"read/write mode: using downstream pool");
/* use the bigest size, when we use our own pool we can't really do any
* other size than what the hardware gives us but for downstream pools
size = MAX (size, obj->info.size);
} else if (can_share_own_pool) {
/* no downstream pool, use our own then */
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"read/write mode: no downstream pool, using our own");
pool = gst_object_ref (obj->pool);
size = obj->info.size;
gst_object_unref (pool);
pool = gst_object_ref (obj->pool);
size = obj->info.size;
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
pushing_from_our_pool = TRUE;
} else if (pool) {
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
pool);
} else {
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"streaming mode: no usable pool, copying to generic pool");
size = MAX (size, obj->info.size);
}
break;
case GST_V4L2_IO_AUTO:
default:
- GST_WARNING_OBJECT (obj->element, "unhandled mode");
+ GST_WARNING_OBJECT (obj->dbg_obj, "unhandled mode");
break;
}
if (pushing_from_our_pool) {
/* When pushing from our own pool, we need what downstream one, to be able
* to fill the pipeline, the minimum required to decoder according to the
- * driver and 1 more, so we don't endup up with everything downstream or
- * held by the decoder. */
- own_min = min + obj->min_buffers + 1;
+ * driver and 2 more, so we don't end up with everything downstream or
+ * held by the decoder. We account 2 buffers for v4l2 so when one is being
+ * pushed downstream the other one can already be queued for the next
+ * frame. */
+ own_min = min + obj->min_buffers + 2;
/* If no allocation parameters where provided, allow for a little more
* buffers and enable copy threshold */
if (!update) {
- own_min += 3;
+ own_min += 2;
gst_v4l2_buffer_pool_copy_at_threshold (GST_V4L2_BUFFER_POOL (pool),
TRUE);
} else {
config = gst_buffer_pool_get_config (obj->pool);
if (obj->need_video_meta || has_video_meta) {
- GST_DEBUG_OBJECT (obj->element, "activate Video Meta");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
gst_buffer_pool_config_add_option (config,
GST_BUFFER_POOL_OPTION_VIDEO_META);
}
gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
gst_buffer_pool_config_set_params (config, caps, size, own_min, 0);
- GST_DEBUG_OBJECT (obj->element, "setting own pool config to %"
+ GST_DEBUG_OBJECT (obj->dbg_obj, "setting own pool config to %"
GST_PTR_FORMAT, config);
/* Our pool often need to adjust the value */
if (!gst_buffer_pool_set_config (obj->pool, config)) {
config = gst_buffer_pool_get_config (obj->pool);
- GST_DEBUG_OBJECT (obj->element, "own pool config changed to %"
+ GST_DEBUG_OBJECT (obj->dbg_obj, "own pool config changed to %"
GST_PTR_FORMAT, config);
/* our pool will adjust the maximum buffer, which we are fine with */
gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
gst_buffer_pool_config_set_params (config, caps, size, min, max);
- GST_DEBUG_OBJECT (obj->element, "setting other pool config to %"
+ GST_DEBUG_OBJECT (obj->dbg_obj, "setting other pool config to %"
GST_PTR_FORMAT, config);
/* if downstream supports video metadata, add this to the pool config */
if (has_video_meta) {
- GST_DEBUG_OBJECT (obj->element, "activate Video Meta");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
gst_buffer_pool_config_add_option (config,
GST_BUFFER_POOL_OPTION_VIDEO_META);
}
if (caps == NULL)
goto no_caps;
- if ((pool = obj->pool))
- gst_object_ref (pool);
+ switch (obj->mode) {
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_DMABUF:
+ if ((pool = obj->pool))
+ gst_object_ref (pool);
+ break;
+ default:
+ pool = NULL;
+ break;
+ }
if (pool != NULL) {
GstCaps *pcaps;
config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_get_params (config, &pcaps, NULL, NULL, NULL);
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"we had a pool with caps %" GST_PTR_FORMAT, pcaps);
if (!gst_caps_is_equal (caps, pcaps)) {
gst_structure_free (config);
/* ERRORS */
no_caps:
{
- GST_DEBUG_OBJECT (obj->element, "no caps specified");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "no caps specified");
return FALSE;
}
different_caps:
{
/* different caps, we can't use this pool */
- GST_DEBUG_OBJECT (obj->element, "pool has different caps");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "pool has different caps");
+ return FALSE;
+ }
+ }
+
+ gboolean
+ gst_v4l2_object_try_import (GstV4l2Object * obj, GstBuffer * buffer)
+ {
+ GstVideoMeta *vmeta;
+ guint n_mem = gst_buffer_n_memory (buffer);
+
+ /* only import if requested */
+ switch (obj->mode) {
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ break;
+ default:
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "The io-mode does not enable importation");
+ return FALSE;
+ }
+
+ vmeta = gst_buffer_get_video_meta (buffer);
+ if (!vmeta && obj->need_video_meta) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Downstream buffer uses standard "
+ "stride/offset while the driver does not.");
+ return FALSE;
+ }
+
+ /* we need matching strides/offsets and size */
+ if (vmeta) {
+ guint p;
+ gboolean need_fmt_update = FALSE;
+
+ if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES (&obj->info)) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Cannot import buffers with different number planes");
+ return FALSE;
+ }
+
+ for (p = 0; p < vmeta->n_planes; p++) {
+ if (vmeta->stride[p] < obj->info.stride[p]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Not importing as remote stride %i is smaller then %i on plane %u",
+ vmeta->stride[p], obj->info.stride[p], p);
+ return FALSE;
+ } else if (vmeta->stride[p] > obj->info.stride[p]) {
+ need_fmt_update = TRUE;
+ }
+
+ if (vmeta->offset[p] < obj->info.offset[p]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Not importing as offset %" G_GSIZE_FORMAT
+ " is smaller then %" G_GSIZE_FORMAT " on plane %u",
+ vmeta->offset[p], obj->info.offset[p], p);
+ return FALSE;
+ } else if (vmeta->offset[p] > obj->info.offset[p]) {
+ need_fmt_update = TRUE;
+ }
+ }
+
+ if (need_fmt_update) {
+ struct v4l2_format format;
+ gint wanted_stride[GST_VIDEO_MAX_PLANES] = { 0, };
+
+ format = obj->format;
+
+ /* update the current format with the stride we want to import from */
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ guint i;
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted strides:");
+
+ for (i = 0; i < obj->n_v4l2_planes; i++) {
+ gint stride = vmeta->stride[i];
+
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+ stride = GST_VIDEO_TILE_X_TILES (stride) <<
+ GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+ format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
+ wanted_stride[i] = stride;
+ GST_DEBUG_OBJECT (obj->dbg_obj, " [%u] %i", i, wanted_stride[i]);
+ }
+ } else {
+ gint stride = vmeta->stride[0];
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted stride: %i", stride);
+
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+ stride = GST_VIDEO_TILE_X_TILES (stride) <<
+ GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+ format.fmt.pix.bytesperline = stride;
+ wanted_stride[0] = stride;
+ }
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_FMT, &format) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Something went wrong trying to update current format: %s",
+ g_strerror (errno));
+ return FALSE;
+ }
+
+ gst_v4l2_object_save_format (obj, obj->fmtdesc, &format, &obj->info,
+ &obj->align);
+
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ guint i;
+
+ for (i = 0; i < obj->n_v4l2_planes; i++) {
+ if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "[%i] Driver did not accept the new stride (wants %i, got %i)",
+ i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
+ wanted_stride[i]);
+ return FALSE;
+ }
+ }
+ } else {
+ if (format.fmt.pix.bytesperline != wanted_stride[0]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Driver did not accept the new stride (wants %i, got %i)",
+ format.fmt.pix.bytesperline, wanted_stride[0]);
+ return FALSE;
+ }
+ }
+ }
+ }
+
+ /* we can always import a single memory buffer, but otherwise we need the
+ * same number of memory objects. */
+ if (n_mem != 1 && n_mem != obj->n_v4l2_planes) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Can only import %i memory, "
+ "buffers contains %u memory", obj->n_v4l2_planes, n_mem);
return FALSE;
}
+
+ /* For DMABuf importation we need DMABuf of course */
+ if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT) {
+ guint i;
+
+ for (i = 0; i < n_mem; i++) {
+ GstMemory *mem = gst_buffer_peek_memory (buffer, i);
+
+ if (!gst_is_dmabuf_memory (mem)) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Cannot import non-DMABuf memory.");
+ return FALSE;
+ }
+ }
+ }
+
+ /* for the remaining, only the kernel driver can tell */
+ return TRUE;
}
* ]| This pipeline shows the video captured from a webcam that delivers jpeg
* images.
* </refsect2>
+ *
+ * Since 1.14, the use of libv4l2 has been disabled due to major bugs in the
+ * emulation layer. To enable usage of this library, set the environment
+ * variable GST_V4L2_USE_LIBV4L2=1.
*/
#ifdef HAVE_CONFIG_H
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ PROP_CAMERA_ID,
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
PROP_LAST
};
static gboolean gst_v4l2src_unlock (GstBaseSrc * src);
static gboolean gst_v4l2src_unlock_stop (GstBaseSrc * src);
static gboolean gst_v4l2src_stop (GstBaseSrc * src);
- static gboolean gst_v4l2src_set_caps (GstBaseSrc * src, GstCaps * caps);
static GstCaps *gst_v4l2src_get_caps (GstBaseSrc * src, GstCaps * filter);
static gboolean gst_v4l2src_query (GstBaseSrc * bsrc, GstQuery * query);
static gboolean gst_v4l2src_decide_allocation (GstBaseSrc * src,
GstQuery * query);
static GstFlowReturn gst_v4l2src_create (GstPushSrc * src, GstBuffer ** out);
- static GstCaps *gst_v4l2src_fixate (GstBaseSrc * basesrc, GstCaps * caps);
+ static GstCaps *gst_v4l2src_fixate (GstBaseSrc * basesrc, GstCaps * caps,
+ GstStructure * pref_s);
static gboolean gst_v4l2src_negotiate (GstBaseSrc * basesrc);
static void gst_v4l2src_set_property (GObject * object, guint prop_id,
gst_v4l2_object_install_properties_helper (gobject_class,
DEFAULT_PROP_DEVICE);
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ /**
+ * GstV4l2Src:camera-id:
+ *
+ * The value which is set by application will be used as a number of device node.
+ * ex) 1 -> /dev/video1
+ */
+ g_object_class_install_property (gobject_class, PROP_CAMERA_ID,
+ g_param_spec_uint ("camera-id", "Camera ID",
+ "Camera ID for device node", 0, G_MAXUINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
+
/**
* GstV4l2Src::prepare-format:
* @v4l2src: the v4l2src instance
* happen prior to the format being set.
* This is mostly useful for UVC H264 encoding cameras which need the H264
* Probe & Commit to happen prior to the normal Probe & Commit.
- *
- * Since: 0.10.32
*/
gst_v4l2_signals[SIGNAL_PRE_SET_FORMAT] = g_signal_new ("prepare-format",
G_TYPE_FROM_CLASS (klass),
gst_v4l2_object_get_all_caps ()));
basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_v4l2src_get_caps);
- basesrc_class->set_caps = GST_DEBUG_FUNCPTR (gst_v4l2src_set_caps);
basesrc_class->start = GST_DEBUG_FUNCPTR (gst_v4l2src_start);
basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_v4l2src_unlock);
basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_v4l2src_unlock_stop);
basesrc_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2src_stop);
basesrc_class->query = GST_DEBUG_FUNCPTR (gst_v4l2src_query);
- basesrc_class->fixate = GST_DEBUG_FUNCPTR (gst_v4l2src_fixate);
basesrc_class->negotiate = GST_DEBUG_FUNCPTR (gst_v4l2src_negotiate);
basesrc_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_v4l2src_decide_allocation);
{
/* fixme: give an update_fps_function */
v4l2src->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2src),
- V4L2_BUF_TYPE_VIDEO_CAPTURE, DEFAULT_PROP_DEVICE,
- gst_v4l2_get_input, gst_v4l2_set_input, NULL);
+ GST_OBJECT (GST_BASE_SRC_PAD (v4l2src)), V4L2_BUF_TYPE_VIDEO_CAPTURE,
+ DEFAULT_PROP_DEVICE, gst_v4l2_get_input, gst_v4l2_set_input, NULL);
+
+ /* Avoid the slow probes */
+ v4l2src->v4l2object->skip_try_fmt_probes = TRUE;
gst_base_src_set_format (GST_BASE_SRC (v4l2src), GST_FORMAT_TIME);
gst_base_src_set_live (GST_BASE_SRC (v4l2src), TRUE);
if (!gst_v4l2_object_set_property_helper (v4l2src->v4l2object,
prop_id, value, pspec)) {
switch (prop_id) {
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ case PROP_CAMERA_ID:
+ g_free (v4l2src->v4l2object->videodev);
+
+ v4l2src->camera_id = g_value_get_uint (value);
+ v4l2src->v4l2object->videodev = g_strdup_printf ("/dev/video%u", v4l2src->camera_id);
+
+ GST_INFO_OBJECT(v4l2src, "videodev [%s]", v4l2src->v4l2object->videodev);
+ break;
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
if (!gst_v4l2_object_get_property_helper (v4l2src->v4l2object,
prop_id, value, pspec)) {
switch (prop_id) {
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ case PROP_CAMERA_ID:
+ g_value_set_uint (value, v4l2src->camera_id);
+ break;
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
- /* this function is a bit of a last resort */
+ struct PreferedCapsInfo
+ {
+ gint width;
+ gint height;
+ gint fps_n;
+ gint fps_d;
+ };
+
+ static gboolean
+ gst_vl42_src_fixate_fields (GQuark field_id, GValue * value, gpointer user_data)
+ {
+ GstStructure *s = user_data;
+
+ if (field_id == g_quark_from_string ("interlace-mode"))
+ return TRUE;
+
+ if (field_id == g_quark_from_string ("colorimetry"))
+ return TRUE;
+
+ gst_structure_fixate_field (s, g_quark_to_string (field_id));
+
+ return TRUE;
+ }
+
+ static void
+ gst_v4l2_src_fixate_struct_with_preference (GstStructure * s,
+ struct PreferedCapsInfo *pref)
+ {
+ if (gst_structure_has_field (s, "width"))
+ gst_structure_fixate_field_nearest_int (s, "width", pref->width);
+
+ if (gst_structure_has_field (s, "height"))
+ gst_structure_fixate_field_nearest_int (s, "height", pref->height);
+
+ if (gst_structure_has_field (s, "framerate"))
+ gst_structure_fixate_field_nearest_fraction (s, "framerate", pref->fps_n,
+ pref->fps_d);
+
+ /* Finally, fixate everything else except the interlace-mode and colorimetry
+ * which still need further negotiation as it wasn't probed */
+ gst_structure_map_in_place (s, gst_vl42_src_fixate_fields, s);
+ }
+
+ static void
+ gst_v4l2_src_parse_fixed_struct (GstStructure * s,
+ gint * width, gint * height, gint * fps_n, gint * fps_d)
+ {
+ if (gst_structure_has_field (s, "width") && width)
+ gst_structure_get_int (s, "width", width);
+
+ if (gst_structure_has_field (s, "height") && height)
+ gst_structure_get_int (s, "height", height);
+
+ if (gst_structure_has_field (s, "framerate") && fps_n && fps_d)
+ gst_structure_get_fraction (s, "framerate", fps_n, fps_d);
+ }
+
+ /* TODO Consider framerate */
+ static gint
+ gst_v4l2src_fixed_caps_compare (GstCaps * caps_a, GstCaps * caps_b,
+ struct PreferedCapsInfo *pref)
+ {
+ GstStructure *a, *b;
+ gint aw = G_MAXINT, ah = G_MAXINT, ad = G_MAXINT;
+ gint bw = G_MAXINT, bh = G_MAXINT, bd = G_MAXINT;
+ gint ret;
+
+ a = gst_caps_get_structure (caps_a, 0);
+ b = gst_caps_get_structure (caps_b, 0);
+
+ gst_v4l2_src_parse_fixed_struct (a, &aw, &ah, NULL, NULL);
+ gst_v4l2_src_parse_fixed_struct (b, &bw, &bh, NULL, NULL);
+
+ /* When both are smaller than pref, just append to the end */
+ if ((bw < pref->width || bh < pref->height)
+ && (aw < pref->width || ah < pref->height)) {
+ ret = 1;
+ goto done;
+ }
+
+ /* If a is smaller than pref and not b, then a goes after b */
+ if (aw < pref->width || ah < pref->height) {
+ ret = 1;
+ goto done;
+ }
+
+ /* If b is smaller than pref and not a, then a goes before b */
+ if (bw < pref->width || bh < pref->height) {
+ ret = -1;
+ goto done;
+ }
+
+ /* Both are larger than or equal to the preference, prefer the smallest */
+ ad = MAX (1, aw - pref->width) * MAX (1, ah - pref->height);
+ bd = MAX (1, bw - pref->width) * MAX (1, bh - pref->height);
+
+ /* Adjust slightly in case width/height matched the preference */
+ if (aw == pref->width)
+ ad -= 1;
+
+ if (ah == pref->height)
+ ad -= 1;
+
+ if (bw == pref->width)
+ bd -= 1;
+
+ if (bh == pref->height)
+ bd -= 1;
+
+ /* If the choices are equivalent, maintain the order */
+ if (ad == bd)
+ ret = 1;
+ else
+ ret = ad - bd;
+
+ done:
+ GST_TRACE ("Placing %ix%i (%s) %s %ix%i (%s)", aw, ah,
+ gst_structure_get_string (a, "format"), ret > 0 ? "after" : "before", bw,
+ bh, gst_structure_get_string (b, "format"));
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2src_set_format (GstV4l2Src * v4l2src, GstCaps * caps,
+ GstV4l2Error * error)
+ {
+ GstV4l2Object *obj;
+
+ obj = v4l2src->v4l2object;
+
+ /* make sure we stop capturing and dealloc buffers */
+ if (!gst_v4l2_object_stop (obj))
+ return FALSE;
+
+ g_signal_emit (v4l2src, gst_v4l2_signals[SIGNAL_PRE_SET_FORMAT], 0,
+ v4l2src->v4l2object->video_fd, caps);
+
+ return gst_v4l2_object_set_format (obj, caps, error);
+ }
+
static GstCaps *
- gst_v4l2src_fixate (GstBaseSrc * basesrc, GstCaps * caps)
+ gst_v4l2src_fixate (GstBaseSrc * basesrc, GstCaps * caps, GstStructure * pref_s)
{
- GstStructure *structure;
- gint i;
+ /* Let's prefer a good resolution as of today's standard. */
+ struct PreferedCapsInfo pref = {
+ 3840, 2160, 120, 1
+ };
+ GstV4l2Src *v4l2src = GST_V4L2SRC (basesrc);
+ GstV4l2Object *obj = v4l2src->v4l2object;
+ GList *caps_list = NULL;
+ GstStructure *s;
+ gint i = G_MAXINT;
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+ GstCaps *fcaps = NULL;
GST_DEBUG_OBJECT (basesrc, "fixating caps %" GST_PTR_FORMAT, caps);
- caps = gst_caps_make_writable (caps);
+ /* We consider the first structure from peercaps to be a preference. This is
+ * useful for matching a reported native display, or simply to avoid
+ * transformation to happen downstream. */
+ if (pref_s) {
+ pref_s = gst_structure_copy (pref_s);
+ gst_v4l2_src_fixate_struct_with_preference (pref_s, &pref);
+ gst_v4l2_src_parse_fixed_struct (pref_s, &pref.width, &pref.height,
+ &pref.fps_n, &pref.fps_d);
+ gst_structure_free (pref_s);
+ }
- for (i = 0; i < gst_caps_get_size (caps); ++i) {
- structure = gst_caps_get_structure (caps, i);
+ GST_DEBUG_OBJECT (basesrc, "Prefered size %ix%i", pref.width, pref.height);
- /* We are fixating to a reasonable 320x200 resolution
- and the maximum framerate resolution for that size */
- if (gst_structure_has_field (structure, "width"))
- gst_structure_fixate_field_nearest_int (structure, "width", 320);
+ /* Sort the structures to get the caps that is nearest to our preferences,
+ * first. Use single struct caps for sorting so we preserve the features. */
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ GstCaps *tmp = gst_caps_copy_nth (caps, i);
- if (gst_structure_has_field (structure, "height"))
- gst_structure_fixate_field_nearest_int (structure, "height", 200);
+ s = gst_caps_get_structure (tmp, 0);
+ gst_v4l2_src_fixate_struct_with_preference (s, &pref);
- if (gst_structure_has_field (structure, "framerate"))
- gst_structure_fixate_field_nearest_fraction (structure, "framerate",
- 100, 1);
+ caps_list = g_list_insert_sorted_with_data (caps_list, tmp,
+ (GCompareDataFunc) gst_v4l2src_fixed_caps_compare, &pref);
+ }
- if (gst_structure_has_field (structure, "format"))
- gst_structure_fixate_field (structure, "format");
+ gst_caps_unref (caps);
+ caps = gst_caps_new_empty ();
- if (gst_structure_has_field (structure, "interlace-mode"))
- gst_structure_fixate_field (structure, "interlace-mode");
+ while (caps_list) {
+ GstCaps *tmp = caps_list->data;
+ caps_list = g_list_delete_link (caps_list, caps_list);
+ gst_caps_append (caps, tmp);
}
- GST_DEBUG_OBJECT (basesrc, "fixated caps %" GST_PTR_FORMAT, caps);
+ GST_DEBUG_OBJECT (basesrc, "sorted and normalized caps %" GST_PTR_FORMAT,
+ caps);
- caps = GST_BASE_SRC_CLASS (parent_class)->fixate (basesrc, caps);
+ /* Each structure in the caps has been fixated, except for the
+ * interlace-mode and colorimetry. Now normalize the caps so we can
+ * enumerate the possibilities */
+ caps = gst_caps_normalize (caps);
- return caps;
- }
+ for (i = 0; i < gst_caps_get_size (caps); ++i) {
+ gst_v4l2_clear_error (&error);
+ if (fcaps)
+ gst_caps_unref (fcaps);
+
+ fcaps = gst_caps_copy_nth (caps, i);
+
+ /* try hard to avoid TRY_FMT since some UVC cameras just crash when this
+ * is called at run-time. */
+ if (gst_v4l2_object_caps_is_subset (obj, fcaps)) {
+ gst_caps_unref (fcaps);
+ fcaps = gst_v4l2_object_get_current_caps (obj);
+ break;
+ }
+
+ /* Just check if the format is acceptable, once we know
+ * no buffers should be outstanding we try S_FMT.
+ *
+ * Basesrc will do an allocation query that
+ * should indirectly reclaim buffers, after that we can
+ * set the format and then configure our pool */
+ if (gst_v4l2_object_try_format (obj, fcaps, &error)) {
+ /* make sure the caps changed before doing anything */
+ if (gst_v4l2_object_caps_equal (obj, fcaps))
+ break;
+
+ v4l2src->renegotiation_adjust = v4l2src->offset + 1;
+ v4l2src->pending_set_fmt = TRUE;
+ break;
+ }
+
+ /* Only EINVAL makes sense; report any other errors, this way we don't keep
+ * probing if the device got disconnected, or if its firmware stopped
+ * responding */
+ if (error.error->code != GST_RESOURCE_ERROR_SETTINGS) {
+ i = G_MAXINT;
+ break;
+ }
+ }
+
+ if (i >= gst_caps_get_size (caps)) {
+ gst_v4l2_error (v4l2src, &error);
+ if (fcaps)
+ gst_caps_unref (fcaps);
+ gst_caps_unref (caps);
+ return NULL;
+ }
+
+ gst_caps_unref (caps);
+ GST_DEBUG_OBJECT (basesrc, "fixated caps %" GST_PTR_FORMAT, fcaps);
+
+ return fcaps;
+ }
static gboolean
gst_v4l2src_negotiate (GstBaseSrc * basesrc)
peercaps = gst_pad_peer_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
if (peercaps && !gst_caps_is_any (peercaps)) {
- GstCaps *icaps = NULL;
-
/* Prefer the first caps we are compatible with that the peer proposed */
- icaps = gst_caps_intersect_full (peercaps, thiscaps,
+ caps = gst_caps_intersect_full (peercaps, thiscaps,
GST_CAPS_INTERSECT_FIRST);
- GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, icaps);
- if (icaps) {
- /* If there are multiple intersections pick the one with the smallest
- * resolution strictly bigger then the first peer caps */
- if (gst_caps_get_size (icaps) > 1) {
- GstStructure *s = gst_caps_get_structure (peercaps, 0);
- int best = 0;
- int twidth, theight;
- int width = G_MAXINT, height = G_MAXINT;
-
- if (gst_structure_get_int (s, "width", &twidth)
- && gst_structure_get_int (s, "height", &theight)) {
- int i;
-
- /* Walk the structure backwards to get the first entry of the
- * smallest resolution bigger (or equal to) the preferred resolution)
- */
- for (i = gst_caps_get_size (icaps) - 1; i >= 0; i--) {
- GstStructure *is = gst_caps_get_structure (icaps, i);
- int w, h;
-
- if (gst_structure_get_int (is, "width", &w)
- && gst_structure_get_int (is, "height", &h)) {
- if (w >= twidth && w <= width && h >= theight && h <= height) {
- width = w;
- height = h;
- best = i;
- }
- }
- }
- }
-
- caps = gst_caps_copy_nth (icaps, best);
- gst_caps_unref (icaps);
- } else {
- caps = icaps;
- }
- }
+ GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, caps);
+
gst_caps_unref (thiscaps);
} else {
/* no peer or peer have ANY caps, work with our own caps then */
caps = thiscaps;
}
- if (peercaps)
- gst_caps_unref (peercaps);
- if (caps) {
- caps = gst_caps_truncate (caps);
+ if (caps) {
/* now fixate */
if (!gst_caps_is_empty (caps)) {
- caps = gst_v4l2src_fixate (basesrc, caps);
+ GstStructure *pref = NULL;
+
+ if (peercaps && !gst_caps_is_any (peercaps))
+ pref = gst_caps_get_structure (peercaps, 0);
+
+ caps = gst_v4l2src_fixate (basesrc, caps, pref);
+
+ /* Fixating may fail as we now set the selected format */
+ if (!caps) {
+ result = FALSE;
+ goto done;
+ }
+
GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);
if (gst_caps_is_any (caps)) {
}
gst_caps_unref (caps);
}
+
+ done:
+ if (peercaps)
+ gst_caps_unref (peercaps);
+
return result;
no_nego_needed:
}
static gboolean
- gst_v4l2src_set_format (GstV4l2Src * v4l2src, GstCaps * caps)
- {
- GstV4l2Error error = GST_V4L2_ERROR_INIT;
- GstV4l2Object *obj;
-
- obj = v4l2src->v4l2object;
-
- g_signal_emit (v4l2src, gst_v4l2_signals[SIGNAL_PRE_SET_FORMAT], 0,
- v4l2src->v4l2object->video_fd, caps);
-
- if (!gst_v4l2_object_set_format (obj, caps, &error)) {
- gst_v4l2_error (v4l2src, &error);
- return FALSE;
- }
-
- return TRUE;
- }
-
- static gboolean
- gst_v4l2src_set_caps (GstBaseSrc * src, GstCaps * caps)
- {
- GstV4l2Src *v4l2src;
- GstV4l2Object *obj;
-
- v4l2src = GST_V4L2SRC (src);
- obj = v4l2src->v4l2object;
-
- /* make sure the caps changed before doing anything */
- if (gst_v4l2_object_caps_equal (obj, caps))
- return TRUE;
-
- if (GST_V4L2_IS_ACTIVE (obj)) {
- GstV4l2Error error = GST_V4L2_ERROR_INIT;
- /* Just check if the format is acceptable, once we know
- * no buffers should be outstanding we try S_FMT.
- *
- * Basesrc will do an allocation query that
- * should indirectly reclaim buffers, after that we can
- * set the format and then configure our pool */
- if (gst_v4l2_object_try_format (obj, caps, &error)) {
- v4l2src->renegotiation_adjust = v4l2src->offset + 1;
- v4l2src->pending_set_fmt = TRUE;
- } else {
- gst_v4l2_error (v4l2src, &error);
- return FALSE;
- }
- } else {
- /* make sure we stop capturing and dealloc buffers */
- if (!gst_v4l2_object_stop (obj))
- return FALSE;
-
- return gst_v4l2src_set_format (v4l2src, caps);
- }
-
- return TRUE;
- }
-
- static gboolean
gst_v4l2src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
{
GstV4l2Src *src = GST_V4L2SRC (bsrc);
if (src->pending_set_fmt) {
GstCaps *caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc));
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+
+ caps = gst_caps_make_writable (caps);
+ if (!(ret = gst_v4l2src_set_format (src, caps, &error)))
+ gst_v4l2_error (src, &error);
- if (!gst_v4l2_object_stop (src->v4l2object))
- return FALSE;
- ret = gst_v4l2src_set_format (src, caps);
gst_caps_unref (caps);
src->pending_set_fmt = FALSE;
} else if (gst_buffer_pool_is_active (src->v4l2object->pool)) {
#include <gstv4l2object.h>
#include <gstv4l2bufferpool.h>
- GST_DEBUG_CATEGORY_EXTERN (v4l2src_debug);
-
G_BEGIN_DECLS
#define GST_TYPE_V4L2SRC \
/* Timestamp sanity check */
GstClockTime last_timestamp;
gboolean has_bad_timestamp;
+
+#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
+ /* Properties */
+ guint camera_id;
+#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
};
struct _GstV4l2SrcClass
#include <string.h>
#include <errno.h>
#include <unistd.h>
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+#include <glob.h>
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
#ifdef __sun
/* Needed on older Solaris Nevada builds (72 at least) */
#include <stropts.h>
#include <sys/ioccom.h>
#endif
- #include "v4l2_calls.h"
+ #include "gstv4l2object.h"
#include "gstv4l2tuner.h"
- #if 0
- #include "gstv4l2xoverlay.h"
- #endif
#include "gstv4l2colorbalance.h"
#include "gstv4l2src.h"
#include "gst/gst-i18n-plugin.h"
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+enum {
+ V4L2_OPEN_ERROR = 0,
+ V4L2_OPEN_ERROR_STAT_FAILED,
+ V4L2_OPEN_ERROR_NO_DEVICE,
+ V4L2_OPEN_ERROR_NOT_OPEN,
+ V4L2_OPEN_ERROR_NOT_CAPTURE,
+ V4L2_OPEN_ERROR_NOT_OUTPUT
+};
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
* get the device's capturing capabilities
* return value: TRUE on success, FALSE on error
******************************************************/
- gboolean
+ static gboolean
gst_v4l2_get_capabilities (GstV4l2Object * v4l2object)
{
GstElement *e;
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_QUERYCAP, &v4l2object->vcap) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_QUERYCAP,
+ &v4l2object->vcap) < 0)
goto cap_failed;
if (v4l2object->vcap.capabilities & V4L2_CAP_DEVICE_CAPS)
memset (&input, 0, sizeof (input));
input.index = n;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENUMINPUT, &input) < 0) {
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENUMINPUT, &input) < 0) {
if (errno == EINVAL || errno == ENOTTY)
break; /* end of enumeration */
else {
channel->flags |= GST_TUNER_CHANNEL_FREQUENCY;
vtun.index = input.tuner;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_TUNER, &vtun) < 0) {
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_TUNER, &vtun) < 0) {
GST_ELEMENT_ERROR (e, RESOURCE, SETTINGS,
(_("Failed to get setting of tuner %d on device '%s'."),
input.tuner, v4l2object->videodev), GST_ERROR_SYSTEM);
standard.frameperiod.denominator = 0;
standard.index = n;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENUMSTD, &standard) < 0) {
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENUMSTD, &standard) < 0) {
if (errno == EINVAL || errno == ENOTTY)
break; /* end of enumeration */
#ifdef ENODATA
standard.frameperiod.denominator, standard.frameperiod.numerator);
v4l2norm->index = standard.id;
- GST_DEBUG_OBJECT (v4l2object->element, "index=%08x, label=%s",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "index=%08x, label=%s",
(unsigned int) v4l2norm->index, norm->label);
v4l2object->norms = g_list_prepend (v4l2object->norms, (gpointer) norm);
GST_DEBUG_OBJECT (e, "checking control %08x", n);
control.id = n | next;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_QUERYCTRL, &control) < 0) {
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_QUERYCTRL,
+ &control) < 0) {
if (next) {
if (n > 0) {
GST_DEBUG_OBJECT (e, "controls finished");
}
default:
GST_DEBUG_OBJECT (e,
- "Control type for '%s' not suppored for extra controls.",
+ "Control type for '%s' not supported for extra controls.",
control.name);
break;
}
menu.id = n;
for (i = 0;; i++) {
menu.index = i;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_QUERYMENU, &menu) < 0) {
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_QUERYMENU,
+ &menu) < 0) {
if (errno == EINVAL)
break; /* end of enumeration */
else {
static void
gst_v4l2_empty_lists (GstV4l2Object * v4l2object)
{
- GST_DEBUG_OBJECT (v4l2object->element, "deleting enumerations");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deleting enumerations");
g_list_foreach (v4l2object->channels, (GFunc) g_object_unref, NULL);
g_list_free (v4l2object->channels);
gst_v4l2_open (GstV4l2Object * v4l2object)
{
struct stat st;
- int libv4l2_fd;
+ int libv4l2_fd = -1;
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ int error_type = V4L2_OPEN_ERROR_STAT_FAILED;
+ int device_index = 0;
+ glob_t glob_buf;
+
+ memset(&glob_buf, 0x0, sizeof(glob_t));
-
- GST_DEBUG_OBJECT (v4l2object->element, "Trying to open device %s",
+ if (!v4l2object) {
+ GST_ERROR("v4l2object is NULL");
+ return FALSE;
+ }
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying to open device %s",
v4l2object->videodev);
GST_V4L2_CHECK_NOT_OPEN (v4l2object);
if (!v4l2object->videodev)
v4l2object->videodev = g_strdup ("/dev/video");
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!v4l2object->videodev) {
+ GST_ERROR_OBJECT(v4l2object->element, "videodev is NULL");
+ return FALSE;
+ }
+
+CHECK_AGAIN:
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
/* check if it is a device */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (stat (v4l2object->videodev, &st) == -1) {
+ error_type = V4L2_OPEN_ERROR_STAT_FAILED;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (stat (v4l2object->videodev, &st) == -1)
goto stat_failed;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!S_ISCHR (st.st_mode)) {
+ error_type = V4L2_OPEN_ERROR_NO_DEVICE;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!S_ISCHR (st.st_mode))
goto no_device;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
/* open the device */
v4l2object->video_fd =
open (v4l2object->videodev, O_RDWR /* | O_NONBLOCK */ );
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!GST_V4L2_IS_OPEN (v4l2object)) {
+ error_type = V4L2_OPEN_ERROR_NOT_OPEN;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!GST_V4L2_IS_OPEN (v4l2object))
goto not_open;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
- libv4l2_fd = v4l2_fd_open (v4l2object->video_fd,
- V4L2_ENABLE_ENUM_FMT_EMULATION);
+ #ifdef HAVE_LIBV4L2
+ if (v4l2object->fd_open)
+ libv4l2_fd = v4l2object->fd_open (v4l2object->video_fd,
+ V4L2_ENABLE_ENUM_FMT_EMULATION);
+ #endif
+
/* Note the v4l2_xxx functions are designed so that if they get passed an
unknown fd, the will behave exactly as their regular xxx counterparts, so
if v4l2_fd_open fails, we continue as normal (missing the libv4l2 custom
v4l2object->video_fd = libv4l2_fd;
/* get capabilities, error will be posted */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!gst_v4l2_get_capabilities (v4l2object)) {
+ error_type = V4L2_OPEN_ERROR;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!gst_v4l2_get_capabilities (v4l2object))
goto error;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
/* do we need to be a capture device? */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ GST_INFO_OBJECT(v4l2object->element, "device_caps 0x%x", v4l2object->device_caps);
+ if (GST_IS_V4L2SRC (v4l2object->element) &&
+ (!(v4l2object->device_caps & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) ||
+ (v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) {
+ error_type = V4L2_OPEN_ERROR_NOT_CAPTURE;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (GST_IS_V4L2SRC (v4l2object->element) &&
!(v4l2object->device_caps & (V4L2_CAP_VIDEO_CAPTURE |
V4L2_CAP_VIDEO_CAPTURE_MPLANE)))
goto not_capture;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (GST_IS_V4L2SINK (v4l2object->element) &&
+ !(v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT |
+ V4L2_CAP_VIDEO_OUTPUT_MPLANE))) {
+ error_type = V4L2_OPEN_ERROR_NOT_OUTPUT;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (GST_IS_V4L2SINK (v4l2object->element) &&
!(v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT |
V4L2_CAP_VIDEO_OUTPUT_MPLANE)))
goto not_output;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (GST_IS_V4L2_VIDEO_DEC (v4l2object->element) &&
- /* Today's M2M device only expose M2M */
- !((v4l2object->device_caps & (V4L2_CAP_VIDEO_M2M |
- V4L2_CAP_VIDEO_M2M_MPLANE)) ||
- /* But legacy driver may expose both CAPTURE and OUTPUT */
- ((v4l2object->device_caps &
- (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) &&
- (v4l2object->device_caps &
- (V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE)))))
+ !GST_V4L2_IS_M2M (v4l2object->device_caps))
goto not_m2m;
gst_v4l2_adjust_buf_type (v4l2object);
/* create enumerations, posts errors. */
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ if (!gst_v4l2_fill_lists (v4l2object)) {
+ error_type = V4L2_OPEN_ERROR;
+ goto pre_error_check;
+ }
+#else /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
if (!gst_v4l2_fill_lists (v4l2object))
goto error;
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
- GST_INFO_OBJECT (v4l2object->element,
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
"Opened device '%s' (%s) successfully",
v4l2object->vcap.card, v4l2object->videodev);
if (v4l2object->extra_controls)
gst_v4l2_set_controls (v4l2object, v4l2object->extra_controls);
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ globfree(&glob_buf);
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+
/* UVC devices are never interlaced, and doing VIDIOC_TRY_FMT on them
* causes expensive and slow USB IO, so don't probe them for interlaced
*/
- if (!strcmp ((char *) v4l2object->vcap.driver, "uvcusb")) {
+ if (!strcmp ((char *) v4l2object->vcap.driver, "uvcusb") ||
+ !strcmp ((char *) v4l2object->vcap.driver, "uvcvideo")) {
v4l2object->never_interlaced = TRUE;
}
return TRUE;
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+pre_error_check:
+ {
+ if (GST_IS_V4L2SRC(v4l2object->element) && glob_buf.gl_pathc == 0) {
+ if (glob("/dev/video*", 0, 0, &glob_buf) != 0) {
+ GST_WARNING_OBJECT(v4l2object->element, "glob failed");
+ }
+ }
+
+ if (glob_buf.gl_pathc > 0 && (gsize) device_index < glob_buf.gl_pathc) {
+ if (v4l2object->videodev) {
+ g_free(v4l2object->videodev);
+ v4l2object->videodev = NULL;
+ }
+ v4l2object->videodev = g_strdup(glob_buf.gl_pathv[device_index]);
+ if (v4l2object->videodev) {
+ device_index++;
+ GST_INFO_OBJECT(v4l2object->element, "check device [%s]",
+ v4l2object->videodev);
+
+ if (GST_V4L2_IS_OPEN (v4l2object)) {
+ /* close device */
+ v4l2object->close (v4l2object->video_fd);
+ v4l2object->video_fd = -1;
+ }
+ /* empty lists */
+ gst_v4l2_empty_lists (v4l2object);
+
+ goto CHECK_AGAIN;
+ } else {
+ GST_WARNING_OBJECT(v4l2object->element, "g_strdup failed [%s]",
+ glob_buf.gl_pathv[device_index]);
+ }
+ }
+
+ GST_WARNING_OBJECT(v4l2object->element, "error type : %d", error_type);
+
+ switch (error_type) {
+ case V4L2_OPEN_ERROR_STAT_FAILED:
+ goto stat_failed;
+ case V4L2_OPEN_ERROR_NO_DEVICE:
+ goto no_device;
+ case V4L2_OPEN_ERROR_NOT_OPEN:
+ goto not_open;
+ case V4L2_OPEN_ERROR_NOT_CAPTURE:
+ goto not_capture;
+ case V4L2_OPEN_ERROR_NOT_OUTPUT:
+ goto not_output;
+ default:
+ goto error;
+ }
+ }
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+
/* ERRORS */
stat_failed:
{
{
if (GST_V4L2_IS_OPEN (v4l2object)) {
/* close device */
- v4l2_close (v4l2object->video_fd);
+ v4l2object->close (v4l2object->video_fd);
v4l2object->video_fd = -1;
}
/* empty lists */
gst_v4l2_empty_lists (v4l2object);
+#ifdef TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE
+ globfree(&glob_buf);
+#endif /* TIZEN_FEATURE_V4L2SRC_SCAN_DEVICE_NODE */
+
return FALSE;
}
}
gboolean
gst_v4l2_dup (GstV4l2Object * v4l2object, GstV4l2Object * other)
{
- GST_DEBUG_OBJECT (v4l2object->element, "Trying to dup device %s",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying to dup device %s",
other->videodev);
GST_V4L2_CHECK_OPEN (other);
v4l2object->device_caps = other->device_caps;
gst_v4l2_adjust_buf_type (v4l2object);
- v4l2object->video_fd = v4l2_dup (other->video_fd);
+ v4l2object->video_fd = v4l2object->dup (other->video_fd);
if (!GST_V4L2_IS_OPEN (v4l2object))
goto not_open;
g_free (v4l2object->videodev);
v4l2object->videodev = g_strdup (other->videodev);
- GST_INFO_OBJECT (v4l2object->element,
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
"Cloned device '%s' (%s) successfully",
v4l2object->vcap.card, v4l2object->videodev);
v4l2object->never_interlaced = other->never_interlaced;
+ v4l2object->no_initial_format = other->no_initial_format;
return TRUE;
gboolean
gst_v4l2_close (GstV4l2Object * v4l2object)
{
- GST_DEBUG_OBJECT (v4l2object->element, "Trying to close %s",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying to close %s",
v4l2object->videodev);
GST_V4L2_CHECK_OPEN (v4l2object);
GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
/* close device */
- v4l2_close (v4l2object->video_fd);
+ v4l2object->close (v4l2object->video_fd);
v4l2object->video_fd = -1;
/* empty lists */
gboolean
gst_v4l2_get_norm (GstV4l2Object * v4l2object, v4l2_std_id * norm)
{
- GST_DEBUG_OBJECT (v4l2object->element, "getting norm");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting norm");
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_STD, norm) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_STD, norm) < 0)
goto std_failed;
return TRUE;
gboolean
gst_v4l2_set_norm (GstV4l2Object * v4l2object, v4l2_std_id norm)
{
- GST_DEBUG_OBJECT (v4l2object->element, "trying to set norm to "
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to set norm to "
"%" G_GINT64_MODIFIER "x", (guint64) norm);
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_STD, &norm) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_STD, &norm) < 0)
goto std_failed;
return TRUE;
GstTunerChannel *channel;
- GST_DEBUG_OBJECT (v4l2object->element, "getting current tuner frequency");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting current tuner frequency");
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
channel = gst_tuner_get_channel (GST_TUNER (v4l2object->element));
freq.tuner = tunernum;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_FREQUENCY, &freq) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_FREQUENCY, &freq) < 0)
goto freq_failed;
*frequency = freq.frequency * channel->freq_multiplicator;
GstTunerChannel *channel;
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"setting current tuner frequency to %lu", frequency);
if (!GST_V4L2_IS_OPEN (v4l2object))
freq.tuner = tunernum;
/* fill in type - ignore error */
- (void) v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_FREQUENCY, &freq);
+ (void) v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_FREQUENCY, &freq);
freq.frequency = frequency / channel->freq_multiplicator;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_FREQUENCY, &freq) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_FREQUENCY, &freq) < 0)
goto freq_failed;
return TRUE;
{
struct v4l2_tuner tuner = { 0, };
- GST_DEBUG_OBJECT (v4l2object->element, "trying to get signal strength");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to get signal strength");
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
tuner.index = tunernum;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_TUNER, &tuner) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_TUNER, &tuner) < 0)
goto tuner_failed;
*signal_strength = tuner.signal;
{
struct v4l2_control control = { 0, };
- GST_DEBUG_OBJECT (v4l2object->element, "getting value of attribute %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting value of attribute %d",
attribute_num);
if (!GST_V4L2_IS_OPEN (v4l2object))
control.id = attribute_num;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
goto ctrl_failed;
*value = control.value;
{
struct v4l2_control control = { 0, };
- GST_DEBUG_OBJECT (v4l2object->element, "setting value of attribute %d to %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "setting value of attribute %d to %d",
attribute_num, value);
if (!GST_V4L2_IS_OPEN (v4l2object))
control.id = attribute_num;
control.value = value;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0)
goto ctrl_failed;
return TRUE;
{
gint n;
- GST_DEBUG_OBJECT (v4l2object->element, "trying to get input");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to get input");
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_INPUT, &n) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_INPUT, &n) < 0)
goto input_failed;
*input = n;
- GST_DEBUG_OBJECT (v4l2object->element, "input: %d", n);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "input: %d", n);
return TRUE;
gboolean
gst_v4l2_set_input (GstV4l2Object * v4l2object, gint input)
{
- GST_DEBUG_OBJECT (v4l2object->element, "trying to set input to %d", input);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to set input to %d", input);
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_INPUT, &input) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_INPUT, &input) < 0)
goto input_failed;
return TRUE;
{
gint n;
- GST_DEBUG_OBJECT (v4l2object->element, "trying to get output");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to get output");
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_OUTPUT, &n) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_OUTPUT, &n) < 0)
goto output_failed;
*output = n;
- GST_DEBUG_OBJECT (v4l2object->element, "output: %d", n);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "output: %d", n);
return TRUE;
gboolean
gst_v4l2_set_output (GstV4l2Object * v4l2object, gint output)
{
- GST_DEBUG_OBJECT (v4l2object->element, "trying to set output to %d", output);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to set output to %d", output);
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_OUTPUT, &output) < 0)
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_OUTPUT, &output) < 0)
goto output_failed;
return TRUE;